Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ This repository is an npm workspace monorepo:
- Language: TypeScript across apps/packages, with `strict` mode enabled.
- Use `import type` where applicable (`@typescript-eslint/consistent-type-imports` is enforced).
- Keep unused parameters/locals prefixed with `_` to satisfy lint rules.
- Match existing file naming patterns: kebab-case files (`openai-llm.ts`), PascalCase classes, UPPER_SNAKE_CASE constants.
- Match existing file naming patterns: kebab-case files (`openai-agents-llm.ts`), PascalCase classes, UPPER_SNAKE_CASE constants.
- Follow existing indentation/style in each file; do not reformat unrelated code.

## Testing Guidelines
Expand All @@ -46,3 +46,4 @@ This repository is an npm workspace monorepo:
- Treat `design-system/` (git submodule) as the source of truth for UI primitives.
- Prefer existing tokens/components from the submodule before building custom UI in `apps/web`.
- If a needed component does not exist in `design-system/`, pause and consult the user before adding a new component or introducing a non-design-system alternative.
- In a brand new git worktree, initialize submodules before development (`git submodule update --init --recursive`), or the design-system assets will be missing.
3 changes: 3 additions & 0 deletions CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ npm run build:packages # Build only shared packages (types + engine)

**Approval/pause flow:** When engine hits an Approval node, it sets `waitingForInput=true` and the engine instance is stored in an in-memory `Map` (`store/active-workflows.ts`). Client calls `POST /api/resume` with user input to continue.

**Agent backend:** Server agent execution is implemented through OpenAI Agents SDK (`apps/server/src/services/openai-agents-llm.ts`). Agent runs are capped with `maxTurns: 20` to bound loop iterations.

**Build dependency chain:** `packages/types` → `packages/workflow-engine` → `apps/server` / `apps/web`. Always run `build:packages` before typechecking or building apps.

## Design System (Git Submodule)
Expand Down Expand Up @@ -106,3 +108,4 @@ Agent node `userPrompt` fields support `{{PREVIOUS_OUTPUT}}` as a template token
- `data/runs/` is gitignored — created automatically at runtime.
- `.config/default-workflow.json` is gitignored — create it locally to preload a workflow on startup.
- Clone with `--recurse-submodules` to pull the design-system submodule.
- In a brand new git worktree, run `git submodule update --init --recursive` before development so the design-system submodule is available.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ data/

- **Visual Editor** – Canvas, floating palette, zoom controls, and inline node forms for prompts, branching rules, and approval copy.
- **Run Console** – Chat-style panel that renders agent responses progressively as they arrive via SSE, with per-agent labels, spinner states, and approval requests.
- **Workflow Engine** – Handles graph traversal, approvals, and LLM invocation (OpenAI Responses API or mock).
- **Workflow Engine** – Handles graph traversal, approvals, and LLM invocation (OpenAI Agents SDK).
- **Persistent Audit Trail** – Every run writes `data/runs/run_<timestamp>.json` containing the workflow graph plus raw execution logs, independent of what the UI chooses to display.

## Getting Started
Expand Down
4 changes: 2 additions & 2 deletions apps/server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,11 @@
"test": "npm run typecheck"
},
"dependencies": {
"@openai/agents": "^0.1.0",
"@agentic/types": "file:../../packages/types",
"@agentic/workflow-engine": "file:../../packages/workflow-engine",
"cors": "^2.8.5",
"express": "^4.19.2",
"openai": "^6.9.1"
"express": "^4.19.2"
},
"devDependencies": {
"@types/cors": "^2.8.17",
Expand Down
10 changes: 4 additions & 6 deletions apps/server/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,12 @@ import fs from 'node:fs';
import path from 'node:path';
import express, { type Request, type Response } from 'express';
import cors from 'cors';
import OpenAI from 'openai';
import type { WorkflowRunRecord } from '@agentic/types';
import WorkflowEngine, { type WorkflowLLM } from '@agentic/workflow-engine';
import { config } from './config';
import { logger } from './logger';
import { createWorkflowRouter } from './routes/workflows';
import { OpenAILLMService } from './services/openai-llm';
import { OpenAIAgentsLLMService } from './services/openai-agents-llm';
import { addWorkflow } from './store/active-workflows';

const isProduction = process.env.NODE_ENV === 'production';
Expand Down Expand Up @@ -69,11 +68,10 @@ async function bootstrap() {
app.use(cors());
app.use(express.json({ limit: '1mb' }));

let llmService: OpenAILLMService | undefined;
let llmService: WorkflowLLM | undefined;
if (config.openAiApiKey) {
logger.info('OPENAI_API_KEY detected, enabling live OpenAI responses');
const client = new OpenAI({ apiKey: config.openAiApiKey });
llmService = new OpenAILLMService(client);
logger.info('OPENAI_API_KEY detected, enabling live OpenAI Agents SDK backend');
llmService = new OpenAIAgentsLLMService();
} else {
logger.warn('OPENAI_API_KEY missing. Agent workflows will be rejected.');
}
Expand Down
77 changes: 77 additions & 0 deletions apps/server/src/services/openai-agents-llm.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import type { AgentInvocation, WorkflowLLM } from '@agentic/workflow-engine';

/**
 * Structural view of the pieces of '@openai/agents' this service uses.
 * Declared locally so the SDK can be loaded dynamically (see loadAgentsSdk)
 * without a static import that would fail when the package is absent.
 */
interface AgentsSdkModule {
  /** Agent constructor; the config object is passed through untyped. */
  Agent: new (config: Record<string, unknown>) => unknown;
  /** Runs an agent against a text input, optionally bounding loop turns. */
  run(
    agent: unknown,
    input: string,
    options?: { maxTurns?: number }
  ): Promise<{ finalOutput?: unknown }>;
  /** Factory for the hosted web-search tool. */
  webSearchTool(): unknown;
}

// Memoized promise for the dynamically imported Agents SDK module. Reset to
// null on load failure (inside loadAgentsSdk's catch) so a later call retries.
let sdkModulePromise: Promise<AgentsSdkModule> | null = null;
// Upper bound on agent loop iterations per run; forwarded as `maxTurns`.
const MAX_AGENT_TURNS = 20;

/**
 * Lazily imports '@openai/agents', memoizing the in-flight promise.
 *
 * The import is routed through `new Function` so the TypeScript compiler
 * cannot rewrite the dynamic import into a require() call at transpile time.
 * On failure the memo is cleared (allowing a retry on the next call) and the
 * error is rethrown with a clearer message.
 */
async function loadAgentsSdk(): Promise<AgentsSdkModule> {
  if (sdkModulePromise) {
    return sdkModulePromise;
  }
  const dynamicImport = new Function(
    'moduleName',
    'return import(moduleName);'
  ) as (moduleName: string) => Promise<AgentsSdkModule>;
  sdkModulePromise = dynamicImport('@openai/agents').catch((error: unknown) => {
    sdkModulePromise = null;
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(
      `OpenAI Agents SDK is required but '@openai/agents' is unavailable: ${message}`
    );
  });
  return sdkModulePromise;
}

/**
 * Maps the invocation's enabled tool flags onto SDK tool instances.
 * Currently only the hosted web-search tool is supported; the result is
 * empty when no tools are enabled.
 */
function buildAgentTools(invocation: AgentInvocation, sdk: AgentsSdkModule): unknown[] {
  return invocation.tools?.web_search ? [sdk.webSearchTool()] : [];
}

/**
 * Normalizes the SDK's `finalOutput` into a non-empty display string.
 *
 * - Strings are trimmed; an empty or whitespace-only result falls back to
 *   the "no text output" message instead of returning an empty string.
 * - null/undefined fall back to the same message.
 * - Structured outputs are JSON-serialized; values JSON cannot represent
 *   (functions, symbols) or serialize (circular graphs) also fall back,
 *   since JSON.stringify returns undefined or throws for those — the
 *   original code would have leaked `undefined` through the `string` return.
 */
function toTextOutput(finalOutput: unknown): string {
  const noOutput = 'Model returned no text output.';
  if (typeof finalOutput === 'string') {
    const text = finalOutput.trim();
    return text.length > 0 ? text : noOutput;
  }
  if (finalOutput === undefined || finalOutput === null) {
    return noOutput;
  }
  try {
    const serialized = JSON.stringify(finalOutput);
    return serialized === undefined ? noOutput : serialized;
  } catch {
    // Circular structures make JSON.stringify throw.
    return noOutput;
  }
}

export class OpenAIAgentsLLMService implements WorkflowLLM {
async respond(invocation: AgentInvocation): Promise<string> {
const sdk = await loadAgentsSdk();
const tools = buildAgentTools(invocation, sdk);
const agentConfig: Record<string, unknown> = {
name: 'Workflow Agent',
instructions: invocation.systemPrompt,
model: invocation.model
};

if (tools.length > 0) {
agentConfig.tools = tools;
}

if (invocation.reasoningEffort) {
agentConfig.modelSettings = {
reasoning: {
effort: invocation.reasoningEffort
}
};
}

const agent = new sdk.Agent(agentConfig);
const result = await sdk.run(agent, invocation.userContent, { maxTurns: MAX_AGENT_TURNS });
return toTextOutput(result.finalOutput);
}
}
73 changes: 0 additions & 73 deletions apps/server/src/services/openai-llm.ts

This file was deleted.

Loading