AgentForge

Custom LLM Provider

Implement a custom LLM provider for AgentForge.

LLM Interface

Implement the LLM interface:

import type { LLM, ChatRequest, ChatResponse } from '@ahzan-agentforge/core';

/**
 * Example LLM provider that adapts a custom model API (`myApi`)
 * to the AgentForge `LLM` contract.
 */
class MyCustomLLM implements LLM {
  /**
   * Forwards the chat request to the custom API and maps its response
   * into AgentForge's `ChatResponse` shape.
   */
  async chat(request: ChatRequest): Promise<ChatResponse> {
    const { system, messages, tools } = request;

    // Call your custom LLM API
    const apiResult = await myApi.generate({ system, messages, tools });

    // Tool-call responses and plain-text responses share one shape,
    // distinguished by the `type` discriminant.
    const kind = apiResult.hasToolCalls ? 'tool_calls' : 'text';

    return {
      type: kind,
      content: apiResult.text,
      // `??` keeps a valid empty list when the API omits tool calls.
      toolCalls: apiResult.toolCalls ?? [],
      usage: {
        inputTokens: apiResult.usage.input,
        outputTokens: apiResult.usage.output,
      },
    };
  }
}

With Streaming

Implement the StreamingLLM interface for streaming support — it requires both the non-streaming chat method and a chatStream generator:

import type { StreamingLLM, ChatStreamEvent } from '@ahzan-agentforge/core';

/**
 * Example streaming provider. Implements both the non-streaming `chat`
 * contract and the `chatStream` async generator.
 */
class MyStreamingLLM implements StreamingLLM {
  /**
   * Non-streaming fallback. A real implementation maps the API response
   * exactly as in `MyCustomLLM`; stubbed here for brevity.
   */
  async chat(request: ChatRequest): Promise<ChatResponse> {
    // Throwing keeps the stub valid TypeScript: an empty body would fail
    // to satisfy `Promise<ChatResponse>` under `noImplicitReturns`/strict.
    throw new Error('MyStreamingLLM.chat is not implemented');
  }

  /**
   * Streams events from the custom API, translating each chunk into an
   * AgentForge `ChatStreamEvent`, then signals completion.
   */
  async *chatStream(request: ChatRequest): AsyncGenerator<ChatStreamEvent> {
    const stream = await myApi.streamGenerate(request);

    for await (const chunk of stream) {
      if (chunk.type === 'text') {
        yield { type: 'text_delta', content: chunk.text };
      }
      // Handle tool calls, usage, done
    }

    yield { type: 'done' };
  }
}

Usage

// Wire the custom provider into an agent via the `llm` option.
const agent = defineAgent({
  name: 'custom-agent',
  description: 'Agent with custom LLM',
  tools: [myTool],
  llm: new MyCustomLLM(), // any LLM implementation works here
  systemPrompt: '...',
});

Next Steps