Guided workflow to add a new LLM provider to XSky
Guided workflow to integrate a new LLM provider into XSky's AI agent framework. Use when adding support for providers like Mistral or Cohere.
/plugin marketplace add anujkumar001111/xsky-agent
/plugin install anujkumar001111-xsky-dev-team@anujkumar001111/xsky-agent

You are guiding the user through adding a new LLM provider to the XSky AI Agent framework.
Ask the user which LLM provider they want to add (for example, Mistral or Cohere), then work through the steps below.
XSky uses Vercel's AI SDK. Check whether the provider has an official adapter:
# Search for existing adapter
npm search @ai-sdk/<provider>
If adapter exists:
pnpm --filter @xsky/ai-agent-core add @ai-sdk/<provider>
If no adapter, use @ai-sdk/openai-compatible for OpenAI-compatible APIs.
Location: packages/ai-agent-core/src/llm/providers/
// packages/ai-agent-core/src/llm/providers/mistral.ts
import { createMistral } from "@ai-sdk/mistral";
import { LLMConfig } from "../../types/llm.types";
/**
 * Build a Mistral language-model instance from the shared LLM config.
 *
 * @param config - Provider-agnostic LLM settings. `apiKey` and `model` are
 *   used directly; `config.config?.baseURL` optionally overrides the default
 *   API endpoint (useful for proxies or self-hosted gateways).
 * @returns A model handle consumable by the AI SDK's generate/stream calls.
 */
export function createMistralProvider(config: LLMConfig) {
  // Instantiate the provider client first, then select the concrete model.
  const client = createMistral({
    apiKey: config.apiKey,
    baseURL: config.config?.baseURL,
  });
  return client(config.model, {
    // Model-specific options
  });
}
// packages/ai-agent-core/src/llm/index.ts
import { createMistralProvider } from "./providers/mistral";
/**
 * Resolve and invoke the provider-specific factory for the given LLM config.
 *
 * @param llmConfig - Configuration whose `provider` field selects the backend.
 * @returns The model instance created by the matching provider factory.
 * @throws Error when `provider` names an unsupported backend.
 */
export function createProvider(llmConfig: LLMConfig) {
  // Guard-clause dispatch: each supported backend returns early.
  if (llmConfig.provider === "openai") {
    return createOpenAIProvider(llmConfig);
  }
  if (llmConfig.provider === "anthropic") {
    return createAnthropicProvider(llmConfig);
  }
  if (llmConfig.provider === "mistral") {
    // Add new case
    return createMistralProvider(llmConfig);
  }
  // ... remaining providers follow the same pattern
  throw new Error(`Unknown provider: ${llmConfig.provider}`);
}
// packages/ai-agent-core/src/types/llm.types.ts
// Union of every provider identifier accepted by createProvider().
// Keep this list in sync with the dispatch in llm/index.ts: each member
// here must have a matching case that routes to its factory.
export type LLMProvider =
  | "openai"
  | "anthropic"
  | "google"
  | "aws"
  | "openrouter"
  | "openai-compatible"
  | "mistral"; // Add new provider
// packages/ai-agent-core/test/llm/mistral.test.ts
describe('Mistral Provider', () => {
it('should create provider with config', () => {
const provider = createMistralProvider({
provider: 'mistral',
model: 'mistral-large-latest',
apiKey: 'test-key'
});
expect(provider).toBeDefined();
});
});
Add to README:
### Mistral
{
provider: "mistral",
model: "mistral-large-latest",
apiKey: process.env.MISTRAL_API_KEY,
}
pnpm build
pnpm test
MISTRAL_API_KEY=your-key
Now, which LLM provider do you want to add?