The LLM Factory implements a flexible provider pattern for managing multiple language model integrations. It provides a unified interface for generating responses while supporting provider-specific features like function calling and streaming.
Copy interface LLMResponse {
content : string ;
totalTokens ?: number ;
toolCalls ?: Array <{
id : string ;
type : 'function' ;
function : {
name : string ;
arguments : string ;
}
}>;
}
/**
 * Contract implemented by each model backend.
 *
 * `generateResponse` receives the full conversation plus the sanitized
 * latest user message and resolves to a normalized {@link LLMResponse}.
 */
interface LLMProvider {
  generateResponse(
    messages: Array<{ role: string; content: string }>,
    cleanContent: string
  ): Promise<LLMResponse>;
}
/** Identifiers for the supported LLM backends. */
export type ModelType = 'OPENAI' | 'GEMINI' | 'GROK' | 'DEEPSEEK';
Copy export class LLMFactory {
private static instance : LLMFactory ;
private providers : Map < ModelType , LLMProvider >;
private currentProvider : LLMProvider | null ;
private constructor () {
this .providers = new Map ();
this . providers .set ( 'OPENAI' , new OpenAIProvider ());
this . providers .set ( 'GEMINI' , new GeminiProvider ());
this . providers .set ( 'GROK' , new GrokProvider ());
this . providers .set ( 'DEEPSEEK' , new DeepSeekProvider ());
this .currentProvider = this . providers .get ( 'OPENAI' ) || null ;
}
public static getInstance () : LLMFactory ;
public getAvailableModels () : ModelType [];
public setModel (model : ModelType ) : boolean ;
public getCurrentModel () : ModelType | null ;
public getCurrentProvider () : LLMProvider | null ;
public generateResponse (
messages : Array <{ role : string ; content : string }> ,
cleanContent : string
) : Promise < LLMResponse >;
}
Copy class OpenAIProvider implements LLMProvider {
private client : OpenAI ;
constructor () {
this .client = new OpenAI ({
apiKey : process . env . OPENAI_API_KEY ,
});
}
async generateResponse (
messages : Array <{ role : string ; content : string }>
) : Promise < LLMResponse >;
}
Copy class GeminiProvider implements LLMProvider {
private model : GenerativeModel ;
constructor () {
const genAI = new GoogleGenerativeAI ( process . env . GEMINI_API_KEY ! );
this .model = genAI .getGenerativeModel ({ model : 'gemini-pro' });
}
async generateResponse (
messages : Array <{ role : string ; content : string }> ,
cleanContent : string
) : Promise < LLMResponse >;
}
Copy class GrokProvider implements LLMProvider {
private client : OpenAI ;
constructor () {
this .client = new OpenAI ({
apiKey : process . env . XAI_API_KEY ,
baseURL : 'https://api.x.ai/v1' ,
});
}
async generateResponse (
messages : Array <{ role : string ; content : string }> ,
cleanContent : string
) : Promise < LLMResponse >;
}
Copy class DeepSeekProvider implements LLMProvider {
async generateResponse (
messages : Array <{ role : string ; content : string }> ,
cleanContent : string
) : Promise < LLMResponse >;
}
Copy const tools = [{
type : "function" ,
function : {
name : "generate_image" ,
description : "Generate an image based on the user's request" ,
parameters : {
type : "object" ,
properties : {
prompt : {
type : "string" ,
description : "The description of the image to generate"
}
} ,
required : [ "prompt" ]
}
}
} ,
{
type : "function" ,
function : {
name : "get_weather" ,
description : "Get current weather data for any major city" ,
parameters : {
type : "object" ,
properties : {
location : {
type : "object" ,
properties : {
name : { type : "string" } ,
country : { type : "string" } ,
coordinates : {
type : "object" ,
properties : {
lat : { type : "number" } ,
lon : { type : "number" }
} ,
required : [ "lat" , "lon" ]
}
} ,
required : [ "name" , "country" , "coordinates" ]
} ,
type : {
type : "string" ,
enum : [ "current" , "forecast" ]
}
} ,
required : [ "location" ]
}
}
} ,
{
type : "function" ,
function : {
name : "get_market_data" ,
description : "Get cryptocurrency market data" ,
parameters : {
type : "object" ,
properties : {
type : {
type : "string" ,
enum : [ "token" , "trending" , "top" , "latest" , "boosted" ]
} ,
query : {
type : "string" ,
description : "Token symbol or address"
}
} ,
required : [ "type" ]
}
}
} ,
{
type : "function" ,
function : {
name : "get_time" ,
description : "Gets the current time for a location" ,
parameters : {
type : "object" ,
properties : {
location : {
type : "object" ,
properties : {
name : { type : "string" } ,
country : { type : "string" } ,
coordinates : {
type : "object" ,
properties : {
lat : { type : "number" } ,
lon : { type : "number" }
}
}
}
}
} ,
required : [ "location" ]
}
}
}]
Copy const factory = LLMFactory .getInstance ();
const response = await factory .generateResponse (messages , cleanContent);
Copy const factory = LLMFactory .getInstance ();
const success = factory .setModel ( 'GEMINI' );
if (success) {
const response = await factory .generateResponse (messages , cleanContent);
}
Copy try {
const response = await factory .generateResponse (messages , cleanContent);
} catch (error) {
console .error ( 'LLM Error:' , error);
// Handle provider-specific errors
}