OpenAI
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:62
Extends
ToolCallLLM<OpenAIAdditionalChatOptions>
Constructors
Constructor
new OpenAI(init?): OpenAI
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:94
Parameters
init?
Omit<Partial<OpenAI>, "session"> & object
Returns
OpenAI
Overrides
ToolCallLLM<OpenAIAdditionalChatOptions>.constructor
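A minimal construction sketch, assuming the class is imported from the @llamaindex/openai package; the init object accepts any of the properties listed below (the model name and option values here are illustrative):

```ts
import { OpenAI } from "@llamaindex/openai";

// Any Partial<OpenAI> field except `session` may be passed to the constructor.
const llm = new OpenAI({
  model: "gpt-4o-mini", // ChatModel | string
  temperature: 0.1,
  maxTokens: 1024,
  // Assumed to fall back to the OPENAI_API_KEY environment variable when omitted.
  apiKey: process.env.OPENAI_API_KEY,
});
```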
Properties
model
model: ChatModel | string & object
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:63
temperature
temperature: number
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:67
reasoningEffort?
optional reasoningEffort: "low" | "medium" | "high" | "minimal"
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:68
topP
topP: number
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:69
maxTokens?
optional maxTokens: number
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:70
additionalChatOptions?
optional additionalChatOptions: OpenAIAdditionalChatOptions
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:71
apiKey?
optional apiKey: string = undefined
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:74
baseURL?
optional baseURL: string = undefined
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:75
maxRetries
maxRetries: number
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:76
timeout?
optional timeout: number
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:77
additionalSessionOptions?
optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:78
lazySession()
lazySession: () => Promise<LLMInstance>
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:85
Returns
Promise<LLMInstance>
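The transport-level properties above configure the underlying OpenAI client that lazySession creates on first use. A sketch assuming an OpenAI-compatible endpoint; the URL, header, and option values are placeholders:

```ts
import { OpenAI } from "@llamaindex/openai";

// Placeholder endpoint and values; `additionalSessionOptions` forwards the
// remaining ClientOptions (everything except apiKey/maxRetries/timeout).
const llm = new OpenAI({
  baseURL: "https://example.com/v1",
  apiKey: "sk-...",
  maxRetries: 5,
  timeout: 60_000, // assumed to be milliseconds, matching the OpenAI client
  additionalSessionOptions: {
    defaultHeaders: { "x-request-source": "docs-example" },
  },
});

// The lazily created client is exposed through the `session` accessor below.
const client = await llm.session;
```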
Accessors
session
Get Signature
get session(): Promise<LLMInstance>
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:87
Returns
Promise<LLMInstance>
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:132
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
live
Get Signature
get live(): OpenAILive
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:136
Returns
OpenAILive
metadata
Get Signature
get metadata(): LLMMetadata & object
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:147
Returns
LLMMetadata & object
Overrides
ToolCallLLM.metadata
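Both accessors can be read directly off an instance. A small sketch; the model name is illustrative and the exact set of LLMMetadata fields is assumed from the core LLM interface:

```ts
import { OpenAI } from "@llamaindex/openai";

const llm = new OpenAI({ model: "gpt-4o-mini" });

if (llm.supportToolCall) {
  // Safe to pass tools along with chat() for this model.
}

// metadata reports the resolved generation settings for this instance.
const { model, temperature, topP, contextWindow } = llm.metadata;
console.log(model, temperature, topP, contextWindow);
```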
Methods
toOpenAIRole()
static toOpenAIRole(messageType): ChatCompletionRole
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:163
Parameters
messageType
MessageType
Returns
ChatCompletionRole
toOpenAIMessage()
static toOpenAIMessage(messages): ChatCompletionMessageParam[]
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:176
Parameters
messages
ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ChatCompletionMessageParam[]
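Both static helpers translate LlamaIndex chat structures into the OpenAI wire format. A sketch; the message content is illustrative and the plain objects are assumed to satisfy the ChatMessage shape:

```ts
import { OpenAI } from "@llamaindex/openai";

// Illustrative messages in the ChatMessage<ToolCallLLMMessageOptions> shape.
const messages = [
  { role: "system" as const, content: "You are a terse assistant." },
  { role: "user" as const, content: "Summarize RFC 2119 in one line." },
];

// Map a single LlamaIndex MessageType onto an OpenAI ChatCompletionRole.
const role = OpenAI.toOpenAIRole("user");

// Convert whole messages into ChatCompletionMessageParam[] for the OpenAI SDK.
const wireMessages = OpenAI.toOpenAIMessage(messages);
```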
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:267
Parameters
params
LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:273
Parameters
params
LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
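The two overloads are selected by the stream flag on the params object. A usage sketch; the model name and prompt are placeholders:

```ts
import { OpenAI } from "@llamaindex/openai";

const llm = new OpenAI({ model: "gpt-4o-mini" });

// Non-streaming overload: resolves to a single ChatResponse.
const response = await llm.chat({
  messages: [{ role: "user", content: "Name three prime numbers." }],
});
console.log(response.message.content);

// Streaming overload: resolves to an AsyncIterable of ChatResponseChunk.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Name three prime numbers." }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```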
streamChat()
protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:370
Parameters
baseRequestParams
ChatCompletionCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
toTool()
static toTool(tool): ChatCompletionTool
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:449
Parameters
tool
BaseTool
Returns
ChatCompletionTool
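toTool converts a LlamaIndex BaseTool description into the OpenAI function-tool format. A sketch assuming a hand-rolled tool object whose metadata carries a JSON-schema parameters block; the tool, its schema, and the import path for BaseTool are all hypothetical:

```ts
import { OpenAI } from "@llamaindex/openai";
import type { BaseTool } from "llamaindex";

// Hypothetical tool: only `metadata` is needed for the conversion itself.
const weatherTool: BaseTool = {
  metadata: {
    name: "get_weather",
    description: "Look up the current weather for a city.",
    parameters: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
  call: async ({ city }: { city: string }) => `Sunny in ${city}`,
};

// Produces a ChatCompletionTool suitable for the OpenAI chat.completions API.
const openaiTool = OpenAI.toTool(weatherTool);
```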