Skip to content

OpenAIResponses

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:49

  • Extends: ToolCallLLM<OpenAIResponsesChatOptions>

new OpenAIResponses(init?): OpenAIResponses

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:77

Parameters: init? — Omit<Partial<OpenAIResponses>, "session"> & object

Returns: OpenAIResponses

Overrides: ToolCallLLM<OpenAIResponsesChatOptions>.constructor

model: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:50


temperature: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:51


topP: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:52


optional maxOutputTokens: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:53


optional additionalChatOptions: OpenAIResponsesChatOptions

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:54


optional reasoningEffort: "low" | "medium" | "high"

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:55


optional reasoningSummary: "auto" | "concise" | "detailed"

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:56


optional apiKey: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:57


optional baseURL: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:58


maxRetries: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:59


optional timeout: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:60


optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:61


lazySession: () => Promise<LLMInstance>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:65

Promise<LLMInstance>


trackPreviousResponses: boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:66


store: boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:67


user: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:68


callMetadata: StoredValue

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:69


builtInTools: null | Tool[]

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:70


strict: boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:71


include: null | ResponseIncludable[]

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:72


instructions: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:73


previousResponseId: null | string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:74


truncation: null | "auto" | "disabled"

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:75

get session(): Promise<LLMInstance>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:131

Promise<LLMInstance>


get supportToolCall(): boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:138

Returns: boolean

Overrides: ToolCallLLM.supportToolCall


get metadata(): LLMMetadata & object

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:142

Returns: LLMMetadata & object

Overrides: ToolCallLLM.metadata

chat(params): Promise<AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>, any, any>>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:552

Parameters: params — LLMChatParamsStreaming<OpenAIResponsesChatOptions, OpenAIResponsesMessageOptions>

Returns: Promise<AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>, any, any>>

Overrides: ToolCallLLM.chat

chat(params): Promise<ChatResponse<OpenAIResponsesMessageOptions>>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:558

Parameters: params — LLMChatParamsNonStreaming<OpenAIResponsesChatOptions, OpenAIResponsesMessageOptions>

Returns: Promise<ChatResponse<OpenAIResponsesMessageOptions>>

Overrides: ToolCallLLM.chat


protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:644

ResponseCreateParams

AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>>


toOpenAIResponsesRole(messageType): OpenAIResponsesRole

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:663

MessageType

OpenAIResponsesRole


toOpenAIResponseMessage(message): ResponseInputItem | ResponseInputItem[]

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:786

ChatMessage<OpenAIResponsesMessageOptions>

ResponseInputItem | ResponseInputItem[]


toOpenAIResponseMessages(messages): ResponseInput

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:804

ChatMessage<OpenAIResponsesMessageOptions>[]

ResponseInput


toResponsesTool(tool): Tool

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:819

BaseTool

Tool