Skip to main content

Class: OpenAI

Hierarchy

Constructors

constructor

new OpenAI(init?): OpenAI

Parameters

Name | Type
init? | Partial<OpenAI> & { azure?: AzureOpenAIConfig }

Returns

OpenAI

Overrides

ToolCallLLM<OpenAIAdditionalChatOptions>.constructor

Defined in

packages/core/src/llm/openai.ts:179

Properties

additionalChatOptions

Optional additionalChatOptions: OpenAIAdditionalChatOptions

Defined in

packages/core/src/llm/openai.ts:167


additionalSessionOptions

Optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">

Defined in

packages/core/src/llm/openai.ts:174


apiKey

Optional apiKey: string = undefined

Defined in

packages/core/src/llm/openai.ts:170


maxRetries

maxRetries: number

Defined in

packages/core/src/llm/openai.ts:171


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/openai.ts:166


model

model: string

Defined in

packages/core/src/llm/openai.ts:163


session

session: OpenAISession

Defined in

packages/core/src/llm/openai.ts:173


temperature

temperature: number

Defined in

packages/core/src/llm/openai.ts:164


timeout

Optional timeout: number

Defined in

packages/core/src/llm/openai.ts:172


topP

topP: number

Defined in

packages/core/src/llm/openai.ts:165

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Overrides

ToolCallLLM.metadata

Defined in

packages/core/src/llm/openai.ts:236


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Overrides

ToolCallLLM.supportToolCall

Defined in

packages/core/src/llm/openai.ts:232

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Parameters

Name | Type
params | LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Overrides

ToolCallLLM.chat

Defined in

packages/core/src/llm/openai.ts:313

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Parameters

Name | Type
params | LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Defined in

packages/core/src/llm/openai.ts:319


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name | Type
params | LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/src/llm/base.ts:22

complete(params): Promise<CompletionResponse>

Parameters

Name | Type
params | LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/src/llm/base.ts:25


streamChat

streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Parameters

Name | Type
baseRequestParams | ChatCompletionCreateParams

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Defined in

packages/core/src/llm/openai.ts:385


toOpenAIMessage

toOpenAIMessage(messages): ChatCompletionMessageParam[]

Parameters

Name | Type
messages | ChatMessage<ToolCallLLMMessageOptions>[]

Returns

ChatCompletionMessageParam[]

Defined in

packages/core/src/llm/openai.ts:264


toOpenAIRole

toOpenAIRole(messageType): ChatCompletionRole

Parameters

Name | Type
messageType | MessageType

Returns

ChatCompletionRole

Defined in

packages/core/src/llm/openai.ts:251


toTool

toTool(tool): ChatCompletionTool

Parameters

Name | Type
tool | BaseTool<any>

Returns

ChatCompletionTool

Defined in

packages/core/src/llm/openai.ts:466