Skip to main content

Interface: LLM

Unified language model interface

Implemented by

Properties

metadata

metadata: LLMMetadata

Defined in

packages/core/src/llm/types.ts:8

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

Name | Type
params | LLMChatParamsStreaming

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Defined in

packages/core/src/llm/types.ts:14

chat(params): Promise<ChatResponse>

Parameters

Name | Type
params | LLMChatParamsNonStreaming

Returns

Promise<ChatResponse>

Defined in

packages/core/src/llm/types.ts:17


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

Name | Type
params | LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Defined in

packages/core/src/llm/types.ts:23

complete(params): Promise<CompletionResponse>

Parameters

Name | Type
params | LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Defined in

packages/core/src/llm/types.ts:26


tokens

tokens(messages): number

Calculates the number of tokens needed for the given chat messages

Parameters

Name | Type
messages | ChatMessage[]

Returns

number

Defined in

packages/core/src/llm/types.ts:33