import {
  FinishableChoiceBase,
  ConsumeStream,
  DataEventId,
  DoFetch,
  PlatformError,
  PlatformResponse,
} from '../common';
import {
  UsageMetadata,
} from '../usage';
import { ChatCompletionModel } from '../models';
import { normalizeChatMessage, Message, MessageObject } from '../chat';

/**
 * Parameters accepted by `createChatCompletion`. The camelCase fields are
 * mapped onto the snake_case fields of the request body (for example,
 * `maxTokens` becomes `max_tokens`).
 */
export interface CreateChatCompletionParams {
  messages: Message | Message[];
  model: ChatCompletionModel;
  temperature?: number;
  topP?: number;
  n?: number;
  stop?: string | string[];
  maxTokens?: number;
  presencePenalty?: number;
  frequencyPenalty?: number;
  logitBias?: Record<string, number>;
  user?: string;
}

export interface ChatCompletionChunkChoice extends FinishableChoiceBase {
  delta: Partial<MessageObject>;
}

export interface ChatCompletionChoice extends FinishableChoiceBase {
  message: Partial<Message>;
}

export enum DataEventObjectType {
  CHAT_COMPLETION_CHUNK = 'chat.completion.chunk',
  CHAT_COMPLETION = 'chat.completion',
}

export interface CreateChatCompletionDataEvent<
  C extends Partial<FinishableChoiceBase>
> extends PlatformResponse {
  id: DataEventId;
  object: DataEventObjectType;
  model: ChatCompletionModel;
  choices: C[];
}

export interface ChatCompletion
  extends CreateChatCompletionDataEvent<Partial<ChatCompletionChoice>>,
    UsageMetadata {}

export type ChatCompletionChunkDataEvent =
  CreateChatCompletionDataEvent<ChatCompletionChunkChoice>;
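
// Hedged sketch, not part of the original module: a type guard like the one
// below can narrow an incoming data event by its `object` discriminator,
// assuming chunk and full-completion events flow through the same handler.
function isChatCompletionChunk(
  event: ChatCompletion | ChatCompletionChunkDataEvent,
): event is ChatCompletionChunkDataEvent {
  return event.object === DataEventObjectType.CHAT_COMPLETION_CHUNK;
}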

/**
 * Issues a streaming `POST /chat/completions` request. `this` must be bound to
 * an EventEmitter: the response is handed to `consumeStream`, failures are
 * emitted as 'error' events, and 'end' is emitted once the request settles on
 * every path. Returns the emitter so listeners can be chained.
 */
export function createChatCompletion(
  this: NodeJS.EventEmitter,
  doFetch: DoFetch,
  consumeStream: ConsumeStream,
  params: CreateChatCompletionParams,
) {
  doFetch('POST', '/chat/completions', {
    messages: normalizeChatMessage(params.messages),
    model: params.model ?? ChatCompletionModel.GPT_3_5_TURBO,
    temperature: params.temperature ?? 1,
    top_p: params.topP ?? 1,
    n: params.n ?? 1,
    stop: params.stop ?? null,
    // The response is always requested as a stream; consumeStream is
    // responsible for reading it.
    stream: true,
    max_tokens: params.maxTokens,
    presence_penalty: params.presencePenalty ?? 0,
    frequency_penalty: params.frequencyPenalty ?? 0,
    logit_bias: params.logitBias ?? {},
    user: params.user,
  })
    .then(async (response) => {
      if (!response.ok) {
        this.emit('error', new PlatformError(
          // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
          `Create chat completion returned with status: ${response.status}`,
          response,
        ));
        this.emit('end');
        return;
      }

      await consumeStream(response);
      this.emit('end');
    })
    .catch((err) => {
      this.emit('error', err as Error);
      this.emit('end');
    });

  return this;
}
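
// Usage sketch (assumption; the names below are hypothetical): `doFetch` and
// `consumeStream` come from the surrounding platform client. The returned
// emitter receives 'error' and 'end' from createChatCompletion itself and,
// assuming consumeStream emits them, 'data' events carrying
// ChatCompletionChunkDataEvent payloads.
//
//   import { EventEmitter } from 'events';
//
//   createChatCompletion
//     .call(new EventEmitter(), doFetch, consumeStream, {
//       model: ChatCompletionModel.GPT_3_5_TURBO,
//       messages: userMessage, // whatever shape `Message` from '../chat' expects
//     })
//     .on('data', (event) => console.log(event))
//     .on('error', (err) => console.error(err))
//     .on('end', () => console.log('stream finished'));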