Many-in-one AI client.

import {
  FinishableChoiceBase,
  ConsumeStream,
  DataEventId,
  DoFetch,
  PlatformError,
  PlatformResponse,
} from '../common';
import {
  UsageMetadata,
} from '../usage';
import { ChatCompletionModel } from '../models';
import { normalizeChatMessage, Message, MessageObject } from '../chat';

/**
 * Request parameters accepted by createChatCompletion. Fields are camelCase here
 * and are mapped to the platform's snake_case wire format when the request is built.
 */
export interface CreateChatCompletionParams {
  messages: Message | Message[];
  model: ChatCompletionModel;
  temperature?: number;
  topP?: number;
  n?: number;
  stop?: string | string[];
  maxTokens?: number;
  presencePenalty?: number;
  frequencyPenalty?: number;
  logitBias?: Record<string, number>;
  user?: string;
}

export interface ChatCompletionChunkChoice extends FinishableChoiceBase {
  delta: Partial<MessageObject>;
}

export interface ChatCompletionChoice extends FinishableChoiceBase {
  message: Partial<Message>;
}

export enum DataEventObjectType {
  CHAT_COMPLETION_CHUNK = 'chat.completion.chunk',
  CHAT_COMPLETION = 'chat.completion',
}

export interface CreateChatCompletionDataEvent<
  C extends Partial<FinishableChoiceBase>
> extends PlatformResponse {
  id: DataEventId;
  object: DataEventObjectType;
  model: ChatCompletionModel;
  choices: C[];
}

export interface ChatCompletion
  extends CreateChatCompletionDataEvent<Partial<ChatCompletionChoice>>, UsageMetadata {}

export type ChatCompletionChunkDataEvent =
  CreateChatCompletionDataEvent<ChatCompletionChunkChoice>;

/**
 * Issues a streaming chat completion request on behalf of the bound EventEmitter.
 * Non-OK responses and network failures are emitted as 'error' events, and 'end'
 * is emitted once the request has settled either way. Returns the emitter.
 */
export function createChatCompletion(
  this: NodeJS.EventEmitter,
  doFetch: DoFetch,
  consumeStream: ConsumeStream,
  params: CreateChatCompletionParams,
) {
  // Map camelCase params to the snake_case wire format, applying defaults.
  doFetch('POST', '/chat/completions', {
    messages: normalizeChatMessage(params.messages),
    model: params.model ?? ChatCompletionModel.GPT_3_5_TURBO,
    temperature: params.temperature ?? 1,
    top_p: params.topP ?? 1,
    n: params.n ?? 1,
    stop: params.stop ?? null,
    stream: true,
    max_tokens: params.maxTokens,
    presence_penalty: params.presencePenalty ?? 0,
    frequency_penalty: params.frequencyPenalty ?? 0,
    logit_bias: params.logitBias ?? {},
    user: params.user,
  })
    .then(async (response) => {
      if (!response.ok) {
        this.emit('error', new PlatformError(
          // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
          `Create chat completion returned with status: ${response.status}`,
          response,
        ));
        this.emit('end');
        return;
      }
      // Hand the streamed body to the injected stream consumer, then signal completion.
      await consumeStream(response);
      this.emit('end');
    })
    .catch((err) => {
      this.emit('error', err as Error);
      this.emit('end');
    });
  return this;
}
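
createChatCompletion never resolves with a value of its own; chunks, failures, and completion all flow through the EventEmitter it is bound to. The sketch below shows one way a caller could wire it up. It is illustrative only, not the package's real wiring: the import paths, the API base URL, the 'data' event name, the { role, content } message shape, and both stub helpers (doFetch, consumeStream) are assumptions standing in for the real DoFetch and ConsumeStream implementations from '../common'.

import { EventEmitter } from 'node:events';
// Hypothetical import paths; point these at the module above and its models module.
import { createChatCompletion } from './chat-completion';
import { ChatCompletionModel } from './models';

const emitter = new EventEmitter();

// Stub stand-in for DoFetch: POSTs the already-mapped payload. The base URL and
// auth header are placeholders.
const doFetch = (method: string, path: string, body: unknown) =>
  fetch(`https://api.example.invalid/v1${path}`, {
    method,
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.API_KEY ?? ''}`,
    },
    body: JSON.stringify(body),
  });

// Stub stand-in for ConsumeStream: buffers the whole SSE body for simplicity and
// re-emits each parsed chunk as a 'data' event. The real consumer injected by the
// client may parse incrementally and use a different event name.
const consumeStream = async (response: Response) => {
  const text = await response.text();
  for (const line of text.split('\n')) {
    if (line.startsWith('data: ') && !line.includes('[DONE]')) {
      emitter.emit('data', JSON.parse(line.slice('data: '.length)));
    }
  }
};

emitter.on('data', (chunk) => process.stdout.write(chunk.choices?.[0]?.delta?.content ?? ''));
emitter.on('error', (err) => console.error(err));
emitter.on('end', () => console.log('\n[stream finished]'));

// Bind the emitter as `this` and fire the request; the message shape here is assumed.
createChatCompletion.call(emitter, doFetch, consumeStream, {
  model: ChatCompletionModel.GPT_3_5_TURBO,
  messages: { role: 'user', content: 'Hello there!' },
});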