Many-in-one AI client.

import {
  FinishableChoiceBase,
  DataEventId,
  CreatedResource,
} from '../common';
import {
  CompletionUsage,
  UsageMetadata,
} from '../usage';
import { ChatCompletionModel } from '../models';
import { normalizeChatMessage, Message, MessageObject } from '../chat';
import { ConsumeStream, DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

/** Request parameters in camelCase; mapped to the API's snake_case fields below. */
export interface CreateChatCompletionParams {
  messages: Message | Message[];
  model: ChatCompletionModel;
  temperature?: number;
  topP?: number;
  n?: number;
  stop?: string | string[];
  maxTokens?: number;
  presencePenalty?: number;
  frequencyPenalty?: number;
  logitBias?: Record<string, number>;
  user?: string;
}

/** A choice inside a streamed chunk; only the changed message fields are present. */
export interface ChatCompletionChunkChoice extends FinishableChoiceBase {
  delta: Partial<MessageObject>;
}

/** A choice inside a full (non-streamed) completion. */
export interface ChatCompletionChoice extends FinishableChoiceBase {
  message: Partial<Message>;
}

export enum DataEventObjectType {
  CHAT_COMPLETION_CHUNK = 'chat.completion.chunk',
  CHAT_COMPLETION = 'chat.completion',
}

/** Common envelope shared by streamed chunks and full completions. */
export interface CreateChatCompletionDataEvent<
  C extends Partial<FinishableChoiceBase>,
> extends CreatedResource {
  id: DataEventId;
  object: DataEventObjectType;
  model: ChatCompletionModel;
  choices: C[];
}

export interface ChatCompletion
  extends CreateChatCompletionDataEvent<Partial<ChatCompletionChoice>>,
    UsageMetadata<CompletionUsage> {}

export type ChatCompletionChunkDataEvent = CreateChatCompletionDataEvent<ChatCompletionChunkChoice>;

/**
 * Creates a streaming chat completion. Must be called with an EventEmitter
 * bound as `this`: results are delivered through events rather than a return
 * value, and failures are emitted as 'error' followed by 'end'.
 */
export function createChatCompletion(
  this: NodeJS.EventEmitter,
  doFetch: DoFetch,
  consumeStream: ConsumeStream,
  params: CreateChatCompletionParams,
) {
  doFetch('POST', '/chat/completions', {
    messages: normalizeChatMessage(params.messages),
    model: params.model ?? ChatCompletionModel.GPT_3_5_TURBO,
    temperature: params.temperature ?? 1,
    top_p: params.topP ?? 1,
    n: params.n ?? 1,
    stop: params.stop ?? null,
    stream: true,
    max_tokens: params.maxTokens,
    presence_penalty: params.presencePenalty ?? 0,
    frequency_penalty: params.frequencyPenalty ?? 0,
    logit_bias: params.logitBias ?? {},
    user: params.user,
  } as Record<string, unknown>)
    .then(async (response) => {
      if (!response.ok) {
        this.emit('error', new PlatformApiError(
          // eslint-disable-next-line @typescript-eslint/restrict-template-expressions
          `Create chat completion returned with status: ${response.status}`,
          response,
        ));
        this.emit('end');
        return;
      }
      // Drain the streamed response body before signalling completion.
      await consumeStream(response);
      this.emit('end');
    })
    .catch((err) => {
      this.emit('error', err as Error);
      this.emit('end');
    });
  return this;
}
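
For reference, a minimal usage sketch follows. The module's real DoFetch and ConsumeStream implementations live in '../../../packages/request' and are not shown above, so the stand-ins below (endpoint URL, headers, stream handling, and the Message literal) are assumptions inferred from the call site, not the package's actual API.

import { EventEmitter } from 'events';

const emitter = new EventEmitter();

// Hypothetical DoFetch stand-in: (method, path, body) => Promise<Response>,
// inferred from how createChatCompletion invokes it above.
const doFetch = (method: string, path: string, body: Record<string, unknown>) =>
  fetch(`https://api.openai.com/v1${path}`, {
    method,
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
    },
    body: JSON.stringify(body),
  });

// Hypothetical ConsumeStream stand-in: the real one presumably parses the
// event stream into ChatCompletionChunkDataEvents; here the raw text is
// emitted as a single 'data' event for illustration only.
const consumeStream = async (response: Response) => {
  emitter.emit('data', await response.text());
};

emitter.on('data', (chunk) => console.log(chunk));
emitter.on('error', (err) => console.error(err));
emitter.on('end', () => console.log('done'));

// Bind the emitter as `this`; progress is reported via 'data'/'error'/'end'.
createChatCompletion.call(emitter, doFetch, consumeStream, {
  messages: { role: 'user', content: 'Hello!' }, // assumed Message shape
  model: ChatCompletionModel.GPT_3_5_TURBO,
});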