- import fetchPonyfill from 'fetch-ponyfill';
- import Handlebars from 'handlebars';
- import { resolve } from 'path';
- import { readFile } from 'fs/promises';
- import * as config from './config';
- import { OpenAiParams } from './common';
-
/** Arguments for {@link makeAiCall}. */
export interface MakeAiCallParams {
  /** Candidate prompt texts; one is selected at random per call. */
  prompts: string[];
  /** OpenAI credentials and model settings (key, org, model, temperature). */
  openAiParams: OpenAiParams;
}
-
- export class AiCallError extends Error {
- constructor(message: string, public readonly response: Response) {
- super(message);
- this.name = 'AiCallError';
- }
- }
-
- const makeAiCall = async (params: MakeAiCallParams): Promise<string> => {
- const {
- prompts,
- openAiParams: {
- apiKey,
- organizationId,
- model = 'gpt-3.5-turbo',
- temperature = 0.6,
- },
- } = params;
-
- const headers: Record<string, string> = {
- 'Content-Type': 'application/json',
- Accept: 'application/json',
- Authorization: `Bearer ${apiKey}`,
- };
-
- if (organizationId) {
- headers['OpenAI-Organization'] = organizationId;
- }
-
- const { fetch } = fetchPonyfill();
- const response = await fetch(
- new URL('/v1/chat/completions', 'https://api.openai.com'),
- {
- method: 'POST',
- headers,
- body: JSON.stringify({
- model,
- temperature,
- messages: [
- {
- role: 'user',
- content: prompts[Math.floor(Math.random() * prompts.length)].trim(),
- },
- ],
- }),
- },
- );
-
- if (!response.ok) {
- const { error } = await response.json();
- throw new AiCallError(`OpenAI API call failed with status ${response.status}: ${error.message}`, response);
- }
-
- const { choices } = await response.json();
-
- // should we use all the response choices?
- return choices[0].message.content;
- };
-
- const compilePrompts = async (filename: string, params: Record<string, unknown>): Promise<string[]> => {
- const rawPromptText = await readFile(resolve(config.openAi.promptsDir, filename), 'utf-8');
- const fill = Handlebars.compile(rawPromptText, { noEscape: true });
- const filledText = fill(params);
- return filledText.split('---').map((s) => s.trim());
- };
-
/** Arguments for {@link normalizeTranscriptText}. */
export interface NormalizeTranscriptTextParams {
  /** Raw transcript text to be normalized by the model. */
  rawTranscriptText: string,
  /** OpenAI credentials and model settings. */
  openAiParams: OpenAiParams,
}
-
- export const normalizeTranscriptText = async (params: NormalizeTranscriptTextParams) => {
- const {
- rawTranscriptText,
- openAiParams,
- } = params;
- const prompts = await compilePrompts(
- 'normalize-transcript-text.hbs',
- {
- transcript: rawTranscriptText,
- },
- );
-
- return makeAiCall({
- prompts,
- openAiParams,
- });
- };
-
/** Arguments for {@link summarizeTranscript}. */
export interface SummarizeTranscriptParams {
  /** Transcript text, already normalized, to be summarized. */
  normalizedTranscript: string,
  /** OpenAI credentials and model settings. */
  openAiParams: OpenAiParams,
}
-
- export const summarizeTranscript = async (params: SummarizeTranscriptParams) => {
- const {
- normalizedTranscript,
- openAiParams,
- } = params;
- const prompts = await compilePrompts(
- 'summarize-transcript.hbs',
- {
- transcript: normalizedTranscript,
- },
- );
-
- return makeAiCall({
- prompts,
- openAiParams,
- });
- };
|