|
|
// NOTE(review): leftover unified-diff hunk header ("@@ -1,90 +1,90 @@") removed —
// it is not valid TypeScript and indicates this file was pasted from a diff view.
|
|
|
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import inquirer from 'inquirer';

import { OpenAi, PlatformEventEmitter } from '@modal-sh/mio-ai';

import { createOpenAiClient } from './app';
|
|
|
|
|
|
|
export type Argv = Record<string, unknown>; |
|
|
|
|
|
|
|
const main = (argv: Argv) => new Promise<number>((resolve) => { |
|
|
|
const aiClient = createOpenAiClient({ |
|
|
|
apiKey: process.env.OPENAI_API_KEY as string, |
|
|
|
organizationId: process.env.OPENAI_ORGANIZATION_ID as string, |
|
|
|
apiVersion: OpenAi.ApiVersion.V1, |
|
|
|
}); |
|
|
|
let result: Partial<OpenAi.ChatCompletion> | undefined; |
|
|
|
let done = false; |
|
|
|
|
|
|
|
const receiveData = (aiClient: PlatformEventEmitter, theContent: string, argv: Record<string, unknown>) => new Promise<void>((r2, rj) => { |
|
|
|
let countTokens: number; |
|
|
|
aiClient.on<OpenAi.ChatCompletionChunkDataEvent>('data', (d) => { |
|
|
|
d.choices.forEach((c) => { |
|
|
|
if (!result) { |
|
|
|
result = { |
|
|
|
id: d.id, |
|
|
|
object: OpenAi.ChatCompletionDataEventObjectType.CHAT_COMPLETION, |
|
|
|
created: d.created, |
|
|
|
model: d.model, |
|
|
|
}; |
|
|
|
} |
|
|
|
|
|
|
|
if (!Array.isArray(result?.choices)) { |
|
|
|
result.choices = []; |
|
|
|
} |
|
|
|
|
|
|
|
if (!result.choices[c.index]) { |
|
|
|
result.choices[c.index] = { |
|
|
|
message: { content: '' }, |
|
|
|
index: c.index, |
|
|
|
finish_reason: c.finish_reason, |
|
|
|
}; |
|
|
|
} |
|
|
|
|
|
|
|
if (result.choices[c.index].message) { |
|
|
|
if (c.delta.role) { |
|
|
|
(result.choices[c.index].message as Record<string, unknown>).role = c.delta.role; |
|
|
|
} |
|
|
|
|
|
|
|
if (c.delta.content) { |
|
|
|
(result.choices[c.index].message as Record<string, unknown>) |
|
|
|
.content += c.delta.content; |
|
|
|
} |
|
|
|
} |
|
|
|
if (d.choices?.[0]?.delta?.role) { |
|
|
|
process.stdout.write(`${d.choices[0].delta.role}: `); |
|
|
|
} |
|
|
|
|
|
|
|
if (c.finish_reason) { |
|
|
|
result.choices[c.index].finish_reason = c.finish_reason; |
|
|
|
} |
|
|
|
}); |
|
|
|
if (d.choices?.[0]?.delta?.content) { |
|
|
|
countTokens += 1; |
|
|
|
process.stdout.write(d.choices[0].delta.content); |
|
|
|
} |
|
|
|
}); |
|
|
|
|
|
|
|
aiClient.on('end', () => { |
|
|
|
// resolve?.(result); |
|
|
|
process.stdout.write('\n\n'); |
|
|
|
process.stdout.write(`info: ${countTokens} completion tokens.\n\n`); |
|
|
|
// TODO count tokens |
|
|
|
r2(); |
|
|
|
}); |
|
|
|
|
|
|
|
aiClient.on('error', (error: Error) => { |
|
|
|
done = true; |
|
|
|
resolve(-1); |
|
|
|
process.stderr.write(error.message); |
|
|
|
process.stderr.write('\n'); |
|
|
|
rj(error); |
|
|
|
}); |
|
|
|
|
|
|
|
countTokens = 0; |
|
|
|
aiClient.createChatCompletion({ |
|
|
|
model: OpenAi.ChatCompletionModel.GPT_3_5_TURBO, |
|
|
|
messages: theContent, |
|
|
|
temperature: argv.temperature as number, |
|
|
|
maxTokens: argv.maxTokens as number, |
|
|
|
topP: argv.topP as number, |
|
|
|
}); |
|
|
|
}); |
|
|
|
|
|
|
|
const main = (argv: Argv) => new Promise<number>(async (resolve) => { |
|
|
|
let done = false; |
|
|
|
let resolveResult = 0; |
|
|
|
|
|
|
|
inquirer.prompt([ |
|
|
|
{ |
|
|
|
type: 'input', |
|
|
|
message: 'Type your inquiry.', |
|
|
|
}, |
|
|
|
]) |
|
|
|
.then((answers) => { |
|
|
|
console.log(answers); |
|
|
|
// aiClient.createChatCompletion({ |
|
|
|
// model: OpenAi.ChatCompletionModel.GPT_3_5_TURBO, |
|
|
|
// messages: { |
|
|
|
// role: OpenAi.MessageRole.USER, |
|
|
|
// content: answers[0] as string, |
|
|
|
// }, |
|
|
|
// }); |
|
|
|
}) |
|
|
|
.catch(() => { |
|
|
|
resolve(-1); |
|
|
|
process.stdout.write('Welcome to mio-ai CLI!\n\n'); |
|
|
|
|
|
|
|
process.stdout.write('This is a simple example of how to use mio-ai.\n'); |
|
|
|
process.stdout.write('You can send messages to the OpenAI API.\n'); |
|
|
|
process.stdout.write('You can also send empty messages to exit.\n'); |
|
|
|
process.stdout.write('Alternatively, you can press Ctrl+C anytime to exit.\n\n'); |
|
|
|
do { |
|
|
|
process.stdout.write('---\n\n'); |
|
|
|
const aiClient = createOpenAiClient({ |
|
|
|
apiKey: process.env.OPENAI_API_KEY as string, |
|
|
|
organizationId: process.env.OPENAI_ORGANIZATION_ID as string, |
|
|
|
apiVersion: OpenAi.ApiVersion.V1, |
|
|
|
}); |
|
|
|
const { createPromptModule } = await import('inquirer'); |
|
|
|
const prompt = createPromptModule(); |
|
|
|
let content: string; |
|
|
|
try { |
|
|
|
const answers = await prompt([ |
|
|
|
{ |
|
|
|
type: 'input', |
|
|
|
name: 'content', |
|
|
|
prefix: '', |
|
|
|
message: `${OpenAi.MessageRole.USER}:`, |
|
|
|
}, |
|
|
|
]); |
|
|
|
content = answers.content; |
|
|
|
if (content.trim().length < 1) { |
|
|
|
done = true; |
|
|
|
break; |
|
|
|
} |
|
|
|
process.stdout.write('\n'); |
|
|
|
await receiveData(aiClient, content, argv); |
|
|
|
} catch { |
|
|
|
resolveResult = -1; |
|
|
|
done = true; |
|
|
|
} |
|
|
|
} while (!done); |
|
|
|
|
|
|
|
resolve(0); |
|
|
|
process.stdout.write('Bye!\n'); |
|
|
|
resolve(resolveResult); |
|
|
|
}); |
|
|
|
|
|
|
|
main(yargs(hideBin(process.argv)).argv as unknown as Argv) |
|
|
|