Browse Source

Improve UI

Add descriptions of arguments in example CLI.
master
TheoryOfNekomata 1 year ago
parent
commit
54fc085279
1 changed file with 30 additions and 6 deletions
  1. +30
    -6
      examples/cli/src/index.ts

+ 30
- 6
examples/cli/src/index.ts View File

@@ -42,11 +42,11 @@ const receiveData = (
});
memory.push(assistantMessage as OpenAi.MessageObject);
process.stdout.write('\n\n');
const { length: promptTokens } = OpenAi.getPromptTokens(
normalizedChatMessage,
model,
);
process.stdout.write(`info:\n${promptTokens} prompt tokens\n${completionTokens} completion tokens`);
// const { length: promptTokens } = OpenAi.getPromptTokens(
// normalizedChatMessage,
// model,
// );
// process.stdout.write(`info:\n${promptTokens} prompt tokens\n${completionTokens} completion tokens`);
process.stdout.write(`info:\n${completionTokens} completion tokens`);
if (argv.memory) {
process.stdout.write(`\n${memory.length} memory items`);
@@ -132,7 +132,31 @@ const main = (argv: Argv) => new Promise<number>(async (resolve) => {
resolve(resolveResult);
});

main(yargs(hideBin(process.argv)).argv as unknown as Argv)
// CLI entry point: parse command-line flags with yargs, run the interactive
// session via `main`, and exit the process with the numeric status code that
// `main` resolves with.
main(
  yargs(hideBin(process.argv))
    .options({
      t: {
        alias: 'temperature',
        type: 'number',
        description: 'Temperature argument.',
      },
      p: {
        alias: 'topP',
        type: 'number',
        description: '"top_p" argument.',
      },
      x: {
        alias: 'maxTokens',
        type: 'number',
        description: 'Maximum tokens ChatGPT will use.',
      },
      m: {
        alias: 'memory',
        // Intentionally untyped: `-m` may be used as a bare boolean flag or
        // carry a numeric limit, so yargs must infer the value per invocation.
        description: 'Whether ChatGPT will use memory. Supply a numeric value to get only the last X memory items (includes messages from all roles).',
      },
    })
    .argv as unknown as Argv,
)
  .then((result) => {
    process.exit(result);
  })
  .catch(() => {
    // `main` currently only resolves, but guard against future rejections so
    // the process never dies with an unhandled-rejection warning.
    process.exit(1);
  });


Loading…
Cancel
Save