
Add example project

Include example project for consuming the client.
TheoryOfNekomata committed 1 year ago
branch master, commit 30a75c7960
22 changed files with 4048 additions and 14 deletions
  1. examples/cli/.eslintrc (+9 -0)
  2. examples/cli/.gitignore (+108 -0)
  3. examples/cli/package.json (+57 -0)
  4. examples/cli/pridepack.json (+3 -0)
  5. examples/cli/src/app.ts (+14 -0)
  6. examples/cli/src/events.ts (+53 -0)
  7. examples/cli/src/index.ts (+32 -0)
  8. examples/cli/src/packages/@modal-sh/mio-ai (+1 -0)
  9. examples/cli/test/index.test.ts (+8 -0)
  10. examples/cli/tsconfig.eslint.json (+24 -0)
  11. examples/cli/tsconfig.json (+24 -0)
  12. examples/cli/yarn.lock (+3646 -0)
  13. package.json (+1 -0)
  14. src/platforms/openai/common.ts (+0 -10)
  15. src/platforms/openai/events.ts (+1 -0)
  16. src/platforms/openai/features/chat-completion.ts (+3 -1)
  17. src/platforms/openai/features/edit.ts (+3 -1)
  18. src/platforms/openai/features/text-completion.ts (+3 -1)
  19. src/platforms/openai/index.ts (+2 -0)
  20. src/platforms/openai/usage.ts (+29 -0)
  21. test/platforms/openai/api.test.ts (+22 -1)
  22. yarn.lock (+5 -0)

examples/cli/.eslintrc (+9 -0)

@@ -0,0 +1,9 @@
{
  "root": true,
  "extends": [
    "lxsmnsyc/typescript"
  ],
  "parserOptions": {
    "project": "./tsconfig.eslint.json"
  }
}

examples/cli/.gitignore (+108 -0)

@@ -0,0 +1,108 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.production
.env.development

# parcel-bundler cache (https://parceljs.org/)
.cache

# Next.js build output
.next

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and *not* Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

.npmrc
types/

examples/cli/package.json (+57 -0)

@@ -0,0 +1,57 @@
{
  "name": "cli",
  "version": "0.0.0",
  "files": [
    "dist",
    "src"
  ],
  "engines": {
    "node": ">=12"
  },
  "keywords": [
    "pridepack"
  ],
  "devDependencies": {
    "@types/node": "^18.14.1",
    "@types/yargs": "^17.0.24",
    "eslint": "^8.35.0",
    "eslint-config-lxsmnsyc": "^0.5.0",
    "fetch-ponyfill": "^7.1.0",
    "handlebars": "^4.7.7",
    "pridepack": "2.4.4",
    "tslib": "^2.5.0",
    "typescript": "^4.9.5",
    "vitest": "^0.28.1"
  },
  "scripts": {
    "prepublishOnly": "pridepack clean && pridepack build",
    "build": "pridepack build",
    "type-check": "pridepack check",
    "lint": "pridepack lint",
    "clean": "pridepack clean",
    "watch": "pridepack watch",
    "start": "pridepack start",
    "dev": "pridepack dev",
    "test": "vitest"
  },
  "private": true,
  "description": "Example CLI app for mio-ai",
  "repository": {
    "url": "",
    "type": "git"
  },
  "homepage": "",
  "bugs": {
    "url": ""
  },
  "author": "TheoryOfNekomata <allan.crisostomo@outlook.com>",
  "publishConfig": {
    "access": "restricted"
  },
  "alias": {
    "@modal-sh/mio-ai": "../../src"
  },
  "dependencies": {
    "yargs": "^17.7.1"
  }
}

examples/cli/pridepack.json (+3 -0)

@@ -0,0 +1,3 @@
{
  "target": "es2018"
}

examples/cli/src/app.ts (+14 -0)

@@ -0,0 +1,14 @@
import { createAiClient as createOpenAiClient, OpenAi } from '@modal-sh/mio-ai';

export const createAiClient = () => {
  const aiClient = createOpenAiClient({
    platform: OpenAi.PLATFORM_ID,
    platformConfiguration: {
      apiKey: process.env.OPENAI_API_KEY as string,
      organizationId: process.env.OPENAI_ORGANIZATION_ID as string,
      apiVersion: OpenAi.ApiVersion.V1,
    },
  });

  return aiClient;
};
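The client created here is exercised through createChatCompletion, the same call shape the test suite uses later in this commit. A minimal consumption sketch (hypothetical, not part of the commit; it assumes OPENAI_API_KEY and OPENAI_ORGANIZATION_ID are set in the environment):

    import { OpenAi } from '@modal-sh/mio-ai';
    import { createAiClient } from './app';

    const aiClient = createAiClient();

    // Completion chunks arrive as 'data' events; events.ts below shows
    // how they are merged into a single result.
    aiClient.createChatCompletion({
      messages: 'Say hello.',
      model: OpenAi.ChatCompletionModel.GPT_3_5_TURBO,
    });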

examples/cli/src/events.ts (+53 -0)

@@ -0,0 +1,53 @@
import { PlatformEventEmitter, OpenAi } from '@modal-sh/mio-ai';

export const addEvents = (aiClient: PlatformEventEmitter, resolve?: Function, reject?: Function) => {
  let result: Partial<OpenAi.ChatCompletion> | undefined;

  aiClient.on<OpenAi.ChatCompletionChunkDataEvent>('data', (d) => {
    d.choices.forEach((c) => {
      if (!result) {
        result = {
          id: d.id,
          object: OpenAi.ChatCompletionDataEventObjectType.CHAT_COMPLETION,
          created: d.created,
          model: d.model,
        };
      }

      if (!Array.isArray(result?.choices)) {
        result.choices = [];
      }

      if (!result.choices[c.index]) {
        result.choices[c.index] = {
          message: { content: '' },
          index: c.index,
          finish_reason: c.finish_reason,
        };
      }

      if (result.choices[c.index].message) {
        if (c.delta.role) {
          (result.choices[c.index].message as Record<string, unknown>).role = c.delta.role;
        }

        if (c.delta.content) {
          (result.choices[c.index].message as Record<string, unknown>)
            .content += c.delta.content;
        }
      }

      if (c.finish_reason) {
        result.choices[c.index].finish_reason = c.finish_reason;
      }
    });
  });

  aiClient.on('end', () => {
    resolve?.(result);
  });

  aiClient.on('error', (error: Error) => {
    reject?.(error);
  });
};
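Because addEvents forwards the accumulated result to resolve on 'end' and the error to reject on 'error', a caller can wrap the stream in a Promise and await the merged completion. A sketch under the same assumptions as the previous one (hypothetical, not part of the commit):

    import { OpenAi } from '@modal-sh/mio-ai';
    import { createAiClient } from './app';
    import { addEvents } from './events';

    // Resolves once the 'end' event fires, with all chunks merged into
    // a single (partial) ChatCompletion.
    const complete = (prompt: string) => (
      new Promise<Partial<OpenAi.ChatCompletion> | undefined>((resolve, reject) => {
        const aiClient = createAiClient();
        addEvents(aiClient, resolve, reject);
        aiClient.createChatCompletion({
          messages: prompt,
          model: OpenAi.ChatCompletionModel.GPT_3_5_TURBO,
        });
      })
    );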

examples/cli/src/index.ts (+32 -0)

@@ -0,0 +1,32 @@
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import { createAiClient } from './app';
import { addEvents } from './events';

export type Argv = Record<string, unknown>;

const main = (argv: Argv) => new Promise<number>((resolve) => {
  const aiClient = createAiClient();
  const onResolve = (result: unknown) => {
    console.log(result);
    resolve(0);
  };

  const onReject = (error: Error) => {
    console.error(error);
    resolve(-1);
  };

  addEvents(aiClient, onResolve, onReject);
  console.log(argv);
  resolve(0);
});

main(yargs(hideBin(process.argv)).argv as unknown as Argv)
  .then((result) => {
    process.exit(result);
  })
  .catch((error) => {
    console.error(error);
    process.exit(-1);
  });

examples/cli/src/packages/@modal-sh/mio-ai (+1 -0)

@@ -0,0 +1 @@
../../../
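This entry appears to be a committed relative symlink: together with the "alias": { "@modal-sh/mio-ai": "../../src" } entry in the example's package.json and the tsconfig "paths" mapping below, it lets the example resolve @modal-sh/mio-ai to the in-repo sources rather than an installed package.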

examples/cli/test/index.test.ts (+8 -0)

@@ -0,0 +1,8 @@
import { describe, it, expect } from 'vitest';
import add from '../src';

describe('blah', () => {
  it('works', () => {
    expect(add(1, 1)).toEqual(2);
  });
});

examples/cli/tsconfig.eslint.json (+24 -0)

@@ -0,0 +1,24 @@
{
  "exclude": ["node_modules"],
  "include": ["src", "types", "test"],
  "compilerOptions": {
    "module": "ESNext",
    "lib": ["ESNext", "DOM"],
    "importHelpers": true,
    "declaration": true,
    "sourceMap": true,
    "strict": true,
    "rootDir": ".",
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noImplicitReturns": true,
    "noFallthroughCasesInSwitch": true,
    "moduleResolution": "node",
    "jsx": "react",
    "esModuleInterop": true,
    "target": "es2018",
    "paths": {
      "@modal-sh/mio-ai": ["./src/packages/@modal-sh/mio-ai"]
    }
  }
}

examples/cli/tsconfig.json (+24 -0)

@@ -0,0 +1,24 @@
{
  "exclude": ["node_modules"],
  "include": ["src", "types"],
  "compilerOptions": {
    "module": "ESNext",
    "lib": ["ESNext", "DOM"],
    "importHelpers": true,
    "declaration": true,
    "sourceMap": true,
    "strict": true,
    "rootDir": "./src",
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noImplicitReturns": true,
    "noFallthroughCasesInSwitch": true,
    "moduleResolution": "node",
    "jsx": "react",
    "esModuleInterop": true,
    "target": "es2018",
    "paths": {
      "@modal-sh/mio-ai": ["./src/packages/@modal-sh/mio-ai"]
    }
  }
}

examples/cli/yarn.lock (+3646 -0)
File diff suppressed because it is too large


package.json (+1 -0)

@@ -48,6 +48,7 @@
     "access": "public"
   },
   "dependencies": {
+    "@dqbd/tiktoken": "^1.0.6",
     "fetch-ponyfill": "^7.1.0",
     "handlebars": "^4.7.7"
   }


src/platforms/openai/common.ts (+0 -10)

@@ -15,16 +15,6 @@ export type DataEventId = string;
 
 export type Timestamp = number;
 
-export interface Usage {
-  prompt_tokens: number;
-  completion_tokens: number;
-  total_tokens: number;
-}
-
-export interface UsageMetadata {
-  usage: Usage;
-}
-
 export interface PlatformResponse {
   created: Timestamp;
 }


src/platforms/openai/events.ts (+1 -0)

@@ -69,6 +69,7 @@ export class PlatformEventEmitterImpl extends EventEmitter implements PlatformEv
       for await (const chunk of response.body as unknown as PassThrough) {
         const chunkStringMaybeMultiple = chunk.toString();
         const chunkStrings = chunkStringMaybeMultiple
+          .trim()
           .split('\n')
           .filter((chunkString: string) => chunkString.length > 0);
         chunkStrings.forEach((chunkString: string) => {


src/platforms/openai/features/chat-completion.ts (+3 -1)

@@ -5,8 +5,10 @@ import {
   DoFetch,
   PlatformError,
   PlatformResponse,
-  UsageMetadata,
 } from '../common';
+import {
+  UsageMetadata,
+} from '../usage';
 import { ChatCompletionModel } from '../models';
 import { normalizeChatMessage, Message, MessageObject } from '../chat';



src/platforms/openai/features/edit.ts (+3 -1)

@@ -3,8 +3,10 @@ import {
   DoFetch,
   PlatformError,
   PlatformResponse,
-  UsageMetadata,
 } from '../common';
+import {
+  UsageMetadata,
+} from '../usage';
 import { EditModel } from '../models';
 
 export enum DataEventObjectType {


src/platforms/openai/features/text-completion.ts (+3 -1)

@@ -6,8 +6,10 @@ import {
   FinishableChoiceBase,
   PlatformError,
   PlatformResponse,
-  UsageMetadata,
 } from '../common';
+import {
+  UsageMetadata,
+} from '../usage';
 
 export enum DataEventObjectType {
   TEXT_COMPLETION = 'text_completion',


src/platforms/openai/index.ts (+2 -0)

@@ -7,6 +7,8 @@ export { PlatformEventEmitter, PlatformEventEmitterImpl } from './events';
 export {
   ChatCompletion,
   ChatCompletionChunkDataEvent,
+  ChatCompletionChunkChoice,
+  ChatCompletionChoice,
   DataEventObjectType as ChatCompletionDataEventObjectType,
 } from './features/chat-completion';
 export {


src/platforms/openai/usage.ts (+29 -0)

@@ -0,0 +1,29 @@
import {
encoding_for_model as encodingForModel,
TiktokenModel,
} from '@dqbd/tiktoken';
import { Message } from './chat';

export const getPromptTokens = (message: Message | Message[], model: TiktokenModel) => {
// TODO proper calculation of tokens
// refer to https://tiktokenizer.vercel.app/ for counting tokens on multiple messages
const enc = encodingForModel(model);
const messageArray = Array.isArray(message) ? message : [message];
return messageArray.map((m) => {
if (typeof m === 'string') {
return enc.encode(m);
}

return enc.encode(m.content);
});
};

export interface Usage {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
}

export interface UsageMetadata {
usage: Usage;
}
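getPromptTokens returns one encoding per message rather than a single number, so a consumer wanting an overall count would presumably sum the encoded lengths; the test below instead takes .length of the returned array, in line with the TODO above about proper token accounting. A minimal summing sketch (the helper name is hypothetical, not part of the commit):

    import { getPromptTokens } from './usage';

    // Hypothetical helper: sums the per-message encodings returned by
    // getPromptTokens into a single prompt token count.
    const countPromptTokens = (
      ...args: Parameters<typeof getPromptTokens>
    ) => getPromptTokens(...args)
      .reduce((total, encoded) => total + encoded.length, 0);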

test/platforms/openai/api.test.ts (+22 -1)

@@ -11,6 +11,8 @@ import {
   PlatformEventEmitter,
   OpenAi,
 } from '../../../src';
+import { getPromptTokens } from '../../../src/platforms/openai/usage';
+import { ChatCompletionModel } from '../../../src/platforms/openai';
 
 describe('OpenAI', () => {
   beforeAll(() => {
@@ -34,17 +36,30 @@
   describe('createChatCompletion', () => {
     let result: Partial<OpenAi.ChatCompletion> | undefined;
 
+    let prompt: string;
+
     beforeEach(() => {
       result = undefined;
 
       aiClient.on<OpenAi.ChatCompletionChunkDataEvent>('data', (d) => {
         d.choices.forEach((c) => {
           if (!result) {
+            const promptTokens = getPromptTokens(
+              prompt,
+              ChatCompletionModel.GPT_3_5_TURBO,
+            )
+              .length;
+
             result = {
               id: d.id,
               object: OpenAi.ChatCompletionDataEventObjectType.CHAT_COMPLETION,
               created: d.created,
               model: d.model,
+              usage: {
+                prompt_tokens: promptTokens,
+                completion_tokens: 0,
+                total_tokens: promptTokens,
+              },
             };
           }
 
@@ -68,6 +83,11 @@
           if (c.delta.content) {
             (result.choices[c.index].message as Record<string, unknown>)
               .content += c.delta.content;
+
+            result.usage!.completion_tokens += 1;
+            result.usage!.total_tokens = (
+              result.usage!.prompt_tokens + result.usage!.completion_tokens
+            );
           }
         }

@@ -92,8 +112,9 @@
         reject(error);
       });
 
+      prompt = 'Count from 1 to 20 in increments of a random number from 1 to 10.';
       aiClient.createChatCompletion({
-        messages: 'Count from 1 to 20 in increments of a random number from 1 to 10.',
+        messages: prompt,
         model: OpenAi.ChatCompletionModel.GPT_3_5_TURBO,
         n: 2,
       });
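As with getPromptTokens, the accounting in this test is approximate: completion_tokens is incremented once per streamed content delta, treating each chunk as roughly one token rather than re-encoding the output with tiktoken.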


yarn.lock (+5 -0)

@@ -233,6 +233,11 @@
     "@babel/helper-validator-identifier" "^7.19.1"
     to-fast-properties "^2.0.0"
 
+"@dqbd/tiktoken@^1.0.6":
+  version "1.0.6"
+  resolved "https://registry.yarnpkg.com/@dqbd/tiktoken/-/tiktoken-1.0.6.tgz#96bfd0a4909726c61551a8c783493f01841bd163"
+  integrity sha512-umSdeZTy/SbPPKVuZKV/XKyFPmXSN145CcM3iHjBbmhlohBJg7vaDp4cPCW+xNlWL6L2U1sp7T2BD+di2sUKdA==
+
 "@esbuild/android-arm64@0.17.16":
   version "0.17.16"
   resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.16.tgz#7b18cab5f4d93e878306196eed26b6d960c12576"

