
Implement ElevenLabs API endpoints

Make requests to the ElevenLabs API.
master · TheoryOfNekomata · 1 year ago
commit f3fd114337
18 changed files with 258 additions and 70 deletions
  1. README.md  (+3 -3)
  2. docs/assets/91986900_p7.jpg  (BIN)
  3. docs/assets/mio-ai.png  (BIN)
  4. src/common.ts  (+16 -0)
  5. src/index.ts  (+15 -3)
  6. src/packages/request.ts  (+28 -0)
  7. src/platforms/elevenlabs/common.ts  (+3 -1)
  8. src/platforms/elevenlabs/events.ts  (+72 -0)
  9. src/platforms/elevenlabs/features/tts.ts  (+43 -0)
  10. src/platforms/elevenlabs/features/voice.ts  (+38 -0)
  11. src/platforms/elevenlabs/index.ts  (+1 -0)
  12. src/platforms/openai/common.ts  (+3 -20)
  13. src/platforms/openai/events.ts  (+19 -25)
  14. src/platforms/openai/features/chat-completion.ts  (+3 -4)
  15. src/platforms/openai/features/edit.ts  (+3 -3)
  16. src/platforms/openai/features/image.ts  (+5 -5)
  17. src/platforms/openai/features/model.ts  (+3 -2)
  18. src/platforms/openai/features/text-completion.ts  (+3 -4)

README.md  (+3 -3)

@@ -1,6 +1,6 @@
# mio-ai

[![Mio](./docs/assets/91986900_p7.jpg)](https://www.pixiv.net/en/artworks/91986900)
[![Mio](./docs/assets/mio-ai.png)](https://www.pixiv.net/en/artworks/91986900)

Many-in-one AI client.

@@ -21,5 +21,5 @@ Many-in-one AI client.
- [ ] fine-tunes
- [ ] moderations
* ElevenLabs
- [ ] TTS (stream)
- [ ] get voices
- [X] TTS (stream)
- [X] get voices
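
With both ElevenLabs items now checked off, the client added in this commit can be constructed and wired up in a few lines. A minimal sketch follows; the package import name and environment variable are assumptions, and `baseUrl` falls back to `DEFAULT_BASE_URL` when omitted.

```ts
import { ElevenLabs } from 'mio-ai'; // assumed import name; adjust to the published package

// Construct the event-emitter client added in this commit. baseUrl is optional
// and defaults to DEFAULT_BASE_URL ('https://api.elevenlabs.io').
const client: ElevenLabs.PlatformEventEmitter = new ElevenLabs.PlatformEventEmitterImpl({
  apiKey: process.env.ELEVENLABS_API_KEY as string, // assumed env var name
  apiVersion: ElevenLabs.ApiVersion.V1,
});

client.on('error', (err) => console.error(err));
client.on('end', () => console.log('request finished'));
// client.getVoices() and client.createTextToSpeech(...) are sketched alongside
// their feature diffs further down.
```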

docs/assets/91986900_p7.jpg  (BIN)

Width: 1450  |  Height: 2000  |  Size: 1.3 MiB

docs/assets/mio-ai.png  (BIN)

Width: 1450  |  Height: 2000  |  Size: 2.0 MiB

src/common.ts  (+16 -0)

@@ -0,0 +1,16 @@
export type DataEventCallback<D> = (data: D) => void;

export type ErrorEventCallback = (event: Error) => void;

export interface PlatformEventEmitter extends NodeJS.EventEmitter {
on<D>(event: 'data', callback: DataEventCallback<D>): this;
on(event: 'end', callback: () => void): this;
on(event: 'error', callback: ErrorEventCallback): this;
}

export class PlatformApiError extends Error {
constructor(message: string, readonly response: Response) {
super(message);
this.name = 'PlatformApiError';
}
}
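
The shared `PlatformApiError` carries the failed `Response`, so consumers can inspect the HTTP status without re-fetching. A small sketch, assuming the shared module is reachable at this path from the consuming script:

```ts
import { PlatformApiError } from './src/common'; // path is illustrative
import type { PlatformEventEmitter } from './src/common';

// Log platform API failures with their HTTP status; pass other errors through.
const attachErrorLogging = (emitter: PlatformEventEmitter): void => {
  emitter.on('error', (err) => {
    if (err instanceof PlatformApiError) {
      console.error(`${err.name} (HTTP ${err.response.status}): ${err.message}`);
      return;
    }
    console.error(err);
  });
};
```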

src/index.ts  (+15 -3)

@@ -1,10 +1,22 @@
import * as OpenAiImpl from './platforms/openai';
import * as ElevenLabsImpl from './platforms/elevenlabs';

const SUPPORTED_PLATFORMS = { OpenAi: OpenAiImpl } as const;
const SUPPORTED_PLATFORMS = {
OpenAi: OpenAiImpl,
ElevenLabs: ElevenLabsImpl,
} as const;

export * as OpenAi from './platforms/openai';
export type PlatformConfig = OpenAiImpl.PlatformConfig;
export type PlatformEventEmitter = OpenAiImpl.PlatformEventEmitter;
export * as ElevenLabs from './platforms/elevenlabs';

export type PlatformConfig = (
OpenAiImpl.PlatformConfig
| ElevenLabsImpl.PlatformConfig
)
export type PlatformEventEmitter = (
OpenAiImpl.PlatformEventEmitter
| ElevenLabsImpl.PlatformEventEmitter
);

export const createAiClient = (configParams: PlatformConfig): PlatformEventEmitter => {
const {


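Both platform emitters now share the `data`/`end`/`error` contract, so a single helper can adapt either one into a Promise. A sketch against the exported union type; the package import name is an assumption:

```ts
import type { PlatformEventEmitter } from 'mio-ai'; // assumed import name

// Collect every 'data' payload until 'end', rejecting on the first 'error'.
// `start` triggers the actual request, e.g. () => client.listModels().
const collect = <D>(emitter: PlatformEventEmitter, start: () => void): Promise<D[]> => (
  new Promise<D[]>((resolve, reject) => {
    const chunks: D[] = [];
    emitter.on<D>('data', (chunk) => { chunks.push(chunk); });
    emitter.on('error', reject);
    emitter.on('end', () => resolve(chunks));
    start();
  })
);
```
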
src/packages/request.ts  (+28 -0)

@@ -0,0 +1,28 @@
export type DoFetchBody = BodyInit | Record<string, unknown>

export type DoFetch = (
method: string,
path: string,
body?: DoFetchBody
) => Promise<Response>;

export type ConsumeStream = (
response: Response,
) => Promise<void>;

export const processRequest = (body: DoFetchBody, requestHeaders: Record<string, string>) => {
if (
body instanceof FormData
|| body instanceof URLSearchParams
) {
return { body };
}

return {
body: JSON.stringify(body),
headers: {
...requestHeaders,
'Content-Type': 'application/json',
},
};
};
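
`processRequest` centralizes the body handling both platforms now share: `FormData` and `URLSearchParams` pass through untouched so the runtime can set its own `Content-Type`, while anything else is JSON-encoded. A quick illustration; the import path assumes a script at the repository root:

```ts
import { processRequest } from './src/packages/request';

const headers = { Authorization: 'Bearer <api-key>' };

// Plain objects are serialized and tagged as JSON:
// -> { body: '{"text":"hi"}', headers: { Authorization: ..., 'Content-Type': 'application/json' } }
console.log(processRequest({ text: 'hi' }, headers));

// FormData is returned as-is with no headers override, so fetch can set the
// multipart boundary itself:
// -> { body: FormData }
console.log(processRequest(new FormData(), headers));
```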

src/platforms/elevenlabs/common.ts  (+3 -1)

@@ -1,4 +1,4 @@
export const enum ApiVersion {
export enum ApiVersion {
V1 = 'v1',
}

@@ -7,3 +7,5 @@ export interface Configuration {
apiVersion: ApiVersion;
baseUrl?: string;
}

export const DEFAULT_BASE_URL = 'https://api.elevenlabs.io' as const;

src/platforms/elevenlabs/events.ts  (+72 -0)

@@ -0,0 +1,72 @@
import { PassThrough } from 'stream';
import { EventEmitter } from 'events';
import fetchPonyfill from 'fetch-ponyfill';
import { DoFetchBody, processRequest } from '../../packages/request';
import * as AllPlatformsCommon from '../../common';
import { Configuration, DEFAULT_BASE_URL } from './common';
import { createTextToSpeech, CreateTextToSpeechParams } from './features/tts';
import { getVoices } from './features/voice';

export interface PlatformEventEmitter extends AllPlatformsCommon.PlatformEventEmitter {
getVoices(): void;
createTextToSpeech(params: CreateTextToSpeechParams): void;
}

export class PlatformEventEmitterImpl extends EventEmitter implements PlatformEventEmitter {
readonly getVoices: PlatformEventEmitter['getVoices'];

readonly createTextToSpeech: PlatformEventEmitter['createTextToSpeech'];

constructor(configParams: Configuration) {
super();
const platformHeaders: Record<string, string> = {
'XI-API-Key': configParams.apiKey, // ElevenLabs expects the raw API key here, not a Bearer token.
};

const { fetch: fetchInstance } = fetchPonyfill();
const doFetch = (method: string, path: string, body?: DoFetchBody) => {
let finalBody: BodyInit | undefined;
let finalHeaders = {
...platformHeaders,
};
if (body) {
const finalRequest = processRequest(body, finalHeaders);
finalBody = finalRequest.body;
if (finalRequest.headers) {
finalHeaders = finalRequest.headers;
}
}

const theFetchParams: Record<string, unknown> = {
method,
headers: finalHeaders,
};

if (finalBody) {
theFetchParams.body = finalBody;
}

const url = new URL(
`/${configParams.apiVersion}${path}`,
configParams.baseUrl ?? DEFAULT_BASE_URL,
).toString();

this.emit('start', {
...theFetchParams,
url,
});

return fetchInstance(url, theFetchParams);
};

const consumeStream = async (response: Response) => {
// eslint-disable-next-line no-restricted-syntax
for await (const chunk of response.body as unknown as PassThrough) {
this.emit('data', chunk);
}
};

this.getVoices = getVoices.bind(this, doFetch);
this.createTextToSpeech = createTextToSpeech.bind(this, doFetch, consumeStream);
}
}
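
Besides `data`/`end`/`error`, the constructor emits a `start` event with the method, headers, and resolved URL just before each fetch. It is not declared on the typed interface, but the inherited EventEmitter signature still accepts it; a small debugging tap:

```ts
import { ElevenLabs } from 'mio-ai'; // assumed import name

// Log every outgoing ElevenLabs request. 'start' carries { method, headers, url }
// (plus body when one was supplied); it is registered via the untyped EventEmitter overload.
const logRequests = (client: ElevenLabs.PlatformEventEmitter): void => {
  client.on('start', (request: Record<string, unknown>) => {
    console.debug('[elevenlabs request]', request);
  });
};
```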

src/platforms/elevenlabs/features/tts.ts  (+43 -0)

@@ -0,0 +1,43 @@
import { ConsumeStream, DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

export interface CreateTextToSpeechParams {
voiceId: string;
text: string;
voiceSettings?: {
stability?: number;
similarityBoost?: number;
};
}

export function createTextToSpeech(
this: NodeJS.EventEmitter,
doFetch: DoFetch,
consumeStream: ConsumeStream,
params: CreateTextToSpeechParams,
) {
doFetch('POST', `/text-to-speech/${params.voiceId}/stream`, {
text: params.text,
voice_settings: params.voiceSettings,
} as Record<string, unknown>)
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Create text to speech returned with status: ${response.status}`,
response,
));
this.emit('end');
return;
}

await consumeStream(response);
this.emit('end');
})
.catch((err) => {
this.emit('error', err as Error);
this.emit('end');
});

return this;
}
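
A sketch of streaming synthesized speech to disk with the new endpoint. The voice ID and environment variable are placeholders, chunks are assumed to be Buffers under the node-fetch ponyfill, and the `.mp3` extension assumes the API's default MPEG output:

```ts
import { createWriteStream } from 'fs';
import { ElevenLabs } from 'mio-ai'; // assumed import name

const client: ElevenLabs.PlatformEventEmitter = new ElevenLabs.PlatformEventEmitterImpl({
  apiKey: process.env.ELEVENLABS_API_KEY as string, // assumed env var name
  apiVersion: ElevenLabs.ApiVersion.V1,
});

// Pipe each streamed audio chunk into a file and close it when the request ends.
const out = createWriteStream('speech.mp3');
client.on<Buffer>('data', (chunk) => { out.write(chunk); });
client.on('end', () => out.end());
client.on('error', (err) => {
  console.error(err);
  out.end();
});

client.createTextToSpeech({
  voiceId: '<voice-id>', // placeholder
  text: 'Hello from mio-ai.',
  voiceSettings: { stability: 0.5, similarityBoost: 0.5 },
});
```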

src/platforms/elevenlabs/features/voice.ts  (+38 -0)

@@ -0,0 +1,38 @@
import { DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

// https://docs.elevenlabs.io/api-reference/voices

export interface Voice {
voice_id: string;
name: string;
category: string;
}

export function getVoices(
this: NodeJS.EventEmitter,
doFetch: DoFetch,
) {
doFetch('GET', '/voices')
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Request from platform returned with status: ${response.status}`,
response,
));
this.emit('end');
return;
}

const responseData = await response.json() as Record<string, unknown>;
this.emit('data', responseData.voices as Voice[]);
this.emit('end');
})
.catch((err) => {
this.emit('error', err as Error);
this.emit('end');
});

return this;
}
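
And the matching sketch for the voices endpoint. The `data` payload is the `voices` array from the response body; the inline type mirrors the `Voice` interface above, since this diff does not re-export it from the platform index:

```ts
import { ElevenLabs } from 'mio-ai'; // assumed import name

const client: ElevenLabs.PlatformEventEmitter = new ElevenLabs.PlatformEventEmitterImpl({
  apiKey: process.env.ELEVENLABS_API_KEY as string, // assumed env var name
  apiVersion: ElevenLabs.ApiVersion.V1,
});

// The payload mirrors the Voice interface defined in this file.
client.on<Array<{ voice_id: string; name: string; category: string }>>('data', (voices) => {
  voices.forEach((voice) => console.log(voice.voice_id, voice.name, voice.category));
});
client.on('error', (err) => console.error(err));

client.getVoices();
```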

src/platforms/elevenlabs/index.ts  (+1 -0)

@@ -1,6 +1,7 @@
import { Configuration } from './common';

export * from './common';
export { PlatformEventEmitter, PlatformEventEmitterImpl } from './events';

export const PLATFORM_ID = 'elevenlabs' as const;



src/platforms/openai/common.ts  (+3 -20)

@@ -19,26 +19,7 @@ export interface CreatedResource {
created: Timestamp;
}

export type DoFetchBody = BodyInit | Record<string, unknown>

export type DoFetch = (
method: string,
path: string,
body?: DoFetchBody
) => Promise<Response>;

export type ConsumeStream = (
response: Response,
) => Promise<void>;

export class PlatformError extends Error {
constructor(message: string, readonly response: Response) {
super(message);
this.name = 'OpenAi.PlatformError';
}
}

export const enum ApiVersion {
export enum ApiVersion {
V1 = 'v1',
}

@@ -48,3 +29,5 @@ export interface Configuration {
apiKey: string;
baseUrl?: string;
}

export const DEFAULT_BASE_URL = 'https://api.openai.com' as const;

src/platforms/openai/events.ts  (+19 -25)

@@ -1,7 +1,8 @@
import { PassThrough } from 'stream';
import { EventEmitter } from 'events';
import fetchPonyfill from 'fetch-ponyfill';
import { Configuration, DoFetchBody } from './common';
import * as AllPlatformsCommon from '../../common';
import { Configuration, DEFAULT_BASE_URL } from './common';
import { createTextCompletion, CreateTextCompletionParams } from './features/text-completion';
import { CreateChatCompletionParams, createChatCompletion } from './features/chat-completion';
import {
@@ -14,12 +15,9 @@ import {
} from './features/image';
import { CreateEditParams, createEdit } from './features/edit';
import { listModels } from './features/model';
import { DoFetchBody, processRequest } from '../../packages/request';

export type DataEventCallback<D> = (data: D) => void;

export type ErrorEventCallback = (event: Error) => void;

export interface PlatformEventEmitter extends NodeJS.EventEmitter {
export interface PlatformEventEmitter extends AllPlatformsCommon.PlatformEventEmitter {
createChatCompletion(params: CreateChatCompletionParams): void;
createImage(params: CreateImageParams): void;
createImageEdit(params: CreateImageEditParams): void;
@@ -27,9 +25,6 @@ export interface PlatformEventEmitter extends NodeJS.EventEmitter {
createCompletion(params: CreateTextCompletionParams): void;
createEdit(params: CreateEditParams): void;
listModels(): void;
on<D>(event: 'data', callback: DataEventCallback<D>): this;
on(event: 'end', callback: () => void): this;
on(event: 'error', callback: ErrorEventCallback): this;
}

export class PlatformEventEmitterImpl extends EventEmitter implements PlatformEventEmitter {
@@ -59,31 +54,30 @@ export class PlatformEventEmitterImpl extends EventEmitter implements PlatformEv

const { fetch: fetchInstance } = fetchPonyfill();
const doFetch = (method: string, path: string, body?: DoFetchBody) => {
const requestHeaders = {
let finalBody: BodyInit | undefined;
let finalHeaders = {
...platformHeaders,
};

let theBody: BodyInit;

if (
body instanceof FormData
|| body instanceof URLSearchParams
) {
theBody = body;
} else {
theBody = JSON.stringify(body);
requestHeaders['Content-Type'] = 'application/json';
if (body) {
const finalRequest = processRequest(body, finalHeaders);
finalBody = finalRequest.body;
if (finalRequest.headers) {
finalHeaders = finalRequest.headers;
}
}

const theFetchParams = {
const theFetchParams: Record<string, unknown> = {
method,
headers: requestHeaders,
body: theBody,
headers: finalHeaders,
};

if (finalBody) {
theFetchParams.body = finalBody;
}

const url = new URL(
`/${configParams.apiVersion}${path}`,
configParams.baseUrl ?? 'https://api.openai.com',
configParams.baseUrl ?? DEFAULT_BASE_URL,
).toString();

this.emit('start', {


src/platforms/openai/features/chat-completion.ts  (+3 -4)

@@ -1,9 +1,6 @@
import {
FinishableChoiceBase,
ConsumeStream,
DataEventId,
DoFetch,
PlatformError,
CreatedResource,
} from '../common';
import {
@@ -11,6 +8,8 @@ import {
} from '../usage';
import { ChatCompletionModel } from '../models';
import { normalizeChatMessage, Message, MessageObject } from '../chat';
import { ConsumeStream, DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

export interface CreateChatCompletionParams {
messages: Message | Message[];
@@ -75,7 +74,7 @@ export function createChatCompletion(
} as Record<string, unknown>)
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformError(
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Create chat completion returned with status: ${response.status}`,
response,


src/platforms/openai/features/edit.ts  (+3 -3)

@@ -1,13 +1,13 @@
import {
ChoiceBase,
DoFetch,
PlatformError,
CreatedResource,
} from '../common';
import {
UsageMetadata,
} from '../usage';
import { EditModel } from '../models';
import { DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

export enum DataEventObjectType {
EDIT = 'edit',
@@ -46,7 +46,7 @@ export function createEdit(
} as Record<string, unknown>)
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformError(
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Request from platform returned with status: ${response.status}`,
response,


src/platforms/openai/features/image.ts  (+5 -5)

@@ -1,9 +1,9 @@
import * as FormDataUtils from '../../../packages/form-data';
import {
DoFetch,
PlatformError,
CreatedResource,
} from '../common';
import { DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

export enum ImageSize {
SQUARE_256 = '256x256',
@@ -45,7 +45,7 @@ export function createImage(
} as Record<string, unknown>)
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformError(
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Request from platform returned with status: ${response.status}`,
response,
@@ -94,7 +94,7 @@ export function createImageEdit(
}))
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformError(
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Request from platform returned with status: ${response.status}`,
response,
@@ -139,7 +139,7 @@ export function createImageVariation(
}))
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformError(
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Request from platform returned with status: ${response.status}`,
response,


src/platforms/openai/features/model.ts  (+3 -2)

@@ -1,4 +1,5 @@
import { DoFetch, PlatformError } from '../common';
import { DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

export enum DataEventObjectType {
MODEL = 'model',
@@ -18,7 +19,7 @@ export function listModels(
doFetch('GET', '/models')
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformError(
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Request from platform returned with status: ${response.status}`,
response,


src/platforms/openai/features/text-completion.ts  (+3 -4)

@@ -1,15 +1,14 @@
import { TextCompletionModel } from '../models';
import {
ConsumeStream,
DataEventId,
DoFetch,
FinishableChoiceBase,
PlatformError,
CreatedResource,
} from '../common';
import {
UsageMetadata,
} from '../usage';
import { ConsumeStream, DoFetch } from '../../../packages/request';
import { PlatformApiError } from '../../../common';

export enum DataEventObjectType {
TEXT_COMPLETION = 'text_completion',
@@ -76,7 +75,7 @@ export function createTextCompletion(
} as Record<string, unknown>)
.then(async (response) => {
if (!response.ok) {
this.emit('error', new PlatformError(
this.emit('error', new PlatformApiError(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`Create text completion returned with status: ${response.status}`,
response,

