
Commit: tweaks
cfortuner committed Sep 7, 2024
1 parent 0c40688 commit 88076b2
Showing 3 changed files with 54 additions and 50 deletions.
11 changes: 4 additions & 7 deletions src/fetch-api.ts
@@ -12,13 +12,12 @@ export interface KyOptions extends Omit<Options, 'credentials'> {
* Create an instance of Ky with options shared by all requests.
*/
export function createApiInstance(args: {
apiKey: string;
anthropicApiKey?: string;
apiKeyHeader: { [key: string]: string};
baseUrl?: string;
organizationId?: string;
kyOptions?: KyOptions;
}): KyInstance {
const { apiKey, anthropicApiKey, baseUrl, organizationId, kyOptions = {} } = args;
const { apiKeyHeader, baseUrl, organizationId, kyOptions = {} } = args;
const { headers, hooks = {}, prefixUrl, retry, timeout, ...rest } = kyOptions;

// Add a hook to handle OpenAI API errors
@@ -47,15 +46,13 @@ export function createApiInstance(args: {
}
});

const apiKeyHeader = apiKey ? { Authorization: `Bearer ${apiKey}` } : {};
const anthropicApiKeyHeader = anthropicApiKey ? { 'x-api-key': anthropicApiKey } : {};
const apiKeyHeaders = { ...apiKeyHeader, ...anthropicApiKeyHeader };


return ky.extend({
prefixUrl: baseUrl || prefixUrl || DEFAULT_BASE_URL,
headers: {
'User-Agent': 'openai-fetch',
...apiKeyHeaders,
...apiKeyHeader,
...(organizationId && {
'OpenAI-Organization': organizationId,
}),
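For context, a minimal sketch of how the reworked createApiInstance signature is called: the caller now supplies the auth header directly instead of passing provider-specific keys. This mirrors the call sites added in src/openai-client.ts below; the key values are placeholders.

import { createApiInstance } from './fetch-api.js';

// Placeholder keys for illustration only.
const openAiKey = 'sk-...';
const anthropicKey = 'sk-ant-...';

// OpenAI: the key travels as a Bearer token in the Authorization header.
const openAiApi = createApiInstance({
  apiKeyHeader: { Authorization: `Bearer ${openAiKey}` },
});

// Anthropic: the key is sent as x-api-key, together with the Anthropic base URL.
const anthropicApi = createApiInstance({
  apiKeyHeader: { 'x-api-key': anthropicKey },
  baseUrl: 'https://api.anthropic.com/v1',
});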
71 changes: 43 additions & 28 deletions src/openai-client.ts
@@ -1,8 +1,10 @@
import { type Anthropic } from '../anthropic-types/index.js';

Check failure on line 1 in src/openai-client.ts (GitHub Actions, Test Node.js 18, 20, 21, and 22): Run autofix to sort these imports!
import { type KyInstance } from 'ky';
import { type OpenAI } from '../openai-types/index.js';
import { createApiInstance, type KyOptions } from './fetch-api.js';
import { StreamCompletionChunker } from './streaming.js';
import {
type AnthropicModel,
type ChatParams,
type ChatResponse,
type ChatStreamParams,
@@ -15,12 +17,10 @@ import {
type EmbeddingResponse,
type ModerationParams,
type ModerationResponse,
type OpenAIModel
} from './types.js';


// Define a type that extracts the provider based on the baseUrl
type InferProvider<T> = T extends { baseUrl: `https://api.anthropic.com/v1` } ? 'anthropic' : 'openai';

type OpenAIConfigOpts = {
/**
* The HTTP endpoint for the OpenAI API. You probably don't want to change this.
@@ -42,6 +42,14 @@ export type ConfigOpts = (OpenAIConfigOpts | AnthropicConfigOpts) &
* @see https://platform.openai.com/account/api-keys
*/
apiKey?: string;
/**
* The API key used to authenticate with the Anthropic API.
*/
anthropic?: {
apiKey: string;
baseUrl: string
}

/**
* The organization ID that should be billed for API requests.
* This is only necessary if your API key is scoped to multiple organizations.
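As a usage sketch (not part of the diff), constructing a client that can talk to both providers with the new anthropic option might look like this. The environment variable names and the Anthropic base URL are illustrative, baseUrl is required by the config shape above, and the import assumes the package's published entry point.

import { OpenAIClient } from 'openai-fetch';

const client = new OpenAIClient({
  // Falls back to process.env.OPENAI_API_KEY if omitted (see the constructor below).
  apiKey: process.env.OPENAI_API_KEY,
  // Opting in to Anthropic routing for Claude models.
  anthropic: {
    apiKey: process.env.ANTHROPIC_API_KEY!,
    baseUrl: 'https://api.anthropic.com/v1',
  },
});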
@@ -62,36 +70,42 @@ type RequestOpts = {
};

export class OpenAIClient<T extends ConfigOpts = ConfigOpts> {
private api: ReturnType<typeof createApiInstance>;
private opts: T;
private openAiApi: ReturnType<typeof createApiInstance>;
private anthropicApi?: ReturnType<typeof createApiInstance>;

constructor(opts: T) {
this.opts = opts;
const process = globalThis.process || { env: {} };
const apiKey = opts.apiKey || process.env.OPENAI_API_KEY;
const organizationId = opts.organizationId || process.env.OPENAI_ORG_ID;
if (!apiKey)
throw new Error(
'Missing OpenAI API key. Please provide one in the config or set the OPENAI_API_KEY environment variable.'
);


if (opts.anthropic) {
this.anthropicApi = createApiInstance({
apiKeyHeader: { 'x-api-key': opts.anthropic.apiKey},
baseUrl: opts.anthropic.baseUrl,
organizationId,
kyOptions: opts.kyOptions,
})
}

this.api = createApiInstance({
apiKey,
anthropicApiKey: this.getProvider(opts.baseUrl) === 'anthropic' ? apiKey : undefined,
this.openAiApi= createApiInstance({
apiKeyHeader: { Authorization: `Bearer ${apiKey}` },
baseUrl: opts.baseUrl,
organizationId,
kyOptions: opts.kyOptions,
});
}

getProvider(overrideBaseUrl?: string) {
const defaultBaseUrl = this.opts.baseUrl;
const baseUrl = overrideBaseUrl || defaultBaseUrl;
return baseUrl?.includes('anthropic') ? 'anthropic' : 'openai';
private getProvider(model: AnthropicModel | OpenAIModel) {
return model.includes('claude') && this.anthropicApi ? 'anthropic' : 'openai'
}

private getApi(opts?: RequestOpts) {
return opts ? this.api.extend(opts) : this.api;
private getApi(opts?: RequestOpts, providerApi: KyInstance = this.openAiApi) {
return opts ? providerApi.extend(opts) : providerApi
}

private convertToolDefinitionToAnthropicFormat(tool: OpenAI.ChatCompletionTool): Anthropic.Tool {
@@ -104,7 +118,7 @@ export class OpenAIClient<T extends ConfigOpts = ConfigOpts> {
}
};
}
private convertChatParamsToAnthropicFormat(params: ChatParams<'anthropic'>) {
private convertChatParamsToAnthropicFormat(params: ChatParams) {
// Anthropic doesn't allow a system prompt in messages.
const messages = params.messages.filter(msg => msg.role !== 'system');

@@ -113,6 +127,7 @@ export class OpenAIClient<T extends ConfigOpts = ConfigOpts> {

return {
...params,
max_tokens: params.max_tokens || 1000,
messages,
tools
};
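In effect, a request without max_tokens now gets a default of 1000 before it is sent to Anthropic, and system messages are stripped from the messages array (the tool conversion happens in the elided lines above). A rough before/after, sketched as comments:

// Input, OpenAI-style:
//   { model: 'claude-3-haiku-20240307',
//     messages: [{ role: 'system', content: 'Be terse.' },
//                { role: 'user', content: 'Hi' }] }
//
// After convertChatParamsToAnthropicFormat (per the hunk above):
//   { model: 'claude-3-haiku-20240307',
//     max_tokens: 1000,                             // defaulted when not provided
//     messages: [{ role: 'user', content: 'Hi' }],  // system message filtered out
//     tools: ... }                                  // mapped by the elided lines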
@@ -190,13 +205,13 @@ export class OpenAIClient<T extends ConfigOpts = ConfigOpts> {


/** Create a completion for a chat message. */
async createChatCompletion<R extends RequestOpts>(
params: ChatParams<InferProvider<R['headers'] & T>> ,
opts?: R
async createChatCompletion(
params: ChatParams,
opts?: RequestOpts
): Promise<ChatResponse> {
if (this.getProvider(opts?.headers?.baseUrl) === 'anthropic') {
const anthropicParams = this.convertChatParamsToAnthropicFormat(params as ChatParams<'anthropic'>);
const anthropicResponse = await this.getApi(opts).post('messages', { json: anthropicParams }).json();
if (this.getProvider(params.model) === 'anthropic' && this.anthropicApi) {
const anthropicParams = this.convertChatParamsToAnthropicFormat(params);
const anthropicResponse = await this.getApi(opts, this.anthropicApi).post('messages', { json: anthropicParams }).json();
return this.convertAnthropicResponseToOpenAIFormat(anthropicResponse);
}
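A hedged usage sketch of the new model-based routing, assuming a client configured with both keys as in the earlier sketch and an async context for the awaits: the same call shape covers both providers, and Claude responses come back converted to the OpenAI ChatCompletion shape.

// `client` is an OpenAIClient configured with the `anthropic` option (see above).

// Routed to the OpenAI API.
const gptRes = await client.createChatCompletion({
  model: 'gpt-4o',
  messages: [{ role: 'user', content: 'Hello' }],
});

// Routed to Anthropic's messages endpoint because the model name contains
// "claude" and an anthropicApi instance exists.
const claudeRes = await client.createChatCompletion({
  model: 'claude-3-5-sonnet-20240620',
  messages: [{ role: 'user', content: 'Hello' }],
});

// Both results are OpenAI-shaped ChatResponse objects.
console.log(gptRes.choices[0].message.content);
console.log(claudeRes.choices[0].message.content);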

@@ -207,13 +222,13 @@ export class OpenAIClient<T extends ConfigOpts = ConfigOpts> {
}

/** Create a chat completion and stream back partial progress. */
async streamChatCompletion<R extends RequestOpts>(
params: ChatStreamParams<InferProvider<R['headers'] & T>>,
opts?: R
async streamChatCompletion(
params: ChatStreamParams,
opts?: RequestOpts
): Promise<ChatStreamResponse> {
if (this.getProvider(opts?.headers?.baseUrl) === 'anthropic') {
const anthropicParams = this.convertChatParamsToAnthropicFormat(params as ChatParams<'anthropic'>);
const response = await this.getApi(opts).post('messages', {
if (this.getProvider(params.model) === 'anthropic') {
const anthropicParams = this.convertChatParamsToAnthropicFormat(params);
const response = await this.getApi(opts, this.anthropicApi).post('messages', {
json: { ...anthropicParams, stream: true },
onDownloadProgress: () => {}, // trick ky to return ReadableStream.
});
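Per the declared types (ChatStreamResponse stays a ReadableStream of OpenAI-style chunks, see src/types.ts below), consuming an Anthropic-backed stream should look the same as an OpenAI one. A brief Node-flavoured sketch, again assuming the client configured earlier; the prompt is illustrative.

async function printHaiku() {
  const stream = await client.streamChatCompletion({
    model: 'claude-3-opus-20240229',
    messages: [{ role: 'user', content: 'Write a haiku about diffs.' }],
  });

  // ChatStreamResponse is a ReadableStream<ChatStreamChunk>, so read it directly.
  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    process.stdout.write(value.choices[0]?.delta?.content ?? '');
  }
}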
22 changes: 7 additions & 15 deletions src/types.ts
@@ -68,24 +68,16 @@ export type ChatMessage = {
export type { AnthropicModelAndUnknown as AnthropicModel }
export type { ChatModel as OpenAIModel}

export type ChatParams<T extends 'openai' | 'anthropic' = 'openai'> =
T extends 'anthropic'
// Use the openai param shape, with a few exceptions
? Omit<OpenAI.ChatCompletionCreateParams, 'stream' | 'messages' | 'model'> & {
messages: ChatMessage[];
// Set the anthropic model of choice
model: Exclude<AnthropicModelAndUnknown, ChatModel>;
// anthropic requires max_tokens to be set explicitly
max_tokens: number;
}
: Omit<OpenAI.ChatCompletionCreateParams, 'stream' | 'messages' | 'model'> & {
messages: ChatMessage[];
model: Exclude<ChatModel | (string & {}), Exclude<AnthropicModelAndUnknown, (string & {})>>;
};
type AnthropicModel = | "claude-3-5-sonnet-20240620" | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.1" | "claude-2.0" | "claude-instant-1.2"

export type ChatParams = Omit<OpenAI.ChatCompletionCreateParams, 'stream' | 'messages' | 'model'> & {
messages: ChatMessage[];
model: ChatModel | AnthropicModel | ({} & string)
};

export type ChatResponse = OpenAI.ChatCompletion;

export type ChatStreamParams<T extends 'openai' | 'anthropic' = 'openai'> = ChatParams<T>;
export type ChatStreamParams = ChatParams
export type ChatStreamChunk = OpenAI.ChatCompletionChunk;
export type ChatStreamResponse = ReadableStream<ChatStreamChunk>;

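The ({} & string) member keeps model open to arbitrary strings (for example, proxy or fine-tuned model names) while preserving editor autocomplete for the known OpenAI and Anthropic IDs. A quick illustration of what the new union accepts; the import path assumes code living next to src/types.ts, and the custom model name is made up.

import { type ChatParams } from './types.js';

// All three assignments type-check against the new ChatParams['model'] union:
const openAiModel: ChatParams['model'] = 'gpt-4o';
const claudeModel: ChatParams['model'] = 'claude-3-haiku-20240307';
const customModel: ChatParams['model'] = 'my-proxy/llama-3-70b'; // arbitrary strings still allowed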
