Skip to content

Commit

Permalink
feat: add openai services
Browse files Browse the repository at this point in the history
  • Loading branch information
kirklin committed Feb 6, 2024
1 parent cbb7240 commit 5e0c4c7
Show file tree
Hide file tree
Showing 23 changed files with 582 additions and 92 deletions.
2 changes: 1 addition & 1 deletion apps/admin/.env.development
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ VITE_USE_PWA=true
VITE_PUBLIC_PATH=/

# Proxy settings for your development server (array of [string, string] pairs)
VITE_PROXY=[["/api","http://localhost:8899"]]
VITE_PROXY=[["/api","http://localhost:8899"],["/ai-openai","http://localhost:9000"]]

# Basic interface address SPA
VITE_GLOB_API_URL=/api
Expand Down
2 changes: 1 addition & 1 deletion apps/admin/.env.production
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ VITE_USE_PWA=true
VITE_PUBLIC_PATH=/

# Proxy settings for your development server (array of [string, string] pairs)
VITE_PROXY=[["/api","https://celeris-web-api.vercel.app/"]]
VITE_PROXY=[["/api","https://celeris-web-api.vercel.app/"],["/ai-openai","https://celeris-web-api-ai-openai.vercel.app/"]]

# Basic interface address SPA
VITE_GLOB_API_URL=/api
Expand Down
2 changes: 1 addition & 1 deletion apps/admin/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
"preview:dist": "vite preview"
},
"dependencies": {
"@celeris/ai-core": "workspace:*",
"@celeris/ai-services-openai": "workspace:*",
"@celeris/constants": "workspace:*",
"@celeris/directives": "workspace:*",
"@celeris/locale": "workspace:*",
Expand Down
12 changes: 12 additions & 0 deletions apps/admin/vercel.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,18 @@
"source": "/api/:match*",
"destination": "https://celeris-web-api.vercel.app/:match*"
},
{
"source": "/ai-openai",
"destination": "https://celeris-web-api-ai-openai.vercel.app"
},
{
"source": "/ai-openai/",
"destination": "https://celeris-web-api-ai-openai.vercel.app/"
},
{
"source": "/ai-openai/:match*",
"destination": "https://celeris-web-api-ai-openai.vercel.app/:match*"
},
{
"source": "/:path*",
"destination": "/index.html"
Expand Down
3 changes: 3 additions & 0 deletions eslint.config.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import kirklin from "@kirklin/eslint-config";

export default kirklin({
rules: {
"new-cap": "warn",
},
formatters: {
/**
* 格式化CSS、LESS、SCSS文件,以及Vue中的`<style>`块
Expand Down
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,10 @@
},
"scripts": {
"bootstrap": "pnpm install",
"dev": "run-p dev:admin dev:mock",
"dev": "run-p dev:admin dev:mock dev:ai-services-openai",
"dev:admin": "pnpm --filter @celeris/admin dev",
"dev:mock": "pnpm --filter @celeris/admin-api dev",
"dev:ai-services-openai": "pnpm --filter @celeris/ai-services-openai dev",
"build": "pnpm --filter @celeris/admin build",
"generate-tree": "pnpm --filter scripts generate-tree",
"clean": "rimraf node_modules **/*/node_modules **/**/*/node_modules **/*/dist **/**/*/dist",
Expand Down
8 changes: 8 additions & 0 deletions packages/ai/core/src/config/modelProviders/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import type { ChatModelCard } from "../../types/model/openai/ChatGPTModel";
import OpenAIProvider from "./openai";

/**
 * Default model list shipped with Celeris: the chat model cards of every
 * registered provider, collected into a single flat array.
 * Currently only the OpenAI provider contributes models.
 */
export const CELERIS_DEFAULT_MODEL_LIST: ChatModelCard[] = [
  ...OpenAIProvider.chatModels,
];

export { default as OpenAIProvider } from "./openai";
113 changes: 113 additions & 0 deletions packages/ai/core/src/config/modelProviders/openai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
// refs to: https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo
import type { ModelProviderCard } from "../../types/model/openai/ChatGPTModel";

/**
 * OpenAI model provider card.
 *
 * Lists the chat models exposed by the OpenAI provider together with their
 * capabilities (function calling, vision, file upload) and context window
 * size in tokens. Models marked `hidden` are not shown in the default
 * picker; `legacy` marks models deprecated by OpenAI but still callable.
 */
const OpenAI: ModelProviderCard = {
  chatModels: [
    {
      description: "GPT 3.5 Turbo,适用于各种文本生成和理解任务",
      displayName: "GPT-3.5 Turbo",
      functionCall: true,
      id: "gpt-3.5-turbo",
      tokens: 4096,
    },
    {
      displayName: "GPT-3.5 Turbo (0125)",
      functionCall: true,
      id: "gpt-3.5-turbo-0125",
      tokens: 16_385,
    },
    {
      displayName: "GPT-3.5 Turbo (1106)",
      functionCall: true,
      hidden: true,
      id: "gpt-3.5-turbo-1106",
      tokens: 16_385,
    },
    {
      hidden: true,
      id: "gpt-3.5-turbo-instruct",
      tokens: 4096,
    },
    {
      displayName: "GPT-3.5 Turbo 16K",
      hidden: true,
      id: "gpt-3.5-turbo-16k",
      tokens: 16_385,
    },
    {
      hidden: true,
      id: "gpt-3.5-turbo-0613",
      legacy: true,
      tokens: 4096,
    },
    {
      hidden: true,
      id: "gpt-3.5-turbo-16k-0613",
      legacy: true,
      // Fixed: this is the 16k snapshot — its context window is 16,385
      // tokens (was incorrectly 4096; see gpt-3.5-turbo-16k above).
      tokens: 16_385,
    },
    {
      displayName: "GPT-4 Turbo Preview",
      functionCall: true,
      id: "gpt-4-turbo-preview",
      tokens: 128_000,
    },
    {
      displayName: "GPT-4 Turbo Preview (0125)",
      functionCall: true,
      id: "gpt-4-0125-preview",
      tokens: 128_000,
    },
    {
      description: "GPT-4 视觉预览版,支持视觉任务",
      displayName: "GPT-4 Turbo Vision (Preview)",
      id: "gpt-4-vision-preview",
      tokens: 128_000,
      vision: true,
    },
    {
      displayName: "GPT-4 Turbo Preview (1106)",
      functionCall: true,
      hidden: true,
      id: "gpt-4-1106-preview",
      tokens: 128_000,
    },
    {
      displayName: "GPT-4",
      functionCall: true,
      hidden: true,
      id: "gpt-4",
      tokens: 8192,
    },
    {
      functionCall: true,
      hidden: true,
      id: "gpt-4-0613",
      tokens: 8192,
    },
    {
      functionCall: true,
      hidden: true,
      id: "gpt-4-32k",
      tokens: 32_768,
    },
    {
      functionCall: true,
      hidden: true,
      id: "gpt-4-32k-0613",
      tokens: 32_768,
    },
    {
      files: true,
      functionCall: true,
      hidden: true,
      id: "gpt-4-all",
      tokens: 32_768,
      vision: true,
    },
  ],
  enabled: true,
  id: "openai",
};

export default OpenAI;
43 changes: 43 additions & 0 deletions packages/ai/core/src/types/model/openai/ChatGPTModel.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export enum ChatGPTModelEnum {
GPT3_5 = "gpt-3.5-turbo",
GPT3_5_1106 = "gpt-3.5-turbo-1106",
GPT3_5_16K = "gpt-3.5-turbo-16k",
GPT3_5_0125 = "gpt-3.5-turbo-0125",
/**
* GPT 4
*/
Expand Down Expand Up @@ -44,6 +45,48 @@ export interface ChatGPTParams {
top_p?: number;
}

/**
* Interface representing a chat model card.
*/
/**
 * Interface representing a chat model card: the static metadata a UI needs
 * to display and select a chat model.
 */
export interface ChatModelCard {
  /** Description of the chat model. */
  description?: string;
  /** Display name of the chat model. */
  displayName?: string;
  /** Indicates whether the model supports file upload. */
  files?: boolean;
  /** Indicates whether the model supports function call. */
  functionCall?: boolean;
  /** Indicates whether the chat model is hidden (not shown in the default model list). */
  hidden?: boolean;
  /** Unique identifier for the chat model. */
  id: string;
  /** Indicates whether the chat model is custom (user-defined rather than provider-supplied). */
  isCustom?: boolean;
  /** Indicates whether the chat model is legacy (deprecated but not removed yet). */
  legacy?: boolean;
  /** Number of tokens (context window size) supported by the chat model. */
  tokens?: number;
  /** Indicates whether the model supports vision. */
  vision?: boolean;
}

/**
 * Interface representing a model provider (e.g. OpenAI) and the chat
 * models it offers.
 */
export interface ModelProviderCard {
  /** Chat model cards offered by this provider. */
  chatModels: ChatModelCard[];
  /** Whether the provider is enabled. */
  enabled?: boolean;
  /** Unique identifier for the provider. */
  id: string;
}

/** Role of a chat message in the OpenAI chat-completion protocol. */
export type ChatGPTRoleType = "user" | "system" | "assistant" | "function";

export interface ChatGPTMessage {
Expand Down
1 change: 1 addition & 0 deletions packages/ai/services/openai/.env
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
OPENAI_API_KEY=xxxxxxx
1 change: 1 addition & 0 deletions packages/ai/services/openai/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
OPENAI_API_KEY=xxxxxxx
104 changes: 104 additions & 0 deletions packages/ai/services/openai/api/chat-with-functions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
import {
OpenAIStream,
StreamingTextResponse,
experimental_StreamData,
} from "ai";
import OpenAI from "openai";
import type { ChatCompletionCreateParams } from "openai/resources/chat";
import { getServerConfig } from "../config/server";

/**
 * Function (tool) definitions advertised to the model for function calling.
 * `get_current_weather` is handled server-side in `experimental_onFunctionCall`
 * below; `eval_code_in_browser` is only declared here — presumably executed
 * on the client — TODO confirm against the consuming UI.
 */
const functions: ChatCompletionCreateParams.Function[] = [
  {
    name: "get_current_weather",
    description: "Get the current weather.",
    parameters: {
      // JSON Schema for the function arguments.
      type: "object",
      properties: {
        format: {
          type: "string",
          enum: ["celsius", "fahrenheit"],
          description: "The temperature unit to use.",
        },
      },
      required: ["format"],
    },
  },
  {
    name: "eval_code_in_browser",
    description: "Execute javascript code in the browser with eval().",
    parameters: {
      type: "object",
      properties: {
        code: {
          type: "string",
          description: `Javascript code that will be directly executed via eval(). Do not use backticks in your response.
DO NOT include any newlines in your response, and be sure to provide only valid JSON when providing the arguments object.
The output of the eval() will be returned directly by the function.`,
        },
      },
      required: ["code"],
    },
  },
];

export default defineLazyEventHandler(async () => {
const { OPENAI_API_KEY } = getServerConfig();
if (!OPENAI_API_KEY) {
throw new Error("Missing OpenAI API key");
}
const openai = new OpenAI({
apiKey: OPENAI_API_KEY,
});

return defineEventHandler(async (event: any) => {
const { messages } = await readBody(event);

const response = await openai.chat.completions.create({
model: "gpt-3.5-turbo-0613",
stream: true,
messages,
functions,
});

// eslint-disable-next-line new-cap
const data = new experimental_StreamData();
const stream = OpenAIStream(response, {
experimental_onFunctionCall: async (
{ name, arguments: args },
createFunctionCallMessages,
) => {
if (name === "get_current_weather") {
// Call a weather API here
const weatherData = {
temperature: 20,
unit: args.format === "celsius" ? "C" : "F",
};

data.append({
text: "Some custom data",
});

const newMessages = createFunctionCallMessages(weatherData);
return openai.chat.completions.create({
messages: [...messages, ...newMessages],
stream: true,
model: "gpt-3.5-turbo-0613",
});
}
},
onCompletion(completion) {
console.log("completion", completion);

Check failure on line 90 in packages/ai/services/openai/api/chat-with-functions.ts

View workflow job for this annotation

GitHub Actions / LINT

Unexpected console statement
},
onFinal(_completion) {
data.close();
},
experimental_streamData: true,
});

data.append({
text: "Hello, how are you?",
});

return new StreamingTextResponse(stream, {}, data);
});
});
Loading

0 comments on commit 5e0c4c7

Please sign in to comment.