diff --git a/examples/basic/package.json b/examples/basic/package.json index d100fb2..aa47ca3 100644 --- a/examples/basic/package.json +++ b/examples/basic/package.json @@ -5,7 +5,9 @@ "private": true, "scripts": { "start": "run-s start:*", - "start:agnostic": "tsx --env-file=.env ./src/model-agnostic.ts", + "start:natural-language": "tsx --env-file=.env ./src/natural-language.ts", + "start:structured-data": "tsx --env-file=.env ./src/structured-data.ts", + "start:with-optional-tools": "tsx --env-file=.env ./src/with-optional-tools.ts", "start:vertex": "tsx --env-file=.env ./src/vertex.ts", "start:openai": "tsx --env-file=.env ./src/openai.ts", "type-check": "tsc --noEmit" diff --git a/examples/basic/src/model-agnostic.ts b/examples/basic/src/model-agnostic.ts deleted file mode 100644 index 22df6b6..0000000 --- a/examples/basic/src/model-agnostic.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { Kurt, type KurtMessage, type KurtAdapter } from "@formula-monks/kurt" -import { KurtOpenAI } from "@formula-monks/kurt-open-ai" -import { KurtVertexAI } from "@formula-monks/kurt-vertex-ai" -import { VertexAI } from "@google-cloud/vertexai" -import OpenAI from "openai" -import { z } from "zod" - -// --- Create Kurt in a model-agnostic way - -const createKurt = (model: string): Kurt => { - const adapter = findAdapter(model) - if (!adapter) throw new Error(`Model ${model} is not supported.`) - return new Kurt(adapter) -} - -const findAdapter = (model: string): KurtAdapter | null => { - if (KurtOpenAI.isSupportedModel(model)) - return new KurtOpenAI({ openAI: new OpenAI(), model }) - - if (KurtVertexAI.isSupportedModel(model)) - return new KurtVertexAI({ - vertexAI: new VertexAI({ - project: process.env.VERTEX_AI_PROJECT ?? "my-project", - location: process.env.VERTEX_AI_LOCATION ?? "us-central1", - }), - model, - }) - - return null -} - -const model = process.env.KURT_MODEL ?? "gemini-1.0-pro" - -const kurt = createKurt(model) - -// --- Generate Natural Language Output - -const naturalLanguageStream = kurt.generateNaturalLanguage({ - prompt: "Say hello!", -}) - -for await (const event of naturalLanguageStream) { - console.log(event) -} -// { chunk: "Hello" } -// { chunk: "!" } -// { chunk: " How" } -// { chunk: " can" } -// { chunk: " I" } -// { chunk: " assist" } -// { chunk: " you" } -// { chunk: " today" } -// { chunk: "?" } -// { -// finished: true, -// text: "Hello! How can I assist you today?", -// data: undefined, -// } - -const { text } = await naturalLanguageStream.result -console.log(text) -// "Hello! How can I assist you today?" - -// --- Generate Structured Data Output - -const structuredDataStream = kurt.generateStructuredData({ - prompt: "Say hello!", - schema: z.object({ - say: z.string().describe("A single word to say"), - }), -}) - -for await (const event of structuredDataStream) { - console.log(event) -} -// { chunk: '{"' } -// { chunk: "say" } -// { chunk: '":"' } -// { chunk: "hello" } -// { chunk: '"}' } -// { finished: true, text: '{"say":"hello"}', data: { say: "hello" } } - -const { data } = await structuredDataStream.result -console.log(data) -// { say: "hello" } - -// --- Running with Tools - -const prompt = - "What's 9876356 divided by 30487, rounded to the nearest integer?" 
- -const tools = { - subtract: z - .object({ - minuend: z.number().describe("The number to subtract from"), - subtrahend: z.number().describe("The number to subtract by"), - }) - .describe("Calculate a subtraction expression"), - divide: z - .object({ - dividend: z.number().describe("The number to be divided"), - divisor: z.number().describe("The number to divide by"), - }) - .describe("Calculate a division expression"), -} - -// Run Kurt in a loop until it produces a natural language response, -// or until we reach a maximum number of iterations. -const extraMessages: KurtMessage[] = [] -const MAX_ITERATIONS = 3 -for (let i = 0; i < MAX_ITERATIONS; i++) { - const { text, data } = await kurt.generateWithOptionalTools({ - prompt, - tools, - }).result - - // If there is data in the result, it means the LLM made a tool call. - if (data) { - const { name, args } = data - let result = {} - if (name === "divide") { - result = { quotient: args.dividend / args.divisor } - } else if (name === "subtract") { - result = { difference: args.minuend - args.subtrahend } - } - const toolCall = { name, args, result } - extraMessages.push({ role: "model", toolCall }) - console.log(toolCall) - // { - // name: "divide", - // args: { dividend: 9876356, divisor: 30487 }, - // result: { quotient: 323.95302915996984 }, - // } - } else { - console.log(text) // "The answer, rounded to the nearest integer, is 324." - break - } -} diff --git a/examples/basic/src/natural-language.ts b/examples/basic/src/natural-language.ts new file mode 100644 index 0000000..7378a77 --- /dev/null +++ b/examples/basic/src/natural-language.ts @@ -0,0 +1,27 @@ +import { createKurt } from "./util/createKurt" +const kurt = createKurt(process.env.KURT_MODEL) + +const stream = kurt.generateNaturalLanguage({ + prompt: "Say hello!", +}) + +for await (const event of stream) { + console.log(event) +} +// { chunk: "Hello" } +// { chunk: "!" } +// { chunk: " How" } +// { chunk: " can" } +// { chunk: " I" } +// { chunk: " assist" } +// { chunk: " you" } +// { chunk: " today" } +// { chunk: "?" } +// { +// finished: true, +// text: "Hello! How can I assist you today?", +// data: undefined, +// } + +const { text } = await stream.result +console.log(text) // "Hello! How can I assist you today?" 
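
The new natural-language.ts example above logs every streamed event and then awaits `stream.result`. As a minimal sketch (an illustration, not part of the change itself), the same stream can also be consumed by accumulating the `chunk` values by hand; this assumes the event shapes match the logged output above, where only incremental events carry a `chunk` field and the final event carries `finished` and `text`.

```ts
import { createKurt } from "./util/createKurt"

const kurt = createKurt(process.env.KURT_MODEL)

const stream = kurt.generateNaturalLanguage({ prompt: "Say hello!" })

// Accumulate the incremental chunks as they arrive.
let accumulated = ""
for await (const event of stream) {
  if ("chunk" in event) accumulated += event.chunk
}

// The finished result's text should match the concatenated chunks
// (assuming the event shapes shown in the logged output above).
const { text } = await stream.result
console.log(accumulated === text) // expected: true
```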
diff --git a/examples/basic/src/structured-data.ts b/examples/basic/src/structured-data.ts new file mode 100644 index 0000000..137c346 --- /dev/null +++ b/examples/basic/src/structured-data.ts @@ -0,0 +1,24 @@ +import { z } from "zod" +import { createKurt } from "./util/createKurt" +const kurt = createKurt(process.env.KURT_MODEL) + +const structuredDataStream = kurt.generateStructuredData({ + prompt: "Say hello!", + schema: z.object({ + say: z.string().describe("A single word to say"), + }), +}) + +for await (const event of structuredDataStream) { + console.log(event) +} +// { chunk: '{"' } +// { chunk: "say" } +// { chunk: '":"' } +// { chunk: "hello" } +// { chunk: '"}' } +// { finished: true, text: '{"say":"hello"}', data: { say: "hello" } } + +const { data } = await structuredDataStream.result +console.log(data) +// { say: "hello" } diff --git a/examples/basic/src/util/createKurt.ts b/examples/basic/src/util/createKurt.ts new file mode 100644 index 0000000..fd9fc24 --- /dev/null +++ b/examples/basic/src/util/createKurt.ts @@ -0,0 +1,29 @@ +import { Kurt, type KurtAdapter } from "@formula-monks/kurt" + +import { KurtOpenAI } from "@formula-monks/kurt-open-ai" +import { OpenAI } from "openai" + +import { KurtVertexAI } from "@formula-monks/kurt-vertex-ai" +import { VertexAI } from "@google-cloud/vertexai" + +const DEFAULT_MODEL = "gpt-4o" + +export const createKurt = (model = DEFAULT_MODEL): Kurt => { + const adapter = findAdapter(model) + if (!adapter) throw new Error(`Model ${model} is not supported.`) + return new Kurt(adapter) +} + +const findAdapter = (model: string): KurtAdapter | undefined => { + if (KurtOpenAI.isSupportedModel(model)) + return new KurtOpenAI({ openAI: new OpenAI(), model }) + + if (KurtVertexAI.isSupportedModel(model)) + return new KurtVertexAI({ + vertexAI: new VertexAI({ + project: process.env.VERTEX_AI_PROJECT ?? "my-project", + location: process.env.VERTEX_AI_LOCATION ?? "us-central1", + }), + model, + }) +} diff --git a/examples/basic/src/with-optional-tools.ts b/examples/basic/src/with-optional-tools.ts new file mode 100644 index 0000000..40571e3 --- /dev/null +++ b/examples/basic/src/with-optional-tools.ts @@ -0,0 +1,56 @@ +import { z } from "zod" +import type { KurtMessage } from "@formula-monks/kurt" +import { createKurt } from "./util/createKurt" +const kurt = createKurt(process.env.KURT_MODEL) + +const prompt = + "What's 9876356 divided by 30487, rounded to the nearest integer?" + +const tools = { + subtract: z + .object({ + minuend: z.number().describe("The number to subtract from"), + subtrahend: z.number().describe("The number to subtract by"), + }) + .describe("Calculate a subtraction expression"), + divide: z + .object({ + dividend: z.number().describe("The number to be divided"), + divisor: z.number().describe("The number to divide by"), + }) + .describe("Calculate a division expression"), +} + +// Run Kurt in a loop until it produces a natural language response, +// or until we reach a maximum number of iterations. +const extraMessages: KurtMessage[] = [] +const MAX_ITERATIONS = 3 +for (let i = 0; i < MAX_ITERATIONS; i++) { + const { text, data } = await kurt.generateWithOptionalTools({ + prompt, + tools, + extraMessages, + }).result + + // If there is data in the result, it means the LLM made a tool call. 
+ if (data) { + const { name, args } = data + let result = {} + if (name === "divide") { + result = { quotient: args.dividend / args.divisor } + } else if (name === "subtract") { + result = { difference: args.minuend - args.subtrahend } + } + const toolCall = { name, args, result } + extraMessages.push({ role: "model", toolCall }) + console.log(toolCall) + // { + // name: "divide", + // args: { dividend: 9876356, divisor: 30487 }, + // result: { quotient: 323.95302915996984 }, + // } + } else { + console.log(text) // "The answer, rounded to the nearest integer, is 324." + break + } +} diff --git a/examples/basic/tsconfig.json b/examples/basic/tsconfig.json index 107ccf7..da23fd3 100644 --- a/examples/basic/tsconfig.json +++ b/examples/basic/tsconfig.json @@ -10,6 +10,6 @@ "moduleResolution": "bundler", "target": "es2022" }, - "include": ["src/*.ts"], + "include": ["src/**/*.ts"], "exclude": ["node_modules", "dist"] } diff --git a/packages/kurt/README.md b/packages/kurt/README.md index 3015648..bcbd0ee 100644 --- a/packages/kurt/README.md +++ b/packages/kurt/README.md @@ -8,7 +8,7 @@ This package implements the core functionality of Kurt, providing the common int # Examples -Check the [example folder](https://github.com/FormulaMonks/kurt/tree/main/examples) +Check the [examples folder](https://github.com/FormulaMonks/kurt/tree/main/examples) for runnable example files. ## Create Kurt with your LLM of choice @@ -21,33 +21,7 @@ You can see usage examples for setup with different adapters in the respective a The most basic use case for an LLM is to ask it to generate some text. -```ts -const stream = kurt.generateNaturalLanguage({ - prompt: "Say hello!", -}) - -for await (const event of stream) { - console.log(event) -} -// { chunk: "Hello" } -// { chunk: "!" } -// { chunk: " How" } -// { chunk: " can" } -// { chunk: " I" } -// { chunk: " assist" } -// { chunk: " you" } -// { chunk: " today" } -// { chunk: "?" } -// { -// finished: true, -// text: "Hello! How can I assist you today?", -// data: undefined, -// } - -const { text } = await stream.result -console.log(text) -// "Hello! How can I assist you today?" -``` +[This example code](../../examples/basic/src/natural-language.ts) shows how to use Kurt to generate natural language. ## Generate Structured Data Output @@ -57,30 +31,7 @@ Using the `zod` library as a convenient way to specify a JSON schema in TypeScri For best results, be sure to include descriptions of every field in the schema, as these will be used by the LLM as documentation to determine how best to fill the fields with data. -```ts -import { z } from "zod" - -const stream = kurt.generateStructuredData({ - prompt: "Say hello!", - schema: z.object({ - say: z.string().describe("A single word to say"), - }), -}) - -for await (const event of stream) { - console.log(event) -} -// { chunk: '{"' } -// { chunk: "say" } -// { chunk: '":"' } -// { chunk: "hello" } -// { chunk: '"}' } -// { finished: true, text: '{"say":"hello"}', data: { say: "hello" } } - -const { data } = await stream.result -console.log(data) -// { say: "hello" } -``` +[This example code](../../examples/basic/src/structured-data.ts) shows how to use Kurt to generate structured data. ## Generate With Optional Tools @@ -92,57 +43,4 @@ As above, we can use the `zod` library to conveniently declare the JSON schema f Again, for best results, we should include helpful descriptions of each tool schema, and each field within them, so that the LLM can make a more informed decision about how to use the tools. 
-```ts -import { z } from "zod" - -const prompt = - "What's 9876356 divided by 30487, rounded to the nearest integer?" - -const tools = { - subtract: z - .object({ - minuend: z.number().describe("The number to subtract from"), - subtrahend: z.number().describe("The number to subtract by"), - }) - .describe("Calculate a subtraction expression"), - divide: z - .object({ - dividend: z.number().describe("The number to be divided"), - divisor: z.number().describe("The number to divide by"), - }) - .describe("Calculate a division expression"), -} - -// Run Kurt in a loop until it produces a natural language response, -// or until we reach a maximum number of iterations. -const extraMessages: KurtMessage[] = [] -const MAX_ITERATIONS = 3 -for (let i = 0; i < MAX_ITERATIONS; i++) { - const { text, data } = await kurt.generateWithOptionalTools({ - prompt, - tools, - }).result - - // If there is data in the result, it means the LLM made a tool call. - if (data) { - const { name, args } = data - let result = {} - if (name === "divide") { - result = { quotient: args.dividend / args.divisor } - } else if (name === "subtract") { - result = { difference: args.minuend - args.subtrahend } - } - const toolCall = { name, args, result } - extraMessages.push({ role: "model", toolCall }) - console.log(toolCall) - // { - // name: "divide", - // args: { dividend: 9876356, divisor: 30487 }, - // result: { quotient: 323.95302915996984 }, - // } - } else { - console.log(text) // "The answer, rounded to the nearest integer, is 324." - break - } -} -``` +[This example code](../../examples/basic/src/with-optional-tools.ts) shows how to use Kurt to generate in a loop with optional tools.
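
The README sections above now link to the runnable examples instead of inlining them. As a hedged sketch (an illustration, not part of the change itself), the tool loop from with-optional-tools.ts can be wrapped in a reusable helper that returns the final natural-language answer, or `undefined` when the iteration budget is exhausted before the model stops calling tools. The `answerWithTools` name, its return convention, and the single `divide` tool are assumptions made for brevity; the Kurt calls themselves are the ones already used in the example.

```ts
import { z } from "zod"
import type { KurtMessage } from "@formula-monks/kurt"
import { createKurt } from "./util/createKurt"

const kurt = createKurt(process.env.KURT_MODEL)

// A single tool, trimmed down from the example for brevity.
const tools = {
  divide: z
    .object({
      dividend: z.number().describe("The number to be divided"),
      divisor: z.number().describe("The number to divide by"),
    })
    .describe("Calculate a division expression"),
}

// Hypothetical helper: run the generate-with-tools loop until the model
// answers in natural language, or give up after `maxIterations` rounds.
const answerWithTools = async (
  prompt: string,
  maxIterations = 3
): Promise<string | undefined> => {
  const extraMessages: KurtMessage[] = []
  for (let i = 0; i < maxIterations; i++) {
    const { text, data } = await kurt.generateWithOptionalTools({
      prompt,
      tools,
      extraMessages,
    }).result

    // No tool call means the model answered in natural language; we're done.
    if (!data) return text

    // Otherwise evaluate the tool call and feed the result back to the model.
    const { name, args } = data
    let result = {}
    if (name === "divide") {
      result = { quotient: args.dividend / args.divisor }
    }
    extraMessages.push({ role: "model", toolCall: { name, args, result } })
  }
  return undefined // Iteration budget exhausted without a final answer.
}

console.log(await answerWithTools("What's 9876356 divided by 30487?"))
```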