Skip to content

Commit

Permalink
fix: replace openai client with anthropic
Browse files Browse the repository at this point in the history
  • Loading branch information
sshivaditya committed Jan 10, 2025
1 parent 3ad8ca6 commit c4de02e
Show file tree
Hide file tree
Showing 8 changed files with 86 additions and 65 deletions.
2 changes: 2 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
{
}
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,13 @@
"dependencies": {
"@actions/core": "1.10.1",
"@actions/github": "6.0.0",
"@anthropic-ai/sdk": "^0.33.1",
"@octokit/rest": "20.1.1",
"@octokit/webhooks": "13.2.7",
"@sinclair/typebox": "0.32.33",
"@ubiquity-dao/ubiquibot-logger": "^1.3.0",
"dotenv": "16.4.5",
"glob": "^11.0.0",
"openai": "^4.77.4",
"typebox-validators": "0.3.5"
},
"devDependencies": {
Expand Down
11 changes: 11 additions & 0 deletions src/adapters/anthropic/helpers/anthropic.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import Anthropic from "@anthropic-ai/sdk";
import { Context } from "../../../types/context";

/**
 * Thin base adapter that holds the shared Anthropic SDK client and the
 * plugin {@link Context}. Subclasses (e.g. `Completions`) access both via
 * the `protected` fields.
 */
export class SuperAnthropic {
  // Parameter properties declare and assign `client` / `context` in one step;
  // identical runtime behavior to explicit field declarations + assignments.
  constructor(
    protected client: Anthropic,
    protected context: Context
  ) {}
}
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
import OpenAI from "openai";
import { Context } from "../../../types/context";
import { SuperOpenAi } from "./openai";
import { SuperAnthropic } from "./anthropic";
import { Tool, ToolResult, ToolResultMap, DirectoryExploreResult } from "../../../types/tool";
import { ReadFile } from "../../../tools/read-file";
import { WriteFile } from "../../../tools/write-file";
import { ExploreDir } from "../../../tools/explore-dir";
import { SearchFiles } from "../../../tools/search-files";
import { CreatePr } from "../../../tools/create-pr";
import Anthropic from "@anthropic-ai/sdk";
import { ContentBlock, MessageParam, Message } from "@anthropic-ai/sdk/resources";

const MAX_TRIES = 5;

const sysMsg = `You are a capable AI assistant currently running on a GitHub bot.
const sysMsg: string = `You are a capable AI assistant currently running on a GitHub bot.
You are designed to assist with resolving issues by making incremental fixes using a standardized tool interface.
Each tool implements a common interface that provides consistent error handling and result reporting.
Expand Down Expand Up @@ -153,18 +154,13 @@ interface ToolRequest {
};
}

type ChatMessage = {
role: "system" | "user" | "assistant";
content: string;
};

export class Completions extends SuperOpenAi {
export class Completions extends SuperAnthropic {
protected maxTokens: number;
protected tools: ToolSet;
protected llmAttempts: number;
protected toolAttempts: number;

constructor(client: OpenAI, context: Context) {
constructor(client: Anthropic, context: Context) {
super(client, context);
this.maxTokens = 100000;
this.llmAttempts = 0;
Expand Down Expand Up @@ -262,13 +258,20 @@ export class Completions extends SuperOpenAi {
}

private async _checkSolution(prompt: string, model: string): Promise<boolean> {
const res = await this.client.chat.completions.create({
const res = await this.client.messages.create({
model,
messages: [
{
role: "system",
content:
"You are a solution validator. Respond with 'SOLVED' if the issue is completely resolved, or 'CONTINUE' if more work is needed. Provide a brief explanation after your decision.",
role: "assistant",
content: [
{
text: "You are a solution validator. Respond with 'SOLVED' if the issue is completely resolved, or 'CONTINUE' if more work is needed. Provide a brief explanation after your decision.",
type: "text",
cache_control: {
type: "ephemeral",
},
},
],
},
{
role: "user",
Expand All @@ -279,7 +282,10 @@ export class Completions extends SuperOpenAi {
max_tokens: 50,
});

const response = res.choices[0]?.message?.content || "";
const response =
res.content.filter((obj: ContentBlock) => {
return obj.type === "text";
})[0].text || "";
return response.includes("SOLVED");
}

Expand Down Expand Up @@ -355,13 +361,8 @@ export class Completions extends SuperOpenAi {
this.tools.searchFiles = new SearchFiles(workingDir);

let isSolved = false;
let finalResponse: OpenAI.Chat.Completions.ChatCompletion | null = null;
const conversationHistory: ChatMessage[] = [
{
role: "system",
content: sysMsg,
},
];
let finalResponse: (Message & { _request_id?: string | null }) | null = null;
const conversationHistory: Array<MessageParam> = [];

while (this.llmAttempts < MAX_TRIES && !isSolved) {
// Get the directory tree
Expand All @@ -376,32 +377,49 @@ export class Completions extends SuperOpenAi {
// Add the current state to conversation
conversationHistory.push({
role: "user",
content: `Current LLM attempt: ${this.llmAttempts + 1}/${MAX_TRIES}\nWorking directory: ${workingDir}\n\nDirectory structure:\n${treeOutput}\n\nPrevious solution state: ${currentSolution}\n\nOriginal request: ${prompt}`,
content: [
{
type: "text",
text: `Current LLM attempt: ${this.llmAttempts + 1}/${MAX_TRIES}\nWorking directory: ${workingDir}\n\nDirectory structure:\n${treeOutput}\n\nPrevious solution state: ${currentSolution}\n\nOriginal request: ${prompt}`,
},
],
});

const res = await this.client.chat.completions.create({
model,
const res = await this.client.messages.create({
model: model,
messages: conversationHistory,
temperature: 0.2,
max_tokens: this.maxTokens,
top_p: 0.5,
frequency_penalty: 0,
presence_penalty: 0,
system: [
{
type: "text",
text: sysMsg,
cache_control: {
type: "ephemeral",
},
},
],
});

this.context.logger.info("LLM response:", { response: res });
finalResponse = res;
const textBlock = res.content.filter((obj: ContentBlock) => obj.type === "text")[0];
finalResponse = textBlock ? res : null;

// Get the LLM's response
const llmResponse = res.choices[0]?.message?.content || "";
const llmResponse = res.content.find((c) => c.type === "text")?.text || "";

// Process any tool requests in the response
const processedResponse = await this._processResponse(llmResponse, workingDir);

// Add the processed response to conversation history
conversationHistory.push({
role: "assistant",
content: processedResponse,
content: [
{
type: "text",
text: processedResponse,
},
],
});

// Update current solution state
Expand Down
12 changes: 6 additions & 6 deletions src/adapters/index.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import { Context } from "../types";
import { SuperOpenAi } from "./openai/helpers/openai";
import OpenAI from "openai";
import { Completions } from "./openai/helpers/completions";
import { SuperAnthropic } from "./anthropic/helpers/anthropic";
import { Completions } from "./anthropic/helpers/completions";
import Anthropic from "@anthropic-ai/sdk";

export function createAdapters(openai: OpenAI, context: Context) {
export function createAdapters(anthropic: Anthropic, context: Context) {
return {
openai: {
completions: new Completions(openai, context),
super: new SuperOpenAi(openai, context),
completions: new Completions(anthropic, context),
super: new SuperAnthropic(anthropic, context),
},
};
}
11 changes: 0 additions & 11 deletions src/adapters/openai/helpers/openai.ts

This file was deleted.

7 changes: 4 additions & 3 deletions src/plugin.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { delegate } from "./handlers/front-controller";
import { Context, Env, PluginInputs } from "./types";
import { isIssueCommentEvent } from "./types/typeguards";
import { createAdapters } from "./adapters";
import OpenAI from "openai";
import Anthropic from "@anthropic-ai/sdk";

/**
* The main plugin function. Split for easier testing.
Expand Down Expand Up @@ -36,11 +36,12 @@ export async function plugin(inputs: PluginInputs, env: Env) {
adapters: {} as ReturnType<typeof createAdapters>,
};

const openaiClient = new OpenAI({
const anthropic = new Anthropic({
baseURL: "https://openrouter.ai/api/v1",
apiKey: env.OPENROUTER_API_KEY,
});
context.adapters = createAdapters(openaiClient, context);

context.adapters = createAdapters(anthropic, context);

/**
* NOTICE: Consider non-database storage solutions unless necessary
Expand Down
26 changes: 13 additions & 13 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,19 @@
"@jridgewell/gen-mapping" "^0.3.5"
"@jridgewell/trace-mapping" "^0.3.24"

"@anthropic-ai/sdk@^0.33.1":
version "0.33.1"
resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.33.1.tgz#83be59ad8cbd6634f48155b0aa4b7287cfba1b8a"
integrity sha512-VrlbxiAdVRGuKP2UQlCnsShDHJKWepzvfRCkZMpU+oaUdKLpOfmylLMRojGrAgebV+kDtPjewCVP0laHXg+vsA==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"

"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.24.6":
version "7.24.6"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.6.tgz#ab88da19344445c3d8889af2216606d3329f3ef2"
Expand Down Expand Up @@ -5237,19 +5250,6 @@ onetime@^6.0.0:
dependencies:
mimic-fn "^4.0.0"

openai@^4.77.4:
version "4.77.4"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.77.4.tgz#1093d165efb3e13e763faf42fa62e34313e293e9"
integrity sha512-rShjKsZ/HXm1cSxXt6iFeZxiCohrVShawt0aRRQmbb+z/EXcH4OouyQZP1ShyZMb63LJajpl8aGw3DzEi8Wh9Q==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"

optionator@^0.9.3:
version "0.9.4"
resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734"
Expand Down

0 comments on commit c4de02e

Please sign in to comment.