This repository has been archived by the owner on Sep 19, 2024. It is now read-only.

chore: debug #815

Closed
wants to merge 2 commits into from
4 changes: 2 additions & 2 deletions src/bindings/config.ts
@@ -103,8 +103,8 @@ export const loadConfig = async (context: Context): Promise<BotConfig> => {
registerWalletWithVerification: registerWalletWithVerification,
},
ask: {
apiKey: openAIKey,
tokenLimit: openAITokenLimit || 0,
apiKey: process.env.OPENAI_API_KEY || openAIKey,
tokenLimit: openAITokenLimit || 8000,
},
accessControl: enableAccessControl,
newContributorGreeting: newContributorGreeting,
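Note on this hunk: the effect is that an OPENAI_API_KEY environment variable now takes precedence over the key coming from the merged bot config, and the token limit falls back to 8000 instead of 0. A minimal standalone sketch of that resolution order (the resolveAskConfig helper and AskConfig shape below are illustrative, not the bot's actual types):

// Illustrative sketch only: mirrors the precedence in the hunk above.
interface AskConfig {
  apiKey: string | undefined;
  tokenLimit: number;
}

function resolveAskConfig(openAIKey?: string, openAITokenLimit?: number): AskConfig {
  return {
    apiKey: process.env.OPENAI_API_KEY || openAIKey, // env var wins over the config value
    tokenLimit: openAITokenLimit || 8000, // default to 8000 tokens rather than 0
  };
}

// Example: with no env var and no config values set, the limit is still usable.
// resolveAskConfig(undefined, undefined) -> { apiKey: undefined, tokenLimit: 8000 }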
18 changes: 9 additions & 9 deletions src/configs/ubiquibot-config-default.ts
Member

These need to be off by default

@@ -50,39 +50,39 @@ export const DefaultConfig: MergedConfig = {
commandSettings: [
{
name: "start",
enabled: false,
enabled: true,
},
{
name: "stop",
enabled: false,
enabled: true,
},
{
name: "wallet",
enabled: false,
enabled: true,
},
{
name: "payout",
enabled: false,
enabled: true,
},
{
name: "multiplier",
enabled: false,
enabled: true,
},
{
name: "query",
enabled: false,
enabled: true,
},
{
name: "ask",
enabled: false,
enabled: true,
},
{
name: "allow",
enabled: false,
enabled: true,
},
{
name: "autopay",
enabled: false,
enabled: true,
},
],
incentives: {
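As the review comment above points out, these commands are meant to ship disabled and be turned on per repository. A minimal sketch of the kind of name-based merge that would honor such an override on top of these defaults (the mergeCommandSettings helper is illustrative, not the bot's actual config-merge logic):

interface CommandSetting {
  name: string;
  enabled: boolean;
}

// Start from the shipped defaults and apply per-repository overrides by command name.
function mergeCommandSettings(defaults: CommandSetting[], overrides: CommandSetting[]): CommandSetting[] {
  return defaults.map((cmd) => overrides.find((o) => o.name === cmd.name) ?? cmd);
}

// Example: a repository opting in to only the "ask" command.
const merged = mergeCommandSettings(
  [
    { name: "start", enabled: false },
    { name: "ask", enabled: false },
  ],
  [{ name: "ask", enabled: true }]
);
// merged: "ask" is enabled, everything else stays off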
1 change: 1 addition & 0 deletions src/handlers/comment/action.ts
@@ -33,6 +33,7 @@ export const handleComment = async (): Promise<void> => {
if (userCommand) {
const { id, handler, callback, successComment, failureComment } = userCommand;
logger.info(`Running a comment handler: ${handler.name}`);
console.log("running a comment handler: ", handler.name);

const { payload: _payload } = getBotContext();
const issue = (_payload as Payload).issue;
20 changes: 15 additions & 5 deletions src/handlers/comment/handlers/ask.ts
@@ -16,12 +16,14 @@ export const ask = async (body: string) => {
const sender = payload.sender.login;
const issue = payload.issue;

console.log("body", body);

if (!body) {
return `Please ask a question`;
}

if (!issue) {
return `This command can only be used on issues`;
return `This command can only be used on issues and pull requests.`;
}

const chatHistory: CreateChatCompletionRequestMessage[] = [];
@@ -35,9 +37,11 @@ export const ask = async (body: string) => {
if (matches) {
const [, body] = matches;

console.log("body", body);

// standard comments
const comments = await getAllIssueComments(issue.number);
// raw so we can grab the <!--- { 'UbiquityAI': 'answer' } ---> tag
// raw so we can grab the <!--- { 'UbiquiBot': 'answer' } ---> tag
const commentsRaw = await getAllIssueComments(issue.number, "raw");

if (!comments) {
@@ -53,7 +57,7 @@ export const ask = async (body: string) => {

// add the rest
comments.forEach(async (comment, i) => {
if (comment.user.type == UserType.User || commentsRaw[i].body.includes("<!--- { 'UbiquityAI': 'answer' } --->")) {
if (comment.user.type == UserType.User || commentsRaw[i].body.includes("<!--- { 'UbiquiBot': 'answer' } --->")) {
streamlined.push({
login: comment.user.login,
body: comment.body,
@@ -74,13 +78,15 @@ export const ask = async (body: string) => {
// let chatgpt deduce what is the most relevant context
const gptDecidedContext = await decideContextGPT(chatHistory, streamlined, linkedPRStreamlined, linkedIssueStreamlined);

console.log("gptDecidedContext", gptDecidedContext);

if (linkedIssueStreamlined.length == 0 && linkedPRStreamlined.length == 0) {
// No external context to add
chatHistory.push(
{
role: "system",
content: sysMsg,
name: "UbiquityAI",
name: "UbiquiBot",
} as CreateChatCompletionRequestMessage,
{
role: "user",
@@ -93,7 +99,7 @@ export const ask = async (body: string) => {
{
role: "system",
content: sysMsg, // provide the answer template
name: "UbiquityAI",
name: "UbiquiBot",
} as CreateChatCompletionRequestMessage,
{
role: "system",
@@ -108,8 +114,12 @@
);
}

console.log("chatHistory pre ask");

const gptResponse = await askGPT(body, chatHistory);

console.log("chatHistory pre ask");

if (typeof gptResponse === "string") {
return gptResponse;
} else if (gptResponse.answer) {
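For reference, in the branch with no linked issues or PRs the hunks above boil down to a two-message history plus the question passed to askGPT. A minimal sketch of that shape, reusing the import path and casts shown in this diff (the sysMsg text, question, and user name here are placeholders):

import { CreateChatCompletionRequestMessage } from "openai/resources/chat";

// Placeholder values; in the handler these come from sysMsg, the comment body, and payload.sender.login.
const sysMsg = "You are the UbiquiBot, designed to provide accurate technical answers.";
const question = "How does the payout command resolve the wallet address?";
const sender = "some-contributor";

const chatHistory: CreateChatCompletionRequestMessage[] = [
  { role: "system", content: sysMsg, name: "UbiquiBot" } as CreateChatCompletionRequestMessage,
  { role: "user", content: question, name: sender } as CreateChatCompletionRequestMessage,
];

// const gptResponse = await askGPT(question, chatHistory);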
21 changes: 11 additions & 10 deletions src/helpers/gpt.ts
@@ -5,16 +5,16 @@ import OpenAI from "openai";
import { CreateChatCompletionRequestMessage } from "openai/resources/chat";
import { ErrorDiff } from "../utils/helpers";

export const sysMsg = `You are the UbiquityAI, designed to provide accurate technical answers. \n
export const sysMsg = `You are the UbiquiBot, designed to provide accurate technical answers. \n
Whenever appropriate, format your response using GitHub Flavored Markdown. Utilize tables, lists, and code blocks for clear and organized answers. \n
Do not make up answers. If you are unsure, say so. \n
Original Context exists only to provide you with additional information to the current question, use it to formulate answers. \n
Infer the context of the question from the Original Context using your best judgement. \n
All replies MUST end with "\n\n <!--- { 'UbiquityAI': 'answer' } ---> ".\n
All replies MUST end with "\n\n <!--- { 'UbiquiBot': 'answer' } ---> ".\n
`;

export const gptContextTemplate = `
You are the UbiquityAI, designed to review and analyze pull requests.
You are the UbiquiBot, designed to review and analyze pull requests.
You have been provided with the spec of the issue and all linked issues or pull requests.
Using this full context, reply in pure JSON format with the following structure, omitting irrelevant information pertaining to the specification.
You MUST provide the following structure, but you may add additional information if you deem it relevant.
@@ -79,7 +79,7 @@ export const decideContextGPT = async (

// standard comments
const comments = await getAllIssueComments(issue.number);
// raw so we can grab the <!--- { 'UbiquityAI': 'answer' } ---> tag
// raw so we can grab the <!--- { 'UbiquiBot': 'answer' } ---> tag
const commentsRaw = await getAllIssueComments(issue.number, "raw");

if (!comments) {
@@ -95,7 +95,7 @@

// add the rest
comments.forEach(async (comment, i) => {
if (comment.user.type == UserType.User || commentsRaw[i].body.includes("<!--- { 'UbiquityAI': 'answer' } --->")) {
if (comment.user.type == UserType.User || commentsRaw[i].body.includes("<!--- { 'UbiquiBot': 'answer' } --->")) {
streamlined.push({
login: comment.user.login,
body: comment.body,
@@ -117,18 +117,19 @@
chatHistory.push(
{
role: "system",
content: gptContextTemplate,
},
{
role: "assistant",
content: "This issue/Pr context: \n" + JSON.stringify(streamlined),
name: "UbiquityAI",
} as CreateChatCompletionRequestMessage,
{
role: "system",
role: "assistant",
content: "Linked issue(s) context: \n" + JSON.stringify(linkedIssueStreamlined),
name: "UbiquityAI",
} as CreateChatCompletionRequestMessage,
{
role: "system",
role: "assistant",
content: "Linked Pr(s) context: \n" + JSON.stringify(linkedPRStreamlined),
name: "UbiquityAI",
} as CreateChatCompletionRequestMessage
);

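The visible effect of the role changes in this hunk is that the instruction template remains the only system message, while the issue context and the linked issue/PR context are now sent as assistant messages. A minimal sketch of the resulting array, with placeholder context objects (the streamlined shapes are illustrative, not the bot's exact types):

import { CreateChatCompletionRequestMessage } from "openai/resources/chat";

// Placeholder context; in decideContextGPT these arrays are built from issue and PR comments.
const gptContextTemplate = "You are the UbiquiBot, designed to review and analyze pull requests.";
const streamlined = [{ login: "alice", body: "Original issue spec" }];
const linkedIssueStreamlined = [{ login: "bob", body: "Linked issue discussion" }];
const linkedPRStreamlined = [{ login: "carol", body: "Linked PR review note" }];

const chatHistory: CreateChatCompletionRequestMessage[] = [
  { role: "system", content: gptContextTemplate } as CreateChatCompletionRequestMessage,
  { role: "assistant", content: "This issue/Pr context: \n" + JSON.stringify(streamlined), name: "UbiquityAI" } as CreateChatCompletionRequestMessage,
  { role: "assistant", content: "Linked issue(s) context: \n" + JSON.stringify(linkedIssueStreamlined), name: "UbiquityAI" } as CreateChatCompletionRequestMessage,
  { role: "assistant", content: "Linked Pr(s) context: \n" + JSON.stringify(linkedPRStreamlined), name: "UbiquityAI" } as CreateChatCompletionRequestMessage,
];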
4 changes: 2 additions & 2 deletions src/helpers/issue.ts
@@ -745,7 +745,7 @@ export const getAllLinkedIssuesAndPullsInBody = async (issueNumber: number) => {
const prComments = await getAllIssueComments(linkedPrs[i]);
const prCommentsRaw = await getAllIssueComments(linkedPrs[i], "raw");
prComments.forEach(async (comment, i) => {
if (comment.user.type == UserType.User || prCommentsRaw[i].body.includes("<!--- { 'UbiquityAI': 'answer' } --->")) {
if (comment.user.type == UserType.User || prCommentsRaw[i].body.includes("<!--- { 'UbiquiBot': 'answer' } --->")) {
linkedPRStreamlined.push({
login: comment.user.login,
body: comment.body,
@@ -767,7 +767,7 @@ export const getAllLinkedIssuesAndPullsInBody = async (issueNumber: number) => {
const issueComments = await getAllIssueComments(linkedIssues[i]);
const issueCommentsRaw = await getAllIssueComments(linkedIssues[i], "raw");
issueComments.forEach(async (comment, i) => {
if (comment.user.type == UserType.User || issueCommentsRaw[i].body.includes("<!--- { 'UbiquityAI': 'answer' } --->")) {
if (comment.user.type == UserType.User || issueCommentsRaw[i].body.includes("<!--- { 'UbiquiBot': 'answer' } --->")) {
linkedIssueStreamlined.push({
login: comment.user.login,
body: comment.body,
2 changes: 1 addition & 1 deletion src/utils/private.ts
@@ -158,7 +158,7 @@ export const getWideConfig = async (context: Context) => {
promotionComment: mergedConfigData.promotionComment,
registerWalletWithVerification: mergedConfigData.registerWalletWithVerification,
enableAccessControl: mergedConfigData.enableAccessControl,
openAIKey: mergedConfigData.openAIKey,
openAIKey: process.env.OPENAI_API_KEY || mergedConfigData.openAIKey,
openAITokenLimit: mergedConfigData.openAITokenLimit,
staleBountyTime: mergedConfigData.staleBountyTime,
newContributorGreeting: mergedConfigData.newContributorGreeting,