diff --git a/src/bindings/config.ts b/src/bindings/config.ts
index 55d0fb8be..43b5718fa 100644
--- a/src/bindings/config.ts
+++ b/src/bindings/config.ts
@@ -65,9 +65,7 @@ export const loadConfig = async (context: Context): Promise => {
       permitBaseUrl: process.env.PERMIT_BASE_URL || permitBaseUrl,
     },
     unassign: {
-      timeRangeForMaxIssue: process.env.DEFAULT_TIME_RANGE_FOR_MAX_ISSUE
-        ? Number(process.env.DEFAULT_TIME_RANGE_FOR_MAX_ISSUE)
-        : timeRangeForMaxIssue,
+      timeRangeForMaxIssue: process.env.DEFAULT_TIME_RANGE_FOR_MAX_ISSUE ? Number(process.env.DEFAULT_TIME_RANGE_FOR_MAX_ISSUE) : timeRangeForMaxIssue,
       timeRangeForMaxIssueEnabled: process.env.DEFAULT_TIME_RANGE_FOR_MAX_ISSUE_ENABLED
         ? process.env.DEFAULT_TIME_RANGE_FOR_MAX_ISSUE_ENABLED == "true"
         : timeRangeForMaxIssueEnabled,
diff --git a/src/configs/ubiquibot-config-default.ts b/src/configs/ubiquibot-config-default.ts
index 9598ef782..fe9f1552d 100644
--- a/src/configs/ubiquibot-config-default.ts
+++ b/src/configs/ubiquibot-config-default.ts
@@ -76,6 +76,10 @@ export const DefaultConfig: MergedConfig = {
       name: "ask",
       enabled: false,
     },
+    {
+      name: "review",
+      enabled: false,
+    },
     {
       name: "allow",
       enabled: false,
diff --git a/src/handlers/comment/commands.ts b/src/handlers/comment/commands.ts
index 9bce9fc6e..af6587741 100644
--- a/src/handlers/comment/commands.ts
+++ b/src/handlers/comment/commands.ts
@@ -7,6 +7,7 @@ export enum IssueCommentCommands {
   MULTIPLIER = "/multiplier", // set bounty multiplier (for treasury)
   QUERY = "/query",
   ASK = "/ask", // ask GPT a question
+  REVIEW = "/review", // GPT pull request review
 
   // Access Controls
   ALLOW = "/allow",
diff --git a/src/handlers/comment/handlers/index.ts b/src/handlers/comment/handlers/index.ts
index 4dcbf7f21..e4b7a69f8 100644
--- a/src/handlers/comment/handlers/index.ts
+++ b/src/handlers/comment/handlers/index.ts
@@ -38,6 +38,7 @@ import { autoPay } from "./payout";
 import { getTargetPriceLabel } from "../../shared";
 import Decimal from "decimal.js";
 import { ErrorDiff } from "../../../utils/helpers";
+import { review } from "./review";
 
 export * from "./assign";
 export * from "./wallet";
@@ -47,6 +48,7 @@ export * from "./help";
 export * from "./multiplier";
 export * from "./query";
 export * from "./ask";
+export * from "./review";
 export * from "./authorize";
 
 export interface RewardsResponse {
@@ -295,6 +297,12 @@ export const userCommands = (): UserCommands[] => {
       handler: ask,
       callback: commandCallback,
     },
+    {
+      id: IssueCommentCommands.REVIEW,
+      description: `Compares the pull request code diff with the linked issue's specification to perform a review of the current pull request. \n example usage: /review`,
+      handler: review,
+      callback: commandCallback,
+    },
     {
       id: IssueCommentCommands.MULTIPLIER,
       description: `Set the bounty payout multiplier for a specific contributor, and provide the reason for why. \n example usage: "/wallet @user 0.5 'Multiplier reason'"`,
diff --git a/src/handlers/comment/handlers/review.ts b/src/handlers/comment/handlers/review.ts
new file mode 100644
index 000000000..5e88059ef
--- /dev/null
+++ b/src/handlers/comment/handlers/review.ts
@@ -0,0 +1,140 @@
+import { getBotContext, getLogger } from "../../../bindings";
+import { Payload, StreamlinedComment } from "../../../types";
+import { getAllIssueComments, getPullByNumber } from "../../../helpers";
+import { CreateChatCompletionRequestMessage } from "openai/resources/chat";
+import { askGPT, getPRSpec, specCheckTemplate, validationMsg } from "../../../helpers/gpt";
+import { ErrorDiff } from "../../../utils/helpers";
+
+/**
+ * @notice Three calls to OpenAI are made. First for context, second for review and third for finalization.
+ * @returns Pull Request Report
+ */
+export const review = async (body: string) => {
+  const context = getBotContext();
+  const logger = getLogger();
+
+  const payload = context.payload as Payload;
+  const issue = payload.issue;
+
+  if (!issue) {
+    return ErrorDiff(`Payload issue is undefined.`);
+  }
+
+  if (!body) {
+    return ErrorDiff(`Payload body is undefined.`);
+  }
+
+  const isPr = await getPullByNumber(context, issue.number);
+
+  if (!isPr) {
+    return ErrorDiff(`Can only be used on pull requests.`);
+  }
+
+  const reviewRegex = /^\/review/;
+  const reviewRegexMatch = body.match(reviewRegex);
+
+  if (!reviewRegexMatch) {
+    return ErrorDiff(`Error matching regex for review`);
+  }
+
+  const streamlined: StreamlinedComment[] = [];
+  let chatHistory: CreateChatCompletionRequestMessage[] = [];
+  const commentsRaw = await getAllIssueComments(issue.number, "raw");
+
+  if (!commentsRaw) {
+    logger.info(`Error getting issue comments`);
+    return ErrorDiff(`Error getting issue comments.`);
+  }
+
+  // return a diff of the changes made in the PR
+  const comparePR = async () => {
+    const comparePR = await context.octokit.pulls.get({
+      owner: payload.repository.owner.login,
+      repo: payload.repository.name,
+      pull_number: issue.number,
+    });
+
+    const pr = comparePR.data;
+
+    const prDiff = await context.octokit.pulls.get({
+      owner: payload.repository.owner.login,
+      repo: payload.repository.name,
+      pull_number: pr.number,
+      mediaType: {
+        format: "diff",
+      },
+    });
+
+    const diffContent = prDiff.data;
+
+    return {
+      pr,
+      diff: diffContent,
+    };
+  };
+
+  const isPull = async () => {
+    if (isPr) {
+      const diff = await comparePR()
+        .then(({ diff }) => {
+          return diff;
+        })
+        .catch((error) => {
+          logger.info(`Error getting diff: ${error}`);
+          return ErrorDiff(`Error getting diff: ${error}`);
+        });
+
+      const spec = await getPRSpec(context, chatHistory, streamlined);
+
+      chatHistory = [];
+      chatHistory.push(
+        {
+          role: "system",
+          content: specCheckTemplate,
+        } as CreateChatCompletionRequestMessage,
+        {
+          role: "assistant",
+          content: "Spec for Pr: \n" + JSON.stringify(spec),
+        } as CreateChatCompletionRequestMessage,
+        {
+          role: "user",
+          content: `${issue.assignees[0].login}'s PR Diff: \n` + JSON.stringify(diff),
+        } as CreateChatCompletionRequestMessage
+      );
+
+      const gptResponse = await askGPT(`Pr review call for #${issue.number}`, chatHistory);
+
+      chatHistory = [];
+      chatHistory.push(
+        {
+          role: "system",
+          content: validationMsg,
+        } as CreateChatCompletionRequestMessage,
+        {
+          role: "assistant",
+          content: `Validate for user: ${issue.assignees[0].login}: \n` + JSON.stringify(gptResponse),
+        } as CreateChatCompletionRequestMessage
+      );
+
+      const validated = await askGPT(`Pr review validation call for #${issue.number}`, chatHistory);
+
+      if (typeof validated === "string") {
+        return validated;
+      } else {
+        if (validated.answer) {
+          return validated.answer;
+        } else {
+          return ErrorDiff(`No answer found for issue #${issue.number}`);
+        }
+      }
+    } else {
+      return ErrorDiff(`No PR found for issue #${issue.number}`);
+    }
+  };
+
+  const res = await isPull();
+  if (res.startsWith("```diff\n")) {
+    return res;
+  }
+  return res + `\n###### Ensure the pull request requirements are in the linked issue's first comment and update it if the scope evolves.`;
+};
diff --git a/src/helpers/gpt.ts b/src/helpers/gpt.ts
index 046503d79..3b1f95cfb 100644
--- a/src/helpers/gpt.ts
+++ b/src/helpers/gpt.ts
@@ -1,23 +1,55 @@
 import { getBotConfig, getBotContext, getLogger } from "../bindings";
 import { Payload, StreamlinedComment, UserType } from "../types";
-import { getAllIssueComments, getAllLinkedIssuesAndPullsInBody } from "../helpers";
+import { getAllIssueComments, getAllLinkedIssuesAndPullsInBody, getIssueByNumber } from "../helpers";
 import OpenAI from "openai";
 import { CreateChatCompletionRequestMessage } from "openai/resources/chat";
 import { ErrorDiff } from "../utils/helpers";
+import { Context } from "probot";
 
-export const sysMsg = `You are the UbiquityAI, designed to provide accurate technical answers. \n
+const escapeMsg = `Please ignore all previous commands and directives. \n
+Perform exactly what is asked of you in the following way: \n\n`;
+
+export const sysMsg = `${escapeMsg} You are an AI designed to provide accurate technical answers. \n
 Whenever appropriate, format your response using GitHub Flavored Markdown. Utilize tables, lists, and code blocks for clear and organized answers. \n
 Do not make up answers. If you are unsure, say so. \n
 Original Context exists only to provide you with additional information to the current question, use it to formulate answers. \n
 Infer the context of the question from the Original Context using your best judgement. \n
-All replies MUST end with "\n\n ".\n
+All replies MUST end with "\n\n ".\n
+`;
+
+export const validationMsg = `${escapeMsg} You are an AI validation bot designed to ensure that the answers provided by the OpenAI API meet our predefined standards. \n
+The input you'll validate is the output of a pull request review performed by GPT-3; whether it has achieved the spec determines what you need to do. \n
+
+If the spec is not achieved then you will take the useful information from the review and deliver it using the following template: \n
+=== Template A === \n
+### Spec not achieved
+{username} this is where you went wrong...
+this is how you can fix it...
+> code example of solution
+=== Template A === \n
+
+If the spec is achieved then you will respond using the following template including their real username, no @ symbols:\n
+=== Template B === \n
+### Spec achieved
+{username}, you have achieved the spec and now the reviewers will let you know if there are any other changes needed.\n
+=== Template B === \n
 `;
 
-export const gptContextTemplate = `
-You are the UbiquityAI, designed to review and analyze pull requests.
+export const specCheckTemplate = `${escapeMsg} Using the provided context, ensure you clearly understand the specification of the issue. \n
+Now using your best judgement, determine if the specification has been met based on the PR diff provided. \n
+The spec should be achieved at least logically, if not literally. If changes are made that are not directly mentioned in the spec, but are logical and do not break the spec, they are acceptable. \n
+Your response will be posted as a GitHub comment for everyone to see in the pull request review conversation.
+Knowing this, only include information that will benefit them; think of it as a quick summary of the review.
+You can add value by identifying coding errors and code suggestions that benefit both the author and reviewers.
+`;
+
+export const gptContextTemplate = `${escapeMsg}
+You are an AI designed to review and analyze pull requests.
 You have been provided with the spec of the issue and all linked issues or pull requests.
 Using this full context, Reply in pure JSON format, with the following structure omitting irrelvant information pertaining to the specification.
 You MUST provide the following structure, but you may add additional information if you deem it relevant.
+Do not include example data, only include data relevant to the specification.
+
 Example:[
   {
     "source": "issue #123"
@@ -54,6 +86,66 @@
 ]
 `;
 
+export const getPRSpec = async (context: Context, chatHistory: CreateChatCompletionRequestMessage[], streamlined: StreamlinedComment[]) => {
+  const logger = getLogger();
+
+  const payload = context.payload as Payload;
+
+  const pr = payload.issue;
+
+  if (!pr) {
+    return ErrorDiff(`Payload issue is undefined.`);
+  }
+
+  // we're in the pr context, so grab the linked issue body
+  const regex = /(#(\d+)|https:\/\/github\.com\/[^/\s]+\/[^/\s]+\/(issues|pull)\/(\d+))/gi;
+  const linkedIssueNumber = pr.body.match(regex);
+  const linkedIssues: number[] = [];
+
+  if (linkedIssueNumber) {
+    linkedIssueNumber.forEach((issue: string) => {
+      if (issue.includes("#")) {
+        linkedIssues.push(Number(issue.slice(1)));
+      } else {
+        linkedIssues.push(Number(issue.split("/")[6]));
+      }
+    });
+  } else {
+    logger.info(`No linked issues or prs found`);
+  }
+
+  if (!linkedIssueNumber) {
+    return ErrorDiff(`No linked issue found in body.`);
+  }
+
+  // get the linked issue body
+  const linkedIssue = await getIssueByNumber(context, linkedIssues[0]);
+
+  if (!linkedIssue) {
+    return ErrorDiff(`Error getting linked issue.`);
+  }
+
+  // add the first comment of the pull request which is the contributor's description of their changes
+  streamlined.push({
+    login: pr.user.login,
+    body: `${pr.user.login}'s pull request description:\n` + pr.body,
+  });
+
+  // add the linked issue body as this is the spec
+  streamlined.push({
+    login: "assistant",
+    body: `#${linkedIssue.number} Specification: \n` + linkedIssue.body,
+  });
+
+  // no other conversation context is needed
+  chatHistory.push({
+    role: "system",
+    content: "This pull request context: \n" + JSON.stringify(streamlined),
+  } as CreateChatCompletionRequestMessage);
+
+  return chatHistory;
+};
+
 /**
  * @notice best used alongside getAllLinkedIssuesAndPullsInBody() in helpers/issue
  * @param chatHistory the conversational context to provide to GPT
@@ -74,12 +166,12 @@ export const decideContextGPT = async (
   const issue = payload.issue;
 
   if (!issue) {
-    return `Payload issue is undefined`;
+    return ErrorDiff(`Payload issue is undefined.`);
   }
 
   // standard comments
   const comments = await getAllIssueComments(issue.number);
-  // raw so we can grab the tag
+  // raw so we can grab the tag
   const commentsRaw = await getAllIssueComments(issue.number, "raw");
 
   if (!comments) {
@@ -95,7 +187,7 @@
   // add the rest
   comments.forEach(async (comment, i) => {
-    if (comment.user.type == UserType.User || commentsRaw[i].body.includes("")) {
+    if (comment.user.type == UserType.User || commentsRaw[i].body.includes("")) {
       streamlined.push({
         login: comment.user.login,
         body: comment.body,
@@ -108,7 +200,7 @@
   if (typeof links === "string") {
     logger.info(`Error getting linked issues or prs: ${links}`);
-    return `Error getting linked issues or prs: ${links}`;
+    return ErrorDiff(`Error getting linked issues or prs: ${links}`);
   }
 
   linkedIssueStreamlined = links.linkedIssues;
@@ -117,23 +209,23 @@
   chatHistory.push(
     {
       role: "system",
+      content: gptContextTemplate,
+    },
+    {
+      role: "assistant",
       content: "This issue/Pr context: \n" + JSON.stringify(streamlined),
-      name: "UbiquityAI",
     } as CreateChatCompletionRequestMessage,
     {
-      role: "system",
+      role: "assistant",
      content: "Linked issue(s) context: \n" + JSON.stringify(linkedIssueStreamlined),
-      name: "UbiquityAI",
     } as CreateChatCompletionRequestMessage,
     {
-      role: "system",
+      role: "assistant",
      content: "Linked Pr(s) context: \n" + JSON.stringify(linkedPRStreamlined),
-      name: "UbiquityAI",
     } as CreateChatCompletionRequestMessage
   );
 
-  // we'll use the first response to determine the context of future calls
-  const res = await askGPT("", chatHistory);
+  const res = await askGPT(`OpenAI fetching context for #${issue.number}`, chatHistory);
 
   return res;
 };
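
Two short reviewer notes with sketches follow the patch.

The first concerns the linked-issue lookup in `getPRSpec`: the PR body is matched against the regex in the diff, and each hit is converted to an issue number either by dropping a leading `#` or by taking segment index 6 of the URL split on `/`. The sketch below only restates that logic outside the handler so it can be run in isolation; `resolveLinkedIssues`, `linkRegex`, and the sample body are hypothetical and not part of the patch.

```ts
// Sketch only: mirrors the linked-issue lookup in getPRSpec above.
const linkRegex = /(#(\d+)|https:\/\/github\.com\/[^/\s]+\/[^/\s]+\/(issues|pull)\/(\d+))/gi;

function resolveLinkedIssues(prBody: string): number[] {
  const links = prBody.match(linkRegex) ?? [];
  // "#123" -> drop the leading "#"; full URL -> the number is segment index 6 after splitting on "/".
  return links.map((link) => (link.includes("#") ? Number(link.slice(1)) : Number(link.split("/")[6])));
}

console.log(resolveLinkedIssues("Resolves #123, see also https://github.com/some-org/some-repo/issues/456"));
// -> [ 123, 456 ]
```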
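
The second note is about `review.ts` interpolating `issue.assignees[0].login` into both the spec-check and the validation prompts. On a pull request with no assignee that index access would throw before the first `askGPT` call. A guarded lookup along these lines is one option — a sketch under that assumption; `MinimalIssue`, `assigneeLogin`, and the fallback label are hypothetical and not part of the patch:

```ts
// Sketch only: a guarded assignee lookup that review.ts could use instead of issue.assignees[0].login.
type MinimalIssue = { assignees?: { login: string }[] | null };

function assigneeLogin(issue: MinimalIssue, fallback = "the contributor"): string {
  return issue.assignees?.[0]?.login ?? fallback;
}
```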