From 73092ba93b3bb7879ceee9f9b3850d2f87378faa Mon Sep 17 00:00:00 2001 From: Dhruwang Jariwala <67850763+Dhruwang@users.noreply.github.com> Date: Wed, 25 Sep 2024 20:06:20 +0530 Subject: [PATCH 1/7] fix: Cache (#105) * fix: cache for enrollment * fix: cache * fix: cache * removed usage of octokit webhook * fixes * fix build --------- Co-authored-by: pandeymangg --- .../[repositoryId]/players/page.tsx | 2 +- app/api/github-webhook/route.ts | 41 + env.mjs | 3 +- lib/constants.ts | 2 + lib/enrollment/cache.ts | 33 + lib/enrollment/service.ts | 129 ++- lib/github/cache.ts | 25 + lib/github/hooks/bounty.ts | 366 ++++---- lib/github/hooks/installation.ts | 15 +- lib/github/hooks/issue.ts | 859 +++++++++--------- lib/github/index.ts | 72 +- lib/github/service.ts | 214 ++--- lib/github/utils.ts | 8 +- lib/repository/cache.ts | 32 + lib/repository/service.ts | 196 ++-- lib/user/service.ts | 28 + package.json | 5 +- pages/api/github-webhook.ts | 52 -- pnpm-lock.yaml | 511 +++++++++-- 19 files changed, 1597 insertions(+), 996 deletions(-) create mode 100644 app/api/github-webhook/route.ts create mode 100644 lib/enrollment/cache.ts create mode 100644 lib/github/cache.ts create mode 100644 lib/repository/cache.ts delete mode 100644 pages/api/github-webhook.ts diff --git a/app/(dashboard)/repo-settings/[repositoryId]/players/page.tsx b/app/(dashboard)/repo-settings/[repositoryId]/players/page.tsx index 6cea3ec..01742f6 100644 --- a/app/(dashboard)/repo-settings/[repositoryId]/players/page.tsx +++ b/app/(dashboard)/repo-settings/[repositoryId]/players/page.tsx @@ -1,5 +1,5 @@ import UserProfileSummary from "@/components/ui/user-profile-summary"; -import { getUsersForRepository } from "@/lib/repository/service"; +import { getUsersForRepository } from "@/lib/user/service"; export const metadata = { title: "Player overview", diff --git a/app/api/github-webhook/route.ts b/app/api/github-webhook/route.ts new file mode 100644 index 0000000..e1a8653 --- /dev/null +++ b/app/api/github-webhook/route.ts @@ -0,0 +1,41 @@ +import { registerHooks } from "@/lib/github"; +import { EmitterWebhookEvent, EmitterWebhookEventName } from "@octokit/webhooks"; +import { headers } from "next/headers"; +import { NextResponse } from "next/server"; + +// Set to store processed event IDs +const processedEvents = new Set(); + +export async function POST(req: Request) { + const headersList = headers(); + const eventId = headersList.get("x-github-delivery") as string; + const githubEvent = headersList.get("x-github-event") as string; + + let body: EmitterWebhookEvent<"issue_comment" | "pull_request" | "installation">["payload"]; + + try { + body = await req.json(); + } catch (error) { + return NextResponse.json({ error: "Invalid JSON payload" }, { status: 400 }); + } + + if (!eventId) { + return NextResponse.json({ error: "Missing X-GitHub-Delivery header" }, { status: 400 }); + } + + if (processedEvents.has(eventId)) { + return NextResponse.json({ message: `Event ${eventId} already processed, skipping` }, { status: 200 }); + } + + registerHooks(githubEvent as EmitterWebhookEventName, body); + + processedEvents.add(eventId); + setTimeout( + () => { + processedEvents.delete(eventId); + }, + 24 * 60 * 60 * 1000 + ); // 24 hours + + return NextResponse.json({ message: `Event ${eventId} processed` }, { status: 200 }); +} diff --git a/env.mjs b/env.mjs index c2305f4..09b1441 100644 --- a/env.mjs +++ b/env.mjs @@ -31,10 +31,10 @@ export const env = createEnv({ TREMENDOUS_CAMPAIGN_ID: z.string().min(1), DISCORD_BOT_TOKEN: z.string(), 
DISCORD_CHANNEL_ID: z.string(), + OSS_GG_REPO_ID: z.string().min(1), }, client: { NEXT_PUBLIC_APP_URL: z.string().min(1), - // NEXT_PUBLIC_TRIGGER_PUBLIC_API_KEY: z.string().min(1).optional(), }, runtimeEnv: { NEXTAUTH_URL: process.env.NEXTAUTH_URL, @@ -64,5 +64,6 @@ export const env = createEnv({ TREMENDOUS_CAMPAIGN_ID: process.env.TREMENDOUS_CAMPAIGN_ID, DISCORD_BOT_TOKEN: process.env.DISCORD_BOT_TOKEN, DISCORD_CHANNEL_ID: process.env.DISCORD_CHANNEL_ID, + OSS_GG_REPO_ID: process.env.OSS_GG_REPO_ID, }, }); diff --git a/lib/constants.ts b/lib/constants.ts index 7adda27..1cac30a 100644 --- a/lib/constants.ts +++ b/lib/constants.ts @@ -74,3 +74,5 @@ export const DISCORD_CHANNEL_ID = env.DISCORD_CHANNEL_ID; export const DISCORD_BOT_TOKEN = env.DISCORD_BOT_TOKEN; export const DISCORD_AWARD_POINTS_MESSAGE = (username: string, points: number) => `Way to go, ${username} 🎉 You've just earned ${points} points. Your contribution is invaluable to our community 🙌 Keep up the fantastic work and let's keep pushing forward! đŸ’Ē`; + +export const OSS_GG_REPO_ID = env.OSS_GG_REPO_ID; diff --git a/lib/enrollment/cache.ts b/lib/enrollment/cache.ts new file mode 100644 index 0000000..dfe79c6 --- /dev/null +++ b/lib/enrollment/cache.ts @@ -0,0 +1,33 @@ +import { revalidateTag } from "next/cache"; + +interface RevalidateProps { + userId?: string; + repositoryId?: string; +} + +export const enrollmentCache = { + tag: { + byUserId(userId: string) { + return `users-${userId}-enrollment`; + }, + byRepositoryId(repositoryId: string) { + return `repositories-${repositoryId}-enrollment`; + }, + byUserIdAndRepositoryId(userId: string, repositoryId: string) { + return `users-${userId}-repositories-${repositoryId}-enrollment`; + }, + }, + revalidate({ userId, repositoryId }: RevalidateProps): void { + if (userId) { + revalidateTag(this.tag.byUserId(userId)); + } + + if (repositoryId) { + revalidateTag(this.tag.byRepositoryId(repositoryId)); + } + + if (userId && repositoryId) { + revalidateTag(this.tag.byUserIdAndRepositoryId(userId, repositoryId)); + } + }, +}; diff --git a/lib/enrollment/service.ts b/lib/enrollment/service.ts index 70b46ce..d246610 100644 --- a/lib/enrollment/service.ts +++ b/lib/enrollment/service.ts @@ -3,8 +3,35 @@ import { TEnrollment, TEnrollmentInput, ZEnrollmentInput } from "@/types/enrollm import { DatabaseError } from "@/types/errors"; import { TRepository } from "@/types/repository"; import { Prisma } from "@prisma/client"; +import { unstable_cache } from "next/cache"; import { validateInputs } from "../utils/validate"; +import { enrollmentCache } from "./cache"; + +export const getEnrollment = async (userId: string, repositoryId: string) => + unstable_cache( + async () => { + try { + const enrollment = await db.enrollment.findFirst({ + where: { + userId, + repositoryId, + }, + }); + + return enrollment; + } catch (error) { + if (error instanceof Prisma.PrismaClientKnownRequestError) { + throw new DatabaseError(error.message); + } + throw error; + } + }, + [`getEnrollment-${userId}-${repositoryId}`], + { + tags: [enrollmentCache.tag.byUserIdAndRepositoryId(userId, repositoryId)], + } + )(); /** * Enrolls a user in all repositories. 
@@ -48,12 +75,7 @@ export const createEnrollment = async (enrollmentData: TEnrollmentInput): Promis try { // Check if enrollment already exists - const existingEnrollment = await db.enrollment.findFirst({ - where: { - userId: enrollmentData.userId, - repositoryId: enrollmentData.repositoryId, - }, - }); + const existingEnrollment = await getEnrollment(enrollmentData.userId, enrollmentData.repositoryId); if (existingEnrollment) { throw new Error("Enrollment already exists."); @@ -63,6 +85,11 @@ export const createEnrollment = async (enrollmentData: TEnrollmentInput): Promis data: enrollmentData, }); + enrollmentCache.revalidate({ + userId: enrollmentData.userId, + repositoryId: enrollmentData.repositoryId, + }); + return enrollment; } catch (error) { if (error instanceof Prisma.PrismaClientKnownRequestError) { @@ -89,6 +116,11 @@ export const deleteEnrollment = async (userId: string, repositoryId: string): Pr }, }, }); + + enrollmentCache.revalidate({ + userId, + repositoryId, + }); } catch (error) { if (error instanceof Prisma.PrismaClientKnownRequestError) { throw new DatabaseError(error.message); @@ -104,15 +136,23 @@ export const deleteEnrollment = async (userId: string, repositoryId: string): Pr * @returns A boolean indicating whether the user is enrolled. */ -export const hasEnrollmentForRepository = async (userId: string, repositoryId: string): Promise => { - const count = await db.enrollment.count({ - where: { - userId, - repositoryId, +export const hasEnrollmentForRepository = async (userId: string, repositoryId: string): Promise => + unstable_cache( + async () => { + const count = await db.enrollment.count({ + where: { + userId, + repositoryId, + }, + }); + + return count > 0; }, - }); - return count > 0; -}; + [`hasEnrollmentForRepository-${userId}-${repositoryId}`], + { + tags: [enrollmentCache.tag.byUserIdAndRepositoryId(userId, repositoryId)], + } + )(); /** * Retrieves an array of repositories that a user is enrolled in. @@ -121,32 +161,39 @@ export const hasEnrollmentForRepository = async (userId: string, repositoryId: s * a repository the user is enrolled in. The array is empty if the user has no enrollments. 
*/ -export const getEnrolledRepositories = async (userId: string): Promise => { - const enrolledRepositories = await db.repository.findMany({ - where: { - enrollments: { - some: { - userId: userId, +export const getEnrolledRepositories = async (userId: string): Promise => + unstable_cache( + async () => { + const enrolledRepositories = await db.repository.findMany({ + where: { + enrollments: { + some: { + userId: userId, + }, + }, }, - }, - }, - select: { - id: true, - githubId: true, - name: true, - description: true, - homepage: true, - configured: true, - topics: true, - installation: true, - installationId: true, - pointTransactions: true, - enrollments: true, - logoUrl: true, - levels: true, - owner: true, - }, - }); + select: { + id: true, + githubId: true, + name: true, + description: true, + homepage: true, + configured: true, + topics: true, + installation: true, + installationId: true, + pointTransactions: true, + enrollments: true, + logoUrl: true, + levels: true, + owner: true, + }, + }); - return enrolledRepositories; -}; + return enrolledRepositories; + }, + [`getEnrolledRepositories-${userId}`], + { + tags: [enrollmentCache.tag.byUserId(userId)], + } + )(); diff --git a/lib/github/cache.ts b/lib/github/cache.ts new file mode 100644 index 0000000..fba27b1 --- /dev/null +++ b/lib/github/cache.ts @@ -0,0 +1,25 @@ +import { revalidateTag } from "next/cache"; + +interface RevalidateProps { + repoGithubId?: number; + githubLogin?: string; +} + +export const githubCache = { + tag: { + byRepoGithubId(repoGithubId: number) { + return `github-repo-${repoGithubId}`; + }, + byGithubLogin(githubLogin: string) { + return `github-repo-${githubLogin}`; + }, + }, + revalidate({ repoGithubId, githubLogin }: RevalidateProps): void { + if (repoGithubId) { + revalidateTag(this.tag.byRepoGithubId(repoGithubId)); + } + if (githubLogin) { + revalidateTag(this.tag.byGithubLogin(githubLogin)); + } + }, +}; diff --git a/lib/github/hooks/bounty.ts b/lib/github/hooks/bounty.ts index 7a8f969..c8cfac1 100644 --- a/lib/github/hooks/bounty.ts +++ b/lib/github/hooks/bounty.ts @@ -15,233 +15,227 @@ import { import { extractIssueNumbersFromPrBody, getOctokitInstance } from "@/lib/github/utils"; import { getRepositoryByGithubId } from "@/lib/repository/service"; import { createUser, getUser, getUserByGithubId } from "@/lib/user/service"; -import { Webhooks } from "@octokit/webhooks"; +import { EmitterWebhookEvent, Webhooks } from "@octokit/webhooks"; /** * Handles the event when a bounty is created. * * @param webhooks - The Webhooks instance. 
*/ -export const onBountyCreated = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.ISSUE_COMMENTED, async (context) => { - try { - const octokit = getOctokitInstance(context.payload.installation?.id!); - const repo = context.payload.repository.name; - const issueCommentBody = context.payload.comment.body; - const bountyCommentRegex = new RegExp(`${BOUNTY_IDENTIFIER}\\s+(\\d+)`); - const bountyMatch = issueCommentBody.match(bountyCommentRegex); - const isPR = Boolean(context.payload.issue.pull_request); - const issueNumber = context.payload.issue.number; - const owner = context.payload.repository.owner.login; - const hasOssGgLabel = context.payload.issue.labels.some((label) => label.name === OSS_GG_LABEL); +export const onBountyCreated = async (payload: EmitterWebhookEvent<"issue_comment.created">["payload"]) => { + try { + const octokit = getOctokitInstance(payload.installation?.id!); + const repo = payload.repository.name; + const issueCommentBody = payload.comment.body; + const bountyCommentRegex = new RegExp(`${BOUNTY_IDENTIFIER}\\s+(\\d+)`); + const bountyMatch = issueCommentBody.match(bountyCommentRegex); + const isPR = Boolean(payload.issue.pull_request); + const issueNumber = payload.issue.number; + const owner = payload.repository.owner.login; + const hasOssGgLabel = payload.issue.labels.some((label) => label.name === OSS_GG_LABEL); + + const commentOnIssue = async (comment: string) => { + await octokit.issues.createComment({ + body: comment, + issue_number: issueNumber, + repo, + owner, + }); + }; + + if (bountyMatch) { + if (isPR) { + await commentOnIssue("Bounties can be setup in issues only, not in PRs."); + return; + } - const commentOnIssue = async (comment: string) => { - await octokit.issues.createComment({ - body: comment, - issue_number: issueNumber, - repo, - owner, - }); - }; + if (!hasOssGgLabel) { + await commentOnIssue(`Bounties can be setup only in issues with the ${OSS_GG_LABEL} label.`); + return; + } + + const newBountyAmount = parseInt(bountyMatch[1], 10); + const newBountyLabel = `${BOUNTY_EMOJI} ${newBountyAmount} ${USD_CURRENCY_CODE}`; - if (bountyMatch) { - if (isPR) { - await commentOnIssue("Bounties can be setup in issues only, not in PRs."); + // Check if the repo is registered in oss.gg + const ossGgRepo = await getRepositoryByGithubId(payload.repository.id); + if (!ossGgRepo) { + await commentOnIssue( + "If you are the repo owner, please register at https://oss.gg to be able to create bounties." 
+ ); + return; + } else if (ossGgRepo) { + const bountySettings = await getBountySettingsByRepositoryId(ossGgRepo.id)(); + if (bountySettings?.maxBounty && newBountyAmount > bountySettings.maxBounty) { + await commentOnIssue( + `Bounty amount exceeds the maximum bounty amount of ${bountySettings.maxBounty} ${USD_CURRENCY_CODE} set by the repo owner.` + ); return; } - if (!hasOssGgLabel) { - await commentOnIssue(`Bounties can be setup only in issues with the ${OSS_GG_LABEL} label.`); + let usersThatCanCreateBounty = ossGgRepo?.installation?.memberships?.map((m) => m.userId); + if (!usersThatCanCreateBounty) { + await commentOnIssue("No admins for the given repo in oss.gg!"); return; } - - const newBountyAmount = parseInt(bountyMatch[1], 10); - const newBountyLabel = `${BOUNTY_EMOJI} ${newBountyAmount} ${USD_CURRENCY_CODE}`; - - // Check if the repo is registered in oss.gg - const ossGgRepo = await getRepositoryByGithubId(context.payload.repository.id); - if (!ossGgRepo) { - await commentOnIssue( - "If you are the repo owner, please register at https://oss.gg to be able to create bounties." - ); + const ossGgUsers = await Promise.all( + usersThatCanCreateBounty.map(async (userId) => { + const user = await getUser(userId); + return user?.githubId; + }) + ); + const isUserAllowedToCreateBounty = ossGgUsers?.includes(payload.comment.user.id); + if (!isUserAllowedToCreateBounty) { + await commentOnIssue("You are not allowed to create bounties! Please contact the repo admin."); return; - } else if (ossGgRepo) { - const bountySettings = await getBountySettingsByRepositoryId(ossGgRepo.id)(); - if (bountySettings?.maxBounty && newBountyAmount > bountySettings.maxBounty) { - await commentOnIssue( - `Bounty amount exceeds the maximum bounty amount of ${bountySettings.maxBounty} ${USD_CURRENCY_CODE} set by the repo owner.` - ); - return; - } - - let usersThatCanCreateBounty = ossGgRepo?.installation.memberships.map((m) => m.userId); - if (!usersThatCanCreateBounty) { - await commentOnIssue("No admins for the given repo in oss.gg!"); - return; - } - const ossGgUsers = await Promise.all( - usersThatCanCreateBounty.map(async (userId) => { - const user = await getUser(userId); - return user?.githubId; - }) - ); - const isUserAllowedToCreateBounty = ossGgUsers?.includes(context.payload.comment.user.id); - if (!isUserAllowedToCreateBounty) { - await commentOnIssue("You are not allowed to create bounties! 
Please contact the repo admin."); - return; - } + } - // Regex that matches the bounty label format like "💸 50 USD" - const previousBountyLabel = context.payload.issue?.labels?.find((label) => - label.name.match(BOUNTY_LABEL_REGEX) - ); + // Regex that matches the bounty label format like "💸 50 USD" + const previousBountyLabel = payload.issue?.labels?.find((label) => + label.name.match(BOUNTY_LABEL_REGEX) + ); - if (previousBountyLabel) { - const previousBountyAmount = parseInt(previousBountyLabel.name.split(" ")[1], 10); - if (previousBountyAmount === newBountyAmount) { - return; - } else { - await octokit.issues.updateLabel({ - owner, - repo, - name: previousBountyLabel.name, - new_name: newBountyLabel, - color: "0E8A16", - }); - - await commentOnIssue( - `The bounty amount was updated to ${newBountyAmount} ${USD_CURRENCY_CODE}` - ); - } + if (previousBountyLabel) { + const previousBountyAmount = parseInt(previousBountyLabel.name.split(" ")[1], 10); + if (previousBountyAmount === newBountyAmount) { + return; } else { - await octokit.issues.addLabels({ + await octokit.issues.updateLabel({ owner, repo, - issue_number: issueNumber, - labels: [newBountyLabel], + name: previousBountyLabel.name, + new_name: newBountyLabel, + color: "0E8A16", }); - await commentOnIssue( - `A bounty of ${newBountyAmount} ${USD_CURRENCY_CODE} has been added to this issue. Happy hacking!` - ); + + await commentOnIssue(`The bounty amount was updated to ${newBountyAmount} ${USD_CURRENCY_CODE}`); } + } else { + await octokit.issues.addLabels({ + owner, + repo, + issue_number: issueNumber, + labels: [newBountyLabel], + }); + await commentOnIssue( + `A bounty of ${newBountyAmount} ${USD_CURRENCY_CODE} has been added to this issue. Happy hacking!` + ); } } - } catch (err) { - console.error(err); - throw new Error(err); } - }); + } catch (err) { + console.error(err); + throw new Error(err); + } }; /** * Handles the event when a bounty pull request is merged. * @param webhooks - The Webhooks instance. */ -export const onBountyPullRequestMerged = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.PULL_REQUEST_CLOSED, async (context) => { - try { - const octokit = getOctokitInstance(context.payload.installation?.id!); - const repo = context.payload.repository.name; - const owner = context.payload.repository.owner.login; - const isPrMerged = context.payload.pull_request.merged; - const prNumber = context.payload.pull_request.number; - const prBody = context.payload.pull_request.body ?? ""; - const prAuthorGithubId = context.payload.pull_request.user.id; - const prAuthorUsername = context.payload.pull_request.user.login; - - if (!isPrMerged || !prBody) { - return; - } +export const onBountyPullRequestMerged = async ( + payload: EmitterWebhookEvent<"pull_request.closed">["payload"] +) => { + try { + const octokit = getOctokitInstance(payload.installation?.id!); + const repo = payload.repository.name; + const owner = payload.repository.owner.login; + const isPrMerged = payload.pull_request.merged; + const prNumber = payload.pull_request.number; + const prBody = payload.pull_request.body ?? 
""; + const prAuthorGithubId = payload.pull_request.user.id; + const prAuthorUsername = payload.pull_request.user.login; + + if (!isPrMerged || !prBody) { + return; + } - const linkedIssueNumbers = extractIssueNumbersFromPrBody(prBody); // This assumes that a PR body can be linked to multiple issues + const linkedIssueNumbers = extractIssueNumbersFromPrBody(prBody); // This assumes that a PR body can be linked to multiple issues - const awardBountyToUser = async (issueNumber: number) => { - const commentOnIssue = async (comment: string) => { - await octokit.issues.createComment({ - body: comment, - issue_number: issueNumber, - repo, - owner, - }); - }; - - const issue = await octokit.issues.get({ - owner, - repo, + const awardBountyToUser = async (issueNumber: number) => { + const commentOnIssue = async (comment: string) => { + await octokit.issues.createComment({ + body: comment, issue_number: issueNumber, + repo, + owner, }); + }; - const issueLabels = issue.data.labels.map((label) => - typeof label === "string" ? label : label?.name - ); + const issue = await octokit.issues.get({ + owner, + repo, + issue_number: issueNumber, + }); - // Check if the issue has the required labels and is assigned to the pull request author - const hasOssGgLabel = issueLabels?.some((label) => label === OSS_GG_LABEL); - const bountyLabel = issueLabels?.find((label) => label?.match(BOUNTY_LABEL_REGEX)); - const isIssueAssignedToPrAuthor = issue.data.assignees?.some( - (assignee) => assignee.id === prAuthorGithubId - ); - const isIssueValid = hasOssGgLabel && bountyLabel && isIssueAssignedToPrAuthor; + const issueLabels = issue.data.labels.map((label) => (typeof label === "string" ? label : label?.name)); - // If the issue is not valid, return - if (!isIssueValid) { - return; - } else { - const bountyAmount = parseInt(bountyLabel.split(" ")[1], 10); - const { data: prAuthorProfile } = await octokit.users.getByUsername({ - username: prAuthorUsername, + // Check if the issue has the required labels and is assigned to the pull request author + const hasOssGgLabel = issueLabels?.some((label) => label === OSS_GG_LABEL); + const bountyLabel = issueLabels?.find((label) => label?.match(BOUNTY_LABEL_REGEX)); + const isIssueAssignedToPrAuthor = issue.data.assignees?.some( + (assignee) => assignee.id === prAuthorGithubId + ); + const isIssueValid = hasOssGgLabel && bountyLabel && isIssueAssignedToPrAuthor; + + // If the issue is not valid, return + if (!isIssueValid) { + return; + } else { + const bountyAmount = parseInt(bountyLabel.split(" ")[1], 10); + const { data: prAuthorProfile } = await octokit.users.getByUsername({ + username: prAuthorUsername, + }); + const ossGgRepo = await getRepositoryByGithubId(payload.repository.id); + let user = await getUserByGithubId(prAuthorGithubId); + if (!user) { + user = await createUser({ + githubId: prAuthorGithubId, + login: prAuthorUsername, + email: prAuthorProfile.email, + name: prAuthorProfile.name, + avatarUrl: prAuthorProfile.avatar_url, }); - const ossGgRepo = await getRepositoryByGithubId(context.payload.repository.id); - let user = await getUserByGithubId(prAuthorGithubId); - if (!user) { - user = await createUser({ - githubId: prAuthorGithubId, - login: prAuthorUsername, - email: prAuthorProfile.email, - name: prAuthorProfile.name, - avatarUrl: prAuthorProfile.avatar_url, - }); - } + } - const bountyExists = await checkIfBountyExists(issue.data.html_url); - if (bountyExists) { - await commentOnIssue("Bounty already exists for this issue. 
Please check your inbox!"); - console.error(`Bounty already exists for issue: ${issue.data.html_url}`); - return; - } + const bountyExists = await checkIfBountyExists(issue.data.html_url); + if (bountyExists) { + await commentOnIssue("Bounty already exists for this issue. Please check your inbox!"); + console.error(`Bounty already exists for issue: ${issue.data.html_url}`); + return; + } - const bountyOrder = await dispatchBountyOrder({ - fundingSource: "BALANCE", - amount: bountyAmount, - currencyCode: USD_CURRENCY_CODE, - deliveryMethod: "EMAIL", - recipientName: user?.name!, - recipientEmail: user?.email!, - }); + const bountyOrder = await dispatchBountyOrder({ + fundingSource: "BALANCE", + amount: bountyAmount, + currencyCode: USD_CURRENCY_CODE, + deliveryMethod: "EMAIL", + recipientName: user?.name!, + recipientEmail: user?.email!, + }); - if (bountyOrder) { - await storeBounty({ - usdAmount: bountyAmount, - issueUrl: issue.data.html_url, - status: "open", - orderId: bountyOrder.order.id, - rewardId: bountyOrder.order.rewards?.[0].id!, - userId: user?.id, - repositoryId: ossGgRepo?.id!, - }); + if (bountyOrder) { + await storeBounty({ + usdAmount: bountyAmount, + issueUrl: issue.data.html_url, + status: "open", + orderId: bountyOrder.order.id, + rewardId: bountyOrder.order.rewards?.[0].id!, + userId: user?.id, + repositoryId: ossGgRepo?.id!, + }); - await commentOnIssue( - `Thanks a lot for your valuable contribution! Pls check your inbox for all details to redeem your bounty of ${bountyAmount} ${USD_CURRENCY_CODE}!` - ); - } + await commentOnIssue( + `Thanks a lot for your valuable contribution! Pls check your inbox for all details to redeem your bounty of ${bountyAmount} ${USD_CURRENCY_CODE}!` + ); } - }; - - // Award bounty to each linked issue number - await Promise.all(linkedIssueNumbers.map(awardBountyToUser)); - } catch (err) { - console.error(err); - throw new Error(err); - } - }); + } + }; + + // Award bounty to each linked issue number + await Promise.all(linkedIssueNumbers.map(awardBountyToUser)); + } catch (err) { + console.error(err); + throw new Error(err); + } }; diff --git a/lib/github/hooks/installation.ts b/lib/github/hooks/installation.ts index be5fb75..a11eb96 100644 --- a/lib/github/hooks/installation.ts +++ b/lib/github/hooks/installation.ts @@ -1,5 +1,4 @@ -import { EVENT_TRIGGERS } from "@/lib/constants"; -import { Webhooks } from "@octokit/webhooks"; +import { EmitterWebhookEvent, Webhooks } from "@octokit/webhooks"; import { sendInstallationDetails } from "../services/user"; @@ -12,12 +11,10 @@ type GitHubRepository = { private: boolean; }; -export const onInstallationCreated = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.INSTALLATION_CREATED, async (context) => { - const installationId = context.payload.installation.id; - const appId = context.payload.installation.app_id; - const repos = context.payload.repositories as GitHubRepository[]; +export const onInstallationCreated = async (payload: EmitterWebhookEvent<"installation">["payload"]) => { + const installationId = payload.installation.id; + const appId = payload.installation.app_id; + const repos = payload.repositories as GitHubRepository[]; - await sendInstallationDetails(installationId, appId, repos, context.payload.installation); - }); + await sendInstallationDetails(installationId, appId, repos, payload.installation); }; diff --git a/lib/github/hooks/issue.ts b/lib/github/hooks/issue.ts index 2431301..2607287 100644 --- a/lib/github/hooks/issue.ts +++ b/lib/github/hooks/issue.ts @@ 
-18,7 +18,7 @@ import { getRepositoryByGithubId } from "@/lib/repository/service"; import { getUser } from "@/lib/user/service"; import { discordPointMessageTask } from "@/src/trigger/discordPointsMessage"; import { issueReminderTask } from "@/src/trigger/issueReminder"; -import { Webhooks } from "@octokit/webhooks"; +import { EmitterWebhookEvent, Webhooks } from "@octokit/webhooks"; import { isMemberOfRepository } from "../services/user"; import { @@ -33,119 +33,115 @@ import { processUserPoints, } from "../utils"; -export const onIssueOpened = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.ISSUE_OPENED, async (context) => { - const projectId = context.payload.repository.id; - //TODO: - //1. check if the issue has the oss label - //2. if it has the OSS label find all the users that are currently subscribed to the repo, have the right points/permission, then send them an email - - // const isProjectRegistered = await getProject(projectId) - // if (!isProjectRegistered) { - // await context.octokit.issues.createComment( - // context.issue({ - // body: ON_REPO_NOT_REGISTERED, - // }) - // ) - // return - // } - - const labels = context.payload.issue.labels?.map((label) => label.name); - const isLevelLabel = labels?.includes(LEVEL_LABEL); - - if (!isLevelLabel) { - return; - } +export const onIssueOpened = async (payload: EmitterWebhookEvent<"issues.opened">["payload"]) => { + const projectId = payload.repository.id; + //TODO: + //1. check if the issue has the oss label + //2. if it has the OSS label find all the users that are currently subscribed to the repo, have the right points/permission, then send them an email + + // const isProjectRegistered = await getProject(projectId) + // if (!isProjectRegistered) { + // await octokit.issues.createComment( + // issue({ + // body: ON_REPO_NOT_REGISTERED, + // }) + // ) + // return + // } + + const labels = payload.issue.labels?.map((label) => label.name); + const isLevelLabel = labels?.includes(LEVEL_LABEL); + + if (!isLevelLabel) { + return; + } - // await sendNewIssue( - // context.payload.repository.id, - // context.payload.issue.user.id, - // context.payload.issue.id - // ) - - // await context.octokit.issues.createComment( - // context.issue({ - // body: ON_NEW_ISSUE, - // }) - // ) - }); + // await sendNewIssue( + // payload.repository.id, + // payload.issue.user.id, + // payload.issue.id + // ) + + // await octokit.issues.createComment( + // issue({ + // body: ON_NEW_ISSUE, + // }) + // ) }; -export const onAssignCommented = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.ISSUE_COMMENTED, async (context) => { - try { - const issueCommentBody = context.payload.comment.body; - const [identifier, points] = issueCommentBody.split(" "); - const issueNumber = context.payload.issue.number; - const repo = context.payload.repository.name; - const owner = context.payload.repository.owner.login; - const commenter = context.payload.comment.user.login; - const installationId = context.payload.installation?.id!; - const octokit = getOctokitInstance(installationId); - const isOssGgLabel = context.payload.issue.labels.some((label) => label.name === OSS_GG_LABEL); - - if (issueCommentBody.trim() === ASSIGN_IDENTIFIER) { - if (!isOssGgLabel) return; - - const isAssigned = context.payload.issue.assignees.length > 0; - if (isAssigned) { - const assignee = context.payload.issue.assignees[0].login; - const message = - assignee === commenter - ? `This issue is already assigned to you. 
Let's get this shipped!` - : `This issue is already assigned to another person. Please find more issues [here](https://oss.gg).`; - await octokit.issues.createComment({ - owner, - repo, - issue_number: issueNumber, - body: message, - }); - return; - } - - //users who haven't linked the issue to the PR will be able to assign themselves again even if their pr was rejected, because their names won't be added to the "Attempted:user1" comment in the issue. - const allCommentsInTheIssue = await octokit.issues.listComments({ +export const onAssignCommented = async (payload: EmitterWebhookEvent<"issue_comment.created">["payload"]) => { + try { + const issueCommentBody = payload.comment.body; + const [identifier, points] = issueCommentBody.split(" "); + const issueNumber = payload.issue.number; + const repo = payload.repository.name; + const owner = payload.repository.owner.login; + const commenter = payload.comment.user.login; + const installationId = payload.installation?.id!; + const octokit = getOctokitInstance(installationId); + const isOssGgLabel = payload.issue.labels.some((label) => label.name === OSS_GG_LABEL); + + if (issueCommentBody.trim() === ASSIGN_IDENTIFIER) { + if (!isOssGgLabel) return; + + const isAssigned = payload.issue.assignees.length > 0; + if (isAssigned) { + const assignee = payload.issue.assignees[0].login; + const message = + assignee === commenter + ? `This issue is already assigned to you. Let's get this shipped!` + : `This issue is already assigned to another person. Please find more issues [here](https://oss.gg).`; + await octokit.issues.createComment({ owner, repo, issue_number: issueNumber, - per_page: 100, + body: message, }); - let { extractedUserNames } = - await extractUserNamesFromCommentsForRejectCommand(allCommentsInTheIssue); + return; + } - const isUserPrRejectedBefore = extractedUserNames?.includes(context.payload.comment.user.login); - if (isUserPrRejectedBefore) { - await octokit.issues.createComment({ - owner, - repo, - issue_number: issueNumber, - body: "You have already attempted this issue. We will open the issue up for a different contributor to work on. Feel free to stick around in the community and pick up a different issue.", - }); - return; - } + //users who haven't linked the issue to the PR will be able to assign themselves again even if their pr was rejected, because their names won't be added to the "Attempted:user1" comment in the issue. + const allCommentsInTheIssue = await octokit.issues.listComments({ + owner, + repo, + issue_number: issueNumber, + per_page: 100, + }); + let { extractedUserNames } = await extractUserNamesFromCommentsForRejectCommand(allCommentsInTheIssue); - const { data: userIssues } = await octokit.issues.listForRepo({ + const isUserPrRejectedBefore = extractedUserNames?.includes(payload.comment.user.login); + if (isUserPrRejectedBefore) { + await octokit.issues.createComment({ owner, repo, - assignee: commenter, - state: "open", + issue_number: issueNumber, + body: "You have already attempted this issue. We will open the issue up for a different contributor to work on. Feel free to stick around in the community and pick up a different issue.", }); + return; + } - if (userIssues.length > 0) { - const assignedIssue = userIssues[0]; - await octokit.issues.createComment({ - owner, - repo, - issue_number: issueNumber, - body: `You already have an open issue assigned to you [here](${assignedIssue.html_url}). 
Once that's closed or unassigned, only then we recommend you to take up more.`, - }); - return; - } + const { data: userIssues } = await octokit.issues.listForRepo({ + owner, + repo, + assignee: commenter, + state: "open", + }); - /* + if (userIssues.length > 0) { + const assignedIssue = userIssues[0]; + await octokit.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: `You already have an open issue assigned to you [here](${assignedIssue.html_url}). Once that's closed or unassigned, only then we recommend you to take up more.`, + }); + return; + } + + /* //checking if the current level of user has the power to solve the issue on which the /assign comment was made. - const currentRepo = await getRepositoryByGithubId(context.payload.repository.id); - const user = await getUserByGithubId(context.payload.comment.user.id); + const currentRepo = await getRepositoryByGithubId(payload.repository.id); + const user = await getUserByGithubId(payload.comment.user.id); if (currentRepo && user) { const userTotalPoints = await getPointsForPlayerInRepoByRepositoryId(currentRepo.id, user.id); @@ -157,7 +153,7 @@ export const onAssignCommented = async (webhooks: Webhooks) => { const levels = currentRepo?.levels as TLevel[]; const modifiedTagsArray = calculateAssignabelNonAssignableIssuesForUserInALevel(levels); //gets all assignable tags be it from the current level and from lower levels. - const labels = context.payload.issue.labels; + const labels = payload.issue.labels; const tags = modifiedTagsArray.find((item) => item.levelId === currentLevelOfUser?.id); //finds the curent level in the modifiedTagsArray. const isAssignable = labels.some((label) => { @@ -175,277 +171,272 @@ export const onAssignCommented = async (webhooks: Webhooks) => { } } */ - await octokit.issues.addAssignees({ - owner, - repo, - issue_number: issueNumber, - assignees: [commenter], - }); + await octokit.issues.addAssignees({ + owner, + repo, + issue_number: issueNumber, + assignees: [commenter], + }); - //send trigger event to wait for 36hrs then send a reminder if the user has not created a pull request - try { - if (context.payload.installation?.id) { - await issueReminderTask.trigger({ - issueNumber, - repo, - owner, - commenter, - installationId: context.payload.installation.id ?? "", - }); - } - } catch (error) { - console.error("Error sending event:", error.message); - if (error.response) { - const responseText = await error.response.text(); // Capture response text - console.error("Response:", responseText); - } + //send trigger event to wait for 36hrs then send a reminder if the user has not created a pull request + try { + if (payload.installation?.id) { + await issueReminderTask.trigger({ + issueNumber, + repo, + owner, + commenter, + installationId: payload.installation.id ?? "", + }); + } + } catch (error) { + console.error("Error sending event:", error.message); + if (error.response) { + const responseText = await error.response.text(); // Capture response text + console.error("Response:", responseText); } + } + await octokit.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: `Assigned to @${commenter}! 
Please open a draft PR linking this issue within 48h ⚠ī¸ If we can't detect a PR from you linking this issue in 48h, you'll be unassigned automatically 🕹ī¸ Excited to have you ship this 🚀`, + }); + } + + if (identifier === CREATE_IDENTIFIER) { + //check if the user is a member of the repository in our database + const isMember = await isMemberOfRepository(commenter, installationId); + if (!isMember) { await octokit.issues.createComment({ owner, repo, issue_number: issueNumber, - body: `Assigned to @${commenter}! Please open a draft PR linking this issue within 48h ⚠ī¸ If we can't detect a PR from you linking this issue in 48h, you'll be unassigned automatically 🕹ī¸ Excited to have you ship this 🚀`, + body: `@${commenter}, ${ON_USER_NOT_REGISTERED}`, }); + return; } - - if (identifier === CREATE_IDENTIFIER) { - //check if the user is a member of the repository in our database - const isMember = await isMemberOfRepository(commenter, installationId); - if (!isMember) { + if (isOssGgLabel) { + return; + } else { + if (isNaN(parseInt(points))) { await octokit.issues.createComment({ owner, repo, issue_number: issueNumber, - body: `@${commenter}, ${ON_USER_NOT_REGISTERED}`, + body: `@${commenter}, ${POINT_IS_NOT_A_NUMBER}`, }); return; } - if (isOssGgLabel) { - return; - } else { - if (isNaN(parseInt(points))) { - await octokit.issues.createComment({ - owner, - repo, - issue_number: issueNumber, - body: `@${commenter}, ${POINT_IS_NOT_A_NUMBER}`, - }); - return; - } - await octokit.issues.addLabels({ - owner: owner, - repo: repo, - issue_number: issueNumber, - labels: [OSS_GG_LABEL, `:joystick: ${points} points`], - }); - await octokit.issues.createComment({ - owner: owner, - repo: repo, - issue_number: issueNumber, - body: ON_NEW_ISSUE, - }); - } - } - } catch (err) { - console.error(err); - } - }); -}; - -export const onUnassignCommented = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.ISSUE_COMMENTED, async (context) => { - try { - const issueCommentBody = context.payload.comment.body; - if (issueCommentBody.trim() !== UNASSIGN_IDENTIFIER) { - return; - } - - const isOssGgLabel = context.payload.issue.labels.some((label) => label.name === OSS_GG_LABEL); - if (!isOssGgLabel) { - return; - } - - const issueNumber = context.payload.issue.number; - const repo = context.payload.repository.name; - const owner = context.payload.repository.owner.login; - const commenter = context.payload.comment.user.login; - const octokit = getOctokitInstance(context.payload.installation?.id!); - - const isAssigned = context.payload.issue.assignees.length > 0; - if (!isAssigned) { - await octokit.issues.createComment({ - owner, - repo, - issue_number: issueNumber, - body: "This issue is not assigned to anyone.", - }); - return; - } - - const assignee = context.payload.issue.assignees[0].login; - if (assignee === commenter) { - await octokit.issues.removeAssignees({ - owner, - repo, + await octokit.issues.addLabels({ + owner: owner, + repo: repo, issue_number: issueNumber, - assignees: [assignee], + labels: [OSS_GG_LABEL, `:joystick: ${points} points`], }); await octokit.issues.createComment({ - owner, - repo, + owner: owner, + repo: repo, issue_number: issueNumber, - body: "Issue unassigned.", + body: ON_NEW_ISSUE, }); - return; } + } + } catch (err) { + console.error(err); + } +}; - const ossGgRepo = await getRepositoryByGithubId(context.payload.repository.id); - const usersThatCanUnassign = ossGgRepo?.installation.memberships.map((m) => m.userId) || []; - const ossGgUsers = await Promise.all( - 
usersThatCanUnassign.map(async (userId) => { - const user = await getUser(userId); - return user?.githubId; - }) - ); +export const onUnassignCommented = async ( + payload: EmitterWebhookEvent<"issue_comment.created">["payload"] +) => { + try { + const issueCommentBody = payload.comment.body; + if (issueCommentBody.trim() !== UNASSIGN_IDENTIFIER) { + return; + } - const isUserAllowedToUnassign = ossGgUsers?.includes(context.payload.comment.user.id); - if (!isUserAllowedToUnassign) { - await octokit.issues.createComment({ - owner, - repo, - issue_number: issueNumber, - body: "You cannot unassign this issue as it is not assigned to you.", - }); - return; - } + const isOssGgLabel = payload.issue.labels.some((label) => label.name === OSS_GG_LABEL); + if (!isOssGgLabel) { + return; + } + + const issueNumber = payload.issue.number; + const repo = payload.repository.name; + const owner = payload.repository.owner.login; + const commenter = payload.comment.user.login; + const octokit = getOctokitInstance(payload.installation?.id!); + const isAssigned = payload.issue.assignees.length > 0; + if (!isAssigned) { await octokit.issues.createComment({ owner, repo, issue_number: issueNumber, - body: "Issue unassigned.", + body: "This issue is not assigned to anyone.", }); + return; + } + const assignee = payload.issue.assignees[0].login; + if (assignee === commenter) { await octokit.issues.removeAssignees({ owner, repo, issue_number: issueNumber, assignees: [assignee], }); - } catch (err) { - console.error(err); + await octokit.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: "Issue unassigned.", + }); + return; } - }); + + const ossGgRepo = await getRepositoryByGithubId(payload.repository.id); + const usersThatCanUnassign = ossGgRepo?.installation?.memberships?.map((m) => m.userId) || []; + const ossGgUsers = await Promise.all( + usersThatCanUnassign.map(async (userId) => { + const user = await getUser(userId); + return user?.githubId; + }) + ); + + const isUserAllowedToUnassign = ossGgUsers?.includes(payload.comment.user.id); + if (!isUserAllowedToUnassign) { + await octokit.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: "You cannot unassign this issue as it is not assigned to you.", + }); + return; + } + + await octokit.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: "Issue unassigned.", + }); + + await octokit.issues.removeAssignees({ + owner, + repo, + issue_number: issueNumber, + assignees: [assignee], + }); + } catch (err) { + console.error(err); + } }; -export const onAwardPoints = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.ISSUE_COMMENTED, async (context) => { - try { - const repo = context.payload.repository.name; - const issueCommentBody = context.payload.comment.body; - const awardPointsRegex = new RegExp(`${AWARD_POINTS_IDENTIFIER}\\s+(\\d+)`); - const match = issueCommentBody.match(awardPointsRegex); - const isPR = !!context.payload.issue.pull_request; - const issueNumber = isPR ? 
context.payload.issue.number : undefined; - const owner = context.payload.repository.owner.login; - let comment: string = ""; - - if (match) { - const points = parseInt(match[1], 10); - - if (!issueNumber) { - console.error("Comment is not on a PR."); - return; - } +export const onAwardPoints = async (payload: EmitterWebhookEvent<"issue_comment.created">["payload"]) => { + try { + const repo = payload.repository.name; + const issueCommentBody = payload.comment.body; + const awardPointsRegex = new RegExp(`${AWARD_POINTS_IDENTIFIER}\\s+(\\d+)`); + const match = issueCommentBody.match(awardPointsRegex); + const isPR = !!payload.issue.pull_request; + const issueNumber = isPR ? payload.issue.number : undefined; + const owner = payload.repository.owner.login; + let comment: string = ""; + + if (match) { + const points = parseInt(match[1], 10); + + if (!issueNumber) { + console.error("Comment is not on a PR."); + return; + } - const ossGgRepo = await getRepositoryByGithubId(context.payload.repository.id); + const ossGgRepo = await getRepositoryByGithubId(payload.repository.id); - let usersThatCanAwardPoints = ossGgRepo?.installation.memberships.map((m) => m.userId); - if (!usersThatCanAwardPoints) { - throw new Error("No admins for the given repo in oss.gg!"); - } - const ossGgUsers = await Promise.all( - usersThatCanAwardPoints.map(async (userId) => { - const user = await getUser(userId); - return user?.githubId; - }) - ); - const isUserAllowedToAwardPoints = ossGgUsers?.includes(context.payload.comment.user.id); - if (!isUserAllowedToAwardPoints) { - comment = "You are not allowed to award points! Please contact an admin."; + let usersThatCanAwardPoints = ossGgRepo?.installation?.memberships?.map((m) => m.userId); + if (!usersThatCanAwardPoints) { + throw new Error("No admins for the given repo in oss.gg!"); + } + const ossGgUsers = await Promise.all( + usersThatCanAwardPoints.map(async (userId) => { + const user = await getUser(userId); + return user?.githubId; + }) + ); + const isUserAllowedToAwardPoints = ossGgUsers?.includes(payload.comment.user.id); + if (!isUserAllowedToAwardPoints) { + comment = "You are not allowed to award points! Please contact an admin."; + } else { + if (!ossGgRepo) { + comment = "If you are the repo owner, please register at oss.gg to be able to award points"; } else { - if (!ossGgRepo) { - comment = "If you are the repo owner, please register at oss.gg to be able to award points"; - } else { - const prAuthorUsername = context.payload.issue.user.login; - - //process user points - let user = await processUserPoints({ - installationId: context.payload.installation?.id!, - prAuthorGithubId: context.payload.issue.user.id, - prAuthorUsername: prAuthorUsername, - avatarUrl: context.payload.issue.user.avatar_url, - points, - url: context.payload.comment.html_url, - repoId: ossGgRepo?.id, - comment, - }); + const prAuthorUsername = payload.issue.user.login; + + //process user points + let user = await processUserPoints({ + installationId: payload.installation?.id!, + prAuthorGithubId: payload.issue.user.id, + prAuthorUsername: prAuthorUsername, + avatarUrl: payload.issue.user.avatar_url, + points, + url: payload.comment.html_url, + repoId: ossGgRepo?.id, + comment, + }); - comment = - `Awarding ${user.login}: ${points} points 🕹ī¸ Well done! Check out your new contribution on [oss.gg/${user.login}](https://oss.gg/${user.login})` + - " " + - comment; + comment = + `Awarding ${user.login}: ${points} points 🕹ī¸ Well done! 
Check out your new contribution on [oss.gg/${user.login}](https://oss.gg/${user.login})` + + " " + + comment; - await discordPointMessageTask.trigger({ - channelId: DISCORD_CHANNEL_ID, - message: DISCORD_AWARD_POINTS_MESSAGE(user.name ?? prAuthorUsername, points), - }); - } + await discordPointMessageTask.trigger({ + channelId: DISCORD_CHANNEL_ID, + message: DISCORD_AWARD_POINTS_MESSAGE(user.name ?? prAuthorUsername, points), + }); } - - //post comment - postComment({ - installationId: context.payload.installation?.id!, - body: comment, - issueNumber: issueNumber, - repo, - owner, - }); } - } catch (err) { - console.error(err); - throw new Error(err); + + //post comment + postComment({ + installationId: payload.installation?.id!, + body: comment, + issueNumber: issueNumber, + repo, + owner, + }); } - }); + } catch (err) { + console.error(err); + throw new Error(err); + } }; -export const onPullRequestMerged = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.PULL_REQUEST_CLOSED, async (context) => { - const { pull_request: pullRequest, repository, installation } = context.payload; +export const onPullRequestMerged = async (payload: EmitterWebhookEvent<"pull_request">["payload"]) => { + const { pull_request: pullRequest, repository, installation } = payload; - if (!pullRequest.merged) { - console.log("Pull request was not merged."); - return; - } + if (!pullRequest.merged) { + console.log("Pull request was not merged."); + return; + } - const { - name: repo, - owner: { login: owner }, - } = repository; - const octokit = getOctokitInstance(installation?.id!); + const { + name: repo, + owner: { login: owner }, + } = repository; + const octokit = getOctokitInstance(installation?.id!); - const ossGgRepo = await getRepositoryByGithubId(repository.id); - if (!ossGgRepo) { - console.log("Repository is not enrolled in oss.gg."); - return; - } + const ossGgRepo = await getRepositoryByGithubId(repository.id); + if (!ossGgRepo) { + console.log("Repository is not enrolled in oss.gg."); + return; + } - await processPullRequest(context, octokit, pullRequest, repo, owner, ossGgRepo.id); - }); + await processPullRequest(payload, octokit, pullRequest, repo, owner, ossGgRepo.id); }; -async function processPullRequest(context, octokit, pullRequest, repo, owner, ossGgRepoId) { +async function processPullRequest(payload, octokit, pullRequest, repo, owner, ossGgRepoId) { const validPrLabels = filterValidLabels(pullRequest.labels); const isPrOssGgLabel = checkOssGgLabel(validPrLabels); @@ -453,7 +444,7 @@ async function processPullRequest(context, octokit, pullRequest, repo, owner, os const points = extractPointsFromLabels(validPrLabels); if (points) { await processAndComment({ - context, + payload, pullRequest, repo, owner, @@ -466,7 +457,7 @@ async function processPullRequest(context, octokit, pullRequest, repo, owner, os } console.log(`Pull request #${pullRequest.number} does not have the 🕹ī¸ oss.gg label.`); - await processLinkedIssues(context, octokit, pullRequest, repo, owner, ossGgRepoId); + await processLinkedIssues(payload, octokit, pullRequest, repo, owner, ossGgRepoId); } async function processLinkedIssues(context, octokit, pullRequest, repo, owner, ossGgRepoId) { @@ -481,7 +472,7 @@ async function processLinkedIssues(context, octokit, pullRequest, repo, owner, o } } -async function processIssue(context, octokit, pullRequest, repo, owner, issueNumber, ossGgRepoId) { +async function processIssue(payload, octokit, pullRequest, repo, owner, issueNumber, ossGgRepoId) { const { data: issue } = 
await octokit.issues.get({ owner, repo, issue_number: issueNumber }); const validLabels = filterValidLabels(issue.labels); @@ -494,7 +485,7 @@ async function processIssue(context, octokit, pullRequest, repo, owner, issueNum if (points) { console.log(`Points for issue #${issueNumber}:`, points); await processAndComment({ - context, + payload, pullRequest, repo, owner, @@ -507,151 +498,149 @@ async function processIssue(context, octokit, pullRequest, repo, owner, issueNum } } -export const onRejectCommented = async (webhooks: Webhooks) => { - webhooks.on(EVENT_TRIGGERS.ISSUE_COMMENTED, async (context) => { - try { - const issueCommentBody = context.payload.comment.body; - const prNumber = context.payload.issue.number; //this is pr number if comment made from pr,else issue number when made from issue. - const repo = context.payload.repository.name; - const owner = context.payload.repository.owner.login; - const octokit = getOctokitInstance(context.payload.installation?.id!); - const rejectRegex = new RegExp(`${REJECT_IDENTIFIER}\\s+(.*)`, "i"); - const match = issueCommentBody.match(rejectRegex); - const isCommentOnPullRequest = context.payload.issue.pull_request; - let comment: string = ""; - - if (!match) { - return; - } +export const onRejectCommented = async (payload: EmitterWebhookEvent<"issue_comment.created">["payload"]) => { + try { + const issueCommentBody = payload.comment.body; + const prNumber = payload.issue.number; //this is pr number if comment made from pr,else issue number when made from issue. + const repo = payload.repository.name; + const owner = payload.repository.owner.login; + const octokit = getOctokitInstance(payload.installation?.id!); + const rejectRegex = new RegExp(`${REJECT_IDENTIFIER}\\s+(.*)`, "i"); + const match = issueCommentBody.match(rejectRegex); + const isCommentOnPullRequest = payload.issue.pull_request; + let comment: string = ""; + + if (!match) { + return; + } - if (!isCommentOnPullRequest) { - await octokit.issues.createComment({ - owner, - repo, - issue_number: prNumber, - body: `The command ${REJECT_IDENTIFIER} only works in PRs, not on issues. Please use it in a Pull Request.`, - }); - return; - } + if (!isCommentOnPullRequest) { + await octokit.issues.createComment({ + owner, + repo, + issue_number: prNumber, + body: `The command ${REJECT_IDENTIFIER} only works in PRs, not on issues. 
Please use it in a Pull Request.`, + }); + return; + } - const message = match[1]; - const ossGgRepo = await getRepositoryByGithubId(context.payload.repository.id); + const message = match[1]; + const ossGgRepo = await getRepositoryByGithubId(payload.repository.id); - let usersThatCanRejectPr = ossGgRepo?.installation.memberships.map((m) => m.userId); - if (!usersThatCanRejectPr) { - throw new Error("No admins for the given repo in oss.gg!"); - } - const ossGgUsers = await Promise.all( - usersThatCanRejectPr.map(async (userId) => { - const user = await getUser(userId); - return user?.githubId; - }) - ); - const isUserAllowedToRejectPr = ossGgUsers?.includes(context.payload.comment.user.id); - if (!isUserAllowedToRejectPr) { - comment = "You are not allowed to reject a pull request."; + let usersThatCanRejectPr = ossGgRepo?.installation?.memberships?.map((m) => m.userId); + if (!usersThatCanRejectPr) { + throw new Error("No admins for the given repo in oss.gg!"); + } + const ossGgUsers = await Promise.all( + usersThatCanRejectPr.map(async (userId) => { + const user = await getUser(userId); + return user?.githubId; + }) + ); + const isUserAllowedToRejectPr = ossGgUsers?.includes(payload.comment.user.id); + if (!isUserAllowedToRejectPr) { + comment = "You are not allowed to reject a pull request."; + await octokit.issues.createComment({ + owner, + repo, + issue_number: prNumber, + body: comment, + }); + return; + } else { + const extractIssueNumbersFromPrBody = extractIssueNumbers(payload.issue.body || ""); + const prAuthor = payload.issue.user.login; + const rejectionMessage = REJECTION_MESSAGE_TEMPLATE(prAuthor, message); + + await octokit.issues.createComment({ + owner, + repo, + issue_number: prNumber, + body: rejectionMessage, + }); + + await octokit.pulls.update({ + owner, + repo, + pull_number: prNumber, + state: "closed", + }); + + if (extractIssueNumbersFromPrBody.length === 0) { await octokit.issues.createComment({ owner, repo, issue_number: prNumber, - body: comment, + body: "This PR is not linked to an issue. Please update the issue status manually.", }); return; } else { - const extractIssueNumbersFromPrBody = extractIssueNumbers(context.payload.issue.body || ""); - const prAuthor = context.payload.issue.user.login; - const rejectionMessage = REJECTION_MESSAGE_TEMPLATE(prAuthor, message); - - await octokit.issues.createComment({ - owner, - repo, - issue_number: prNumber, - body: rejectionMessage, - }); - - await octokit.pulls.update({ - owner, - repo, - pull_number: prNumber, - state: "closed", - }); + extractIssueNumbersFromPrBody.forEach(async (issueNumber: number) => { + //assumption: taking only first 100 comments because first rejection will happen in first 100 comments.If comments are more than 100 then such heavy discussed issue mostly would be given to a core team member.Even if it is given to a non core team member, our requirements would fulfill within 100 comments. + const allCommentsInTheIssue = await octokit.issues.listComments({ + owner, + repo, + issue_number: issueNumber, + per_page: 100, + }); - if (extractIssueNumbersFromPrBody.length === 0) { - await octokit.issues.createComment({ + const issue = await octokit.issues.get({ owner, repo, - issue_number: prNumber, - body: "This PR is not linked to an issue. 
Please update the issue status manually.", + issue_number: issueNumber, }); - return; - } else { - extractIssueNumbersFromPrBody.forEach(async (issueNumber: number) => { - //assumption: taking only first 100 comments because first rejection will happen in first 100 comments.If comments are more than 100 then such heavy discussed issue mostly would be given to a core team member.Even if it is given to a non core team member, our requirements would fulfill within 100 comments. - const allCommentsInTheIssue = await octokit.issues.listComments({ - owner, - repo, - issue_number: issueNumber, - per_page: 100, - }); - const issue = await octokit.issues.get({ + const issueAssignee = issue.data.assignees ? issue.data.assignees[0]?.login : ""; + + if (issueAssignee !== prAuthor) { + return; + } + + const { hasCommentWithAttemptedUserNames } = + checkFirstOccurenceForAttemptedComment(allCommentsInTheIssue); + + if (!hasCommentWithAttemptedUserNames) { + await octokit.issues.createComment({ owner, repo, issue_number: issueNumber, + body: `Attempted:${issueAssignee}`, }); + } else { + const { extractedUserNames, commentId } = + await extractUserNamesFromCommentsForRejectCommand(allCommentsInTheIssue); - const issueAssignee = issue.data.assignees ? issue.data.assignees[0]?.login : ""; - - if (issueAssignee !== prAuthor) { - return; - } - - const { hasCommentWithAttemptedUserNames } = - checkFirstOccurenceForAttemptedComment(allCommentsInTheIssue); + extractedUserNames.push(issueAssignee); - if (!hasCommentWithAttemptedUserNames) { - await octokit.issues.createComment({ + commentId && + (await octokit.issues.updateComment({ owner, repo, issue_number: issueNumber, - body: `Attempted:${issueAssignee}`, - }); - } else { - const { extractedUserNames, commentId } = - await extractUserNamesFromCommentsForRejectCommand(allCommentsInTheIssue); - - extractedUserNames.push(issueAssignee); - - commentId && - (await octokit.issues.updateComment({ - owner, - repo, - issue_number: issueNumber, - comment_id: commentId, - body: `Attempted:${extractedUserNames}`, - })); - } + comment_id: commentId, + body: `Attempted:${extractedUserNames}`, + })); + } - await octokit.issues.createComment({ - owner, - repo, - issue_number: issueNumber, - body: "The issue is up for grabs again! Feel free to assign yourself using /assign.", - }); + await octokit.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: "The issue is up for grabs again! 
Feel free to assign yourself using /assign.", + }); - await octokit.issues.removeAssignees({ - owner, - repo, - issue_number: issueNumber, - assignees: [issueAssignee], - }); + await octokit.issues.removeAssignees({ + owner, + repo, + issue_number: issueNumber, + assignees: [issueAssignee], }); - } + }); } - } catch (err) { - console.error(err); } - }); + } catch (err) { + console.error(err); + } }; const extractUserNamesFromCommentsForRejectCommand = async (allCommentsInTheIssue) => { const { indexCommentWithAttemptedUserNames, hasCommentWithAttemptedUserNames } = diff --git a/lib/github/index.ts b/lib/github/index.ts index 7ae5e60..8de19de 100644 --- a/lib/github/index.ts +++ b/lib/github/index.ts @@ -1,7 +1,13 @@ import { onBountyCreated, onBountyPullRequestMerged } from "@/lib/github/hooks/bounty"; -import { Webhooks, createNodeMiddleware } from "@octokit/webhooks"; +import { EmitterWebhookEvent, EmitterWebhookEventName } from "@octokit/webhooks"; -import { GITHUB_APP_WEBHOOK_SECRET } from "../constants"; +import { + ASSIGN_IDENTIFIER, + AWARD_POINTS_IDENTIFIER, + BOUNTY_IDENTIFIER, + REJECT_IDENTIFIER, + UNASSIGN_IDENTIFIER, +} from "../constants"; import { onInstallationCreated } from "./hooks/installation"; import { onAssignCommented, @@ -12,22 +18,52 @@ import { onUnassignCommented, } from "./hooks/issue"; -const webhooks = new Webhooks({ - secret: GITHUB_APP_WEBHOOK_SECRET, -}); +export const registerHooks = async ( + event: EmitterWebhookEventName, + body: EmitterWebhookEvent<"issue_comment.created" | "pull_request" | "installation" | "issues">["payload"] +) => { + switch (event) { + case "issues": { + if (body.action === "opened") { + onIssueOpened(body as EmitterWebhookEvent<"issues.opened">["payload"]); + } + } + + case "issue_comment": { + if (body.action === "created") { + const payload = body as EmitterWebhookEvent<"issue_comment.created">["payload"]; + const commentBody = payload.comment.body; + const handlers = { + [ASSIGN_IDENTIFIER]: onAssignCommented, + [UNASSIGN_IDENTIFIER]: onUnassignCommented, + [AWARD_POINTS_IDENTIFIER]: onAwardPoints, + [REJECT_IDENTIFIER]: onRejectCommented, + [BOUNTY_IDENTIFIER]: onBountyCreated, + }; + + for (const [identifier, handler] of Object.entries(handlers)) { + if (commentBody.startsWith(identifier)) { + handler(payload); + break; + } + } + } + break; + } -export const webhookMiddleware = createNodeMiddleware(webhooks, { - path: "/api/github-webhook", -}); + case "installation": { + if (body.action === "created") { + onInstallationCreated(body as EmitterWebhookEvent<"installation">["payload"]); + } + } + case "pull_request": { + if (body.action === "closed") { + onPullRequestMerged(body as EmitterWebhookEvent<"pull_request">["payload"]); + } -export const registerHooks = async () => { - onIssueOpened(webhooks); - onInstallationCreated(webhooks); - onAssignCommented(webhooks); - onUnassignCommented(webhooks); - onAwardPoints(webhooks); - onRejectCommented(webhooks); - onBountyCreated(webhooks); - onBountyPullRequestMerged(webhooks); - onPullRequestMerged(webhooks); + if (body.action === "closed") { + onBountyPullRequestMerged(body as EmitterWebhookEvent<"pull_request.closed">["payload"]); + } + } + } }; diff --git a/lib/github/service.ts b/lib/github/service.ts index 1b1f1fd..4caa74f 100644 --- a/lib/github/service.ts +++ b/lib/github/service.ts @@ -6,124 +6,132 @@ import { Octokit } from "@octokit/rest"; import { unstable_cache } from "next/cache"; import { GITHUB_APP_ACCESS_TOKEN, GITHUB_CACHE_REVALIDATION_INTERVAL, OSS_GG_LABEL } from 
"../constants"; +import { githubCache } from "./cache"; import { extractPointsFromLabels } from "./utils"; type PullRequestStatus = "open" | "merged" | "closed" | undefined; const octokit = new Octokit({ auth: GITHUB_APP_ACCESS_TOKEN }); -export const getPullRequestsByGithubLogin = async ( +export const getPullRequestsByGithubLogin = ( playerRepositoryIds: string[], githubLogin: string, status?: PullRequestStatus -): Promise => { - if (!playerRepositoryIds || playerRepositoryIds.length === 0) { - console.warn("No repository IDs provided. Returning empty array."); - return []; - } - - const pullRequests: TPullRequest[] = []; - - let statusQuery = "is:pr"; - if (status === "open") statusQuery += " is:open"; - else if (status === "merged") statusQuery += " is:merged"; - else if (status === "closed") statusQuery += " is:closed -is:merged"; - - const repoQuery = playerRepositoryIds.map((id) => `repo:${id}`).join(" "); - const query = `${repoQuery} ${statusQuery} author:${githubLogin}`; - - try { - const { data } = await octokit.search.issuesAndPullRequests({ - q: query, - per_page: 99, - sort: "created", - order: "desc", - }); - - for (const pr of data.items) { - // console.log(`Complete PR object: ${JSON.stringify(pr, null, 2)}`); - - let prStatus: "open" | "merged" | "closed"; - if (pr.state === "open") { - prStatus = "open"; - } else if (pr.pull_request?.merged_at) { - prStatus = "merged"; - } else { - prStatus = "closed"; +) => + unstable_cache( + async (): Promise => { + if (!playerRepositoryIds || playerRepositoryIds.length === 0) { + console.warn("No repository IDs provided. Returning empty array."); + return []; } - const prLabels = pr.labels.filter((label) => label.name !== undefined) as { name: string }[]; + const pullRequests: TPullRequest[] = []; + + let statusQuery = "is:pr"; + if (status === "open") statusQuery += " is:open"; + else if (status === "merged") statusQuery += " is:merged"; + else if (status === "closed") statusQuery += " is:closed -is:merged"; + + const repoQuery = playerRepositoryIds.map((id) => `repo:${id}`).join(" "); + const query = `${repoQuery} ${statusQuery} author:${githubLogin}`; try { - const pullRequest: TPullRequest = ZPullRequest.parse({ - title: pr.title, - href: pr.html_url, - author: pr.user?.login || "", - repositoryFullName: pr.repository_url.split("/").slice(-2).join("/"), - dateOpened: pr.created_at, - dateMerged: pr.pull_request?.merged_at || null, - dateClosed: pr.closed_at, - status: prStatus, - points: prLabels ? extractPointsFromLabels(prLabels) : null, + const { data } = await octokit.search.issuesAndPullRequests({ + q: query, + per_page: 99, + sort: "created", + order: "desc", }); - pullRequests.push(pullRequest); + for (const pr of data.items) { + let prStatus: "open" | "merged" | "closed"; + if (pr.state === "open") { + prStatus = "open"; + } else if (pr.pull_request?.merged_at) { + prStatus = "merged"; + } else { + prStatus = "closed"; + } + + const prLabels = pr.labels.filter((label) => label.name !== undefined) as { name: string }[]; + + try { + const pullRequest: TPullRequest = ZPullRequest.parse({ + title: pr.title, + href: pr.html_url, + author: pr.user?.login || "", + repositoryFullName: pr.repository_url.split("/").slice(-2).join("/"), + dateOpened: pr.created_at, + dateMerged: pr.pull_request?.merged_at || null, + dateClosed: pr.closed_at, + status: prStatus, + points: prLabels ? 
extractPointsFromLabels(prLabels) : null, + }); + + pullRequests.push(pullRequest); + } catch (error) { + console.error(`Error parsing pull request: ${pr.title}`, error); + } + } } catch (error) { - console.error(`Error parsing pull request: ${pr.title}`, error); + console.error(`Error fetching or processing pull requests:`, error); } + + // Sort pullRequests by dateOpened in descending order + pullRequests.sort((a, b) => new Date(b.dateOpened).getTime() - new Date(a.dateOpened).getTime()); + + return pullRequests; + }, + [`getPullRequests-${githubLogin}-${status}-${playerRepositoryIds.join(",")}`], + { + tags: [githubCache.tag.byGithubLogin(githubLogin)], + revalidate: GITHUB_CACHE_REVALIDATION_INTERVAL, } - } catch (error) { - console.error(`Error fetching or processing pull requests:`, error); - } - - // Sort pullRequests by dateOpened in descending order - pullRequests.sort((a, b) => new Date(b.dateOpened).getTime() - new Date(a.dateOpened).getTime()); - - return pullRequests; -}; - -export const getAllOssGgIssuesOfRepos = unstable_cache( - async (repoGithubIds: number[]): Promise => { - const githubHeaders = { - Authorization: `Bearer ${GITHUB_APP_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - }; - - const allIssues = await Promise.all( - repoGithubIds.map(async (repoGithubId) => { - const repoResponse = await fetch(`https://api.github.com/repositories/${repoGithubId}`, { - headers: githubHeaders, - }); - const repoData = await repoResponse.json(); - - const issuesResponse = await fetch( - `https://api.github.com/search/issues?q=repo:${repoData.full_name}+is:issue+is:open+label:"${OSS_GG_LABEL}"&sort=created&order=desc`, - { headers: githubHeaders } - ); - const issuesData = await issuesResponse.json(); - const validatedData = ZGithubApiResponseSchema.parse(issuesData); - - // Map the GitHub API response to TPullRequest format - return validatedData.items.map((issue) => - ZPullRequest.parse({ - title: issue.title, - href: issue.html_url, - author: issue.user.login, - repositoryFullName: repoData.full_name, - dateOpened: issue.created_at, - dateMerged: null, - dateClosed: issue.closed_at, - status: "open", - points: extractPointsFromLabels(issue.labels), - }) - ); - }) - ); - - return allIssues.flat(); - }, - [`getOpenIssues`], - { - revalidate: GITHUB_CACHE_REVALIDATION_INTERVAL, - } -); + )(); + +export const getAllOssGgIssuesOfRepos = (repoGithubIds: number[]) => + unstable_cache( + async (): Promise => { + const githubHeaders = { + Authorization: `Bearer ${GITHUB_APP_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + }; + + const allIssues = await Promise.all( + repoGithubIds.map(async (repoGithubId) => { + const repoResponse = await fetch(`https://api.github.com/repositories/${repoGithubId}`, { + headers: githubHeaders, + }); + const repoData = await repoResponse.json(); + + const issuesResponse = await fetch( + `https://api.github.com/search/issues?q=repo:${repoData.full_name}+is:issue+is:open+label:"${OSS_GG_LABEL}"&sort=created&order=desc`, + { headers: githubHeaders } + ); + const issuesData = await issuesResponse.json(); + const validatedData = ZGithubApiResponseSchema.parse(issuesData); + + // Map the GitHub API response to TPullRequest format + return validatedData.items.map((issue) => + ZPullRequest.parse({ + title: issue.title, + href: issue.html_url, + author: issue.user.login, + repositoryFullName: repoData.full_name, + dateOpened: issue.created_at, + dateMerged: null, + dateClosed: issue.closed_at, + status: "open", + points: 
extractPointsFromLabels(issue.labels), + }) + ); + }) + ); + + return allIssues.flat(); + }, + [`getAllOssGgIssuesOfRepos-${repoGithubIds.join("-")}`], + { + revalidate: GITHUB_CACHE_REVALIDATION_INTERVAL, + } + )(); diff --git a/lib/github/utils.ts b/lib/github/utils.ts index a189994..5d1ca1b 100644 --- a/lib/github/utils.ts +++ b/lib/github/utils.ts @@ -78,7 +78,7 @@ export const extractPointsFromLabels = (labels: { name: string }[]): number | nu // Helper to process user points and post a comment export const processAndComment = async ({ - context, + payload, pullRequest, repo, owner, @@ -86,7 +86,7 @@ export const processAndComment = async ({ issueNumber, ossGgRepoId, }: { - context: any; + payload: any; pullRequest: any; repo: string; owner: string; @@ -95,7 +95,7 @@ export const processAndComment = async ({ ossGgRepoId: string; }) => { const user = await processUserPoints({ - installationId: context.payload.installation?.id!, + installationId: payload.installation?.id!, prAuthorGithubId: pullRequest.user.id, prAuthorUsername: pullRequest.user.login, avatarUrl: pullRequest.user.avatar_url, @@ -109,7 +109,7 @@ export const processAndComment = async ({ // Post comment on the issue or pull request postComment({ - installationId: context.payload.installation?.id!, + installationId: payload.installation?.id!, body: comment, issueNumber: issueNumber, repo, diff --git a/lib/repository/cache.ts b/lib/repository/cache.ts new file mode 100644 index 0000000..fd98d91 --- /dev/null +++ b/lib/repository/cache.ts @@ -0,0 +1,32 @@ +import { revalidateTag } from "next/cache"; + +interface RevalidateProps { + githubId?: string; + id?: string; + userId?: string; +} + +export const repositoryCache = { + tag: { + byGithubId(githubId: number) { + return `repository-${githubId}`; + }, + byId(id: string) { + return `repository-${id}`; + }, + byUserId(userId: string) { + return `repository-${userId}`; + }, + }, + revalidate({ githubId, id, userId }: RevalidateProps): void { + if (githubId) { + revalidateTag(this.tag.byGithubId(githubId)); + } + if (id) { + revalidateTag(this.tag.byId(id)); + } + if (userId) { + revalidateTag(this.tag.byUserId(userId)); + } + }, +}; diff --git a/lib/repository/service.ts b/lib/repository/service.ts index 1e7a6b6..c21159b 100644 --- a/lib/repository/service.ts +++ b/lib/repository/service.ts @@ -1,12 +1,15 @@ import { db } from "@/lib/db"; import { TRepository } from "@/types/repository"; import { Prisma } from "@prisma/client"; +import { unstable_cache } from "next/cache"; + +import { repositoryCache } from "./cache"; /** * Fetches all repositories from the database. * @returns An array of repositories. */ -export const getAllRepositories = async (): Promise => { +export const getAllRepositories = async () => { try { const repositories = await db.repository.findMany({ where: { @@ -28,58 +31,74 @@ export const getAllRepositories = async (): Promise => { * @returns A repository. 
*/ -export const getRepositoryByGithubId = async (githubId: number) => { - try { - const repository = await db.repository.findFirst({ - where: { - githubId, - }, - include: { - installation: { +export const getRepositoryByGithubId = (githubId: number): Promise => + unstable_cache( + async () => { + try { + const repository = await db.repository.findFirst({ + where: { + githubId, + }, include: { - memberships: true, + installation: { + include: { + memberships: true, + }, + }, }, - }, - }, - }); - return repository; - } catch (error) { - if (error instanceof Prisma.PrismaClientKnownRequestError) { - console.error("An error occurred while fetching repository:", error.message); - throw new Error("Database error occurred"); + }); + return repository; + } catch (error) { + if (error instanceof Prisma.PrismaClientKnownRequestError) { + console.error("An error occurred while fetching repository:", error.message); + throw new Error("Database error occurred"); + } + throw error; + } + }, + [`getRepositoryByGithubId-${githubId}`], + { + tags: [repositoryCache.tag.byGithubId(githubId)], + revalidate: 60 * 20, } - throw error; - } -}; + )(); /** * Fetches one repositories from the database by id. * @returns A repository. */ -export const getRepositoryById = async (id: string) => { - try { - const repository = await db.repository.findFirst({ - where: { - id, - }, - include: { - installation: { +export const getRepositoryById = (id: string) => + unstable_cache( + async () => { + try { + const repository = await db.repository.findFirst({ + where: { + id, + }, include: { - memberships: true, + installation: { + include: { + memberships: true, + }, + }, }, - }, - }, - }); - return repository; - } catch (error) { - if (error instanceof Prisma.PrismaClientKnownRequestError) { - console.error("An error occurred while fetching repository:", error.message); - throw new Error("Database error occurred"); + }); + return repository; + } catch (error) { + if (error instanceof Prisma.PrismaClientKnownRequestError) { + console.error("An error occurred while fetching repository:", error.message); + throw new Error("Database error occurred"); + } + throw error; + } + }, + [`getRepositoryById-${id}`], + { + tags: [repositoryCache.tag.byId(id)], + revalidate: 60 * 20, // Cache for 20 minutes } - throw error; - } -}; + )(); /** * Updates a repository's configuered state @@ -95,6 +114,7 @@ export const updateRepository = async (id: string, configuredValue: boolean) => if (!updatedRepository) { throw new Error("Repository not found."); } + repositoryCache.revalidate({ id }); return updatedRepository; }; @@ -104,61 +124,53 @@ export const updateRepository = async (id: string, configuredValue: boolean) => * @returns The fetched repository. */ -export const fetchRepoDetails = async (id: string) => { - try { - return await db.repository.findUnique({ - where: { id }, - }); - } catch (error) { - throw new Error(`Failed to fetch repository details: ${error}`); - } -}; +export const fetchRepoDetails = (id: string) => + unstable_cache( + async () => { + try { + return await db.repository.findUnique({ + where: { id }, + }); + } catch (error) { + throw new Error(`Failed to fetch repository details: ${error}`); + } + }, + [`fetchRepoDetails-${id}`], + { + tags: [repositoryCache.tag.byId(id)], + revalidate: 60 * 20, // Cache for 20 minutes + } + )(); /** * Fetches all repository a user has membership for * @returns An array of repositories. 
*/ -export const getRepositoriesForUser = async (userId: string) => { - try { - const userRepositories = await db.repository.findMany({ - where: { - installation: { - memberships: { - some: { - userId, +export const getRepositoriesForUser = (userId: string) => + unstable_cache( + async (): Promise => { + try { + const userRepositories = await db.repository.findMany({ + where: { + installation: { + memberships: { + some: { + userId, + }, + }, }, }, - }, - }, - }); - - return userRepositories; - } catch (error) { - throw new Error(`Failed to get repositories for user: ${error}`); - } -}; - -/** - * Fetches all users who are enrolled to a specific repository - * @param repositoryId The unique identifier for the repository - * @returns An array of users enrolled to the given repository. - */ - -export const getUsersForRepository = async (repositoryId: string) => { - try { - const users = await db.user.findMany({ - where: { - enrollments: { - some: { - repositoryId: repositoryId, - }, - }, - }, - include: { pointTransactions: true }, - }); - - return users; - } catch (error) { - throw new Error(`Failed to get users for repository: ${error.message}`); - } -}; + }); + + return userRepositories as TRepository[]; + } catch (error) { + console.error(`Failed to get repositories for user: ${error}`); + throw new Error("Failed to retrieve user repositories"); + } + }, + [`getRepositoriesForUser-${userId}`], + { + tags: [repositoryCache.tag.byUserId(userId)], + revalidate: 60 * 20, // Cache for 20 minutes + } + )(); diff --git a/lib/user/service.ts b/lib/user/service.ts index 1b69ce6..6b9e0fb 100644 --- a/lib/user/service.ts +++ b/lib/user/service.ts @@ -4,6 +4,7 @@ import { DatabaseError, ResourceNotFoundError } from "@/types/errors"; import { TUser, TUserCreateInput, TUserUpdateInput, ZUser, ZUserUpdateInput } from "@/types/user"; import { Prisma } from "@prisma/client"; +import { repositoryCache } from "../repository/cache"; import { formatDateFields } from "../utils/datetime"; import { validateInputs } from "../utils/validate"; @@ -107,6 +108,8 @@ export const updateUser = async (personId: string, data: TUserUpdateInput): Prom select: userSelection, }); + repositoryCache.revalidate({ userId: personId }); + return updatedUser; } catch (error) { if (error instanceof Prisma.PrismaClientKnownRequestError && error.code === "P2016") { @@ -140,3 +143,28 @@ export const createUser = async (data: TUserCreateInput): Promise => { return user; }; + +/** + * Fetches all users who are enrolled to a specific repository + * @param repositoryId The unique identifier for the repository + * @returns An array of users enrolled to the given repository. 
+ */ + +export const getUsersForRepository = async (repositoryId: string) => { + try { + const users = await db.user.findMany({ + where: { + enrollments: { + some: { + repositoryId: repositoryId, + }, + }, + }, + include: { pointTransactions: true }, + }); + + return users; + } catch (error) { + throw new Error(`Failed to get users for repository: ${error.message}`); + } +}; diff --git a/package.json b/package.json index cd5bff0..a0c2751 100644 --- a/package.json +++ b/package.json @@ -28,9 +28,10 @@ "@next-auth/prisma-adapter": "^1.0.7", "@octokit/auth-app": "^6.1.1", "@octokit/rest": "^20.1.1", + "@octokit/types": "^13.5.0", "@octokit/webhooks": "^12.2.0", "@octokit/webhooks-types": "^7.5.1", - "@prisma/client": "5.9.1", + "@prisma/client": "^5.19.1", "@radix-ui/react-alert-dialog": "^1.0.5", "@radix-ui/react-avatar": "^1.0.4", "@radix-ui/react-checkbox": "^1.0.4", @@ -100,7 +101,7 @@ "postcss": "^8.4.38", "prettier": "^3.2.5", "prettier-plugin-tailwindcss": "^0.5.14", - "prisma": "^5.14.0", + "prisma": "^5.19.1", "rimraf": "^5.0.7", "tailwindcss": "^3.4.3", "typescript": "^5.4.5" diff --git a/pages/api/github-webhook.ts b/pages/api/github-webhook.ts deleted file mode 100644 index fca9c2e..0000000 --- a/pages/api/github-webhook.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { registerHooks, webhookMiddleware } from "@/lib/github"; -import { NextApiRequest, NextApiResponse } from "next"; - -export const config = { - api: { - bodyParser: false, - }, -}; - -// Set to store processed event IDs -const processedEvents = new Set(); - -// Flag to ensure hooks are registered only once -let hooksRegistered = false; - -export default async function handler(req: NextApiRequest, res: NextApiResponse) { - if (req.method === "POST") { - const eventId = req.headers["x-github-delivery"] as string; - - if (!eventId) { - res.status(400).json({ error: "Missing X-GitHub-Delivery header" }); - return; - } - - if (processedEvents.has(eventId)) { - res.status(200).end(); - return; - } - - if (!hooksRegistered) { - registerHooks(); - hooksRegistered = true; - } - - webhookMiddleware(req, res, () => { - processedEvents.add(eventId); - - // Optionally, remove the event ID after some time to prevent the set from growing indefinitely - setTimeout( - () => { - processedEvents.delete(eventId); - }, - 24 * 60 * 60 * 1000 - ); // 24 hours - - res.status(200).end(); - }); - } else { - res.setHeader("Allow", "POST"); - res.status(405).end("Method Not Allowed"); - } -} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 91d07e1..ce4ba1b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -31,13 +31,16 @@ importers: version: 3.4.0(react-hook-form@7.51.4(react@18.3.1)) '@next-auth/prisma-adapter': specifier: ^1.0.7 - version: 1.0.7(@prisma/client@5.9.1(prisma@5.14.0))(next-auth@4.24.7(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) + version: 1.0.7(@prisma/client@5.19.1(prisma@5.19.1))(next-auth@4.24.7(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)) '@octokit/auth-app': specifier: ^6.1.1 version: 6.1.1 '@octokit/rest': specifier: ^20.1.1 version: 20.1.1 + '@octokit/types': + specifier: ^13.5.0 + version: 13.5.0 '@octokit/webhooks': specifier: ^12.2.0 version: 12.2.0 @@ -45,8 +48,8 @@ importers: specifier: ^7.5.1 version: 7.5.1 '@prisma/client': - specifier: 5.9.1 - version: 5.9.1(prisma@5.14.0) + specifier: ^5.19.1 + version: 5.19.1(prisma@5.19.1) 
'@radix-ui/react-alert-dialog': specifier: ^1.0.5 version: 1.0.5(@types/react-dom@18.3.0)(@types/react@18.3.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -250,8 +253,8 @@ importers: specifier: ^0.5.14 version: 0.5.14(@trivago/prettier-plugin-sort-imports@4.3.0(prettier@3.2.5))(prettier@3.2.5) prisma: - specifier: ^5.14.0 - version: 5.14.0 + specifier: ^5.19.1 + version: 5.19.1 rimraf: specifier: ^5.0.7 version: 5.0.7 @@ -653,8 +656,8 @@ packages: resolution: {integrity: sha512-1TUx3KdaU3cN7nfCdNf+UVqA/PSX29Cjcox3fZZBtINlRrXVTmUkQnCKv2MbBUbCopbK4olAT1IHl76uZyCiVA==} engines: {node: '>=14.0.0'} - '@grpc/grpc-js@1.11.2': - resolution: {integrity: sha512-DWp92gDD7/Qkj7r8kus6/HCINeo3yPZWZ3paKgDgsbKbSpoxKg1yvN8xe2Q8uE3zOsPe3bX8FQX2+XValq2yTw==} + '@grpc/grpc-js@1.11.3': + resolution: {integrity: sha512-i9UraDzFHMR+Iz/MhFLljT+fCpgxZ3O6CxwGJ8YuNYHJItIHUzKJpW2LvoFZNnGPwqc9iWy9RAucxV0JoR9aUQ==} engines: {node: '>=12.10.0'} '@grpc/proto-loader@0.7.13': @@ -1071,6 +1074,10 @@ packages: resolution: {integrity: sha512-kaNl/T7WzyMUQHQlVq7q0oV4Kev6+0xFwqzofryC66jgGMacd0QH5TwfpbUwSTby+SdAdprAe5UKMvBw4tKS5Q==} engines: {node: '>=14'} + '@opentelemetry/api-logs@0.52.1': + resolution: {integrity: sha512-qnSqB2DQ9TPP96dl8cDubDvrUyWc0/sK81xHTK8eSUspzDM3bsewX903qclQFvVhgStjRWdC5bLb3kQqMkfV5A==} + engines: {node: '>=14'} + '@opentelemetry/api@1.9.0': resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} engines: {node: '>=8.0.0'} @@ -1081,6 +1088,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/context-async-hooks@1.25.1': + resolution: {integrity: sha512-UW/ge9zjvAEmRWVapOP0qyCvPulWU6cQxGxDbWEFfGOj1VBBZAuOqTo3X6yWmDTD3Xe15ysCZChHncr2xFMIfQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/context-async-hooks@1.26.0': resolution: {integrity: sha512-HedpXXYzzbaoutw6DFLWLDket2FwLkLpil4hGCZ1xYEIMTcivdfwEOISgdbLEWyG3HW52gTq2V9mOVJrONgiwg==} engines: {node: '>=14'} @@ -1093,6 +1106,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/core@1.25.1': + resolution: {integrity: sha512-GeT/l6rBYWVQ4XArluLVB6WWQ8flHbdb6r2FCHC3smtdOAbrJBIv35tpV/yp9bmYUJf+xmZpu9DRTIeJVhFbEQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/core@1.26.0': resolution: {integrity: sha512-1iKxXXE8415Cdv0yjG3G6hQnB5eVEsJce3QaawX8SjDn0mAS0ZM8fAbZZJD4ajvhC15cePvosSCut404KrIIvQ==} engines: {node: '>=14'} @@ -1105,48 +1124,96 @@ packages: peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-logs-otlp-http@0.52.1': + resolution: {integrity: sha512-qKgywId2DbdowPZpOBXQKp0B8DfhfIArmSic15z13Nk/JAOccBUQdPwDjDnjsM5f0ckZFMVR2t/tijTUAqDZoA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-trace-otlp-grpc@0.49.1': resolution: {integrity: sha512-Zbd7f3zF7fI2587MVhBizaW21cO/SordyrZGtMtvhoxU6n4Qb02Gx71X4+PzXH620e0+JX+Pcr9bYb1HTeVyJA==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-trace-otlp-grpc@0.52.1': + resolution: {integrity: sha512-pVkSH20crBwMTqB3nIN4jpQKUEoB0Z94drIHpYyEqs7UBr+I0cpYyOR3bqjA/UasQUMROb3GX8ZX4/9cVRqGBQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-trace-otlp-http@0.49.1': resolution: {integrity: sha512-KOLtZfZvIrpGZLVvblKsiVQT7gQUZNKcUUH24Zz6Xbi7LJb9Vt6xtUZFYdR5IIjvt47PIqBKDWUQlU0o1wAsRw==} 
engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-trace-otlp-http@0.52.1': + resolution: {integrity: sha512-05HcNizx0BxcFKKnS5rwOV+2GevLTVIRA0tRgWYyw4yCgR53Ic/xk83toYKts7kbzcI+dswInUg/4s8oyA+tqg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-trace-otlp-proto@0.49.1': resolution: {integrity: sha512-n8ON/c9pdMyYAfSFWKkgsPwjYoxnki+6Olzo+klKfW7KqLWoyEkryNkbcMIYnGGNXwdkMIrjoaP0VxXB26Oxcg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-trace-otlp-proto@0.52.1': + resolution: {integrity: sha512-pt6uX0noTQReHXNeEslQv7x311/F1gJzMnp1HD2qgypLRPbXDeMzzeTngRTUaUbP6hqWNtPxuLr4DEoZG+TcEQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-zipkin@1.22.0': resolution: {integrity: sha512-XcFs6rGvcTz0qW5uY7JZDYD0yNEXdekXAb6sFtnZgY/cHY6BQ09HMzOjv9SX+iaXplRDcHr1Gta7VQKM1XXM6g==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/exporter-zipkin@1.25.1': + resolution: {integrity: sha512-RmOwSvkimg7ETwJbUOPTMhJm9A9bG1U8s7Zo3ajDh4zM7eYcycQ0dM7FbLD6NXWbI2yj7UY4q8BKinKYBQksyw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + '@opentelemetry/instrumentation@0.49.1': resolution: {integrity: sha512-0DLtWtaIppuNNRRllSD4bjU8ZIiLp1cDXvJEbp752/Zf+y3gaLNaoGRGIlX4UHhcsrmtL+P2qxi3Hodi8VuKiQ==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/instrumentation@0.52.1': + resolution: {integrity: sha512-uXJbYU/5/MBHjMp1FqrILLRuiJCs3Ofk0MeRDk8g1S1gD47U8X3JnSwcMO1rtRo1x1a7zKaQHaoYu49p/4eSKw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-exporter-base@0.49.1': resolution: {integrity: sha512-z6sHliPqDgJU45kQatAettY9/eVF58qVPaTuejw9YWfSRqid9pXPYeegDCSdyS47KAUgAtm+nC28K3pfF27HWg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/otlp-exporter-base@0.52.1': + resolution: {integrity: sha512-z175NXOtX5ihdlshtYBe5RpGeBoTXVCKPPLiQlD6FHvpM4Ch+p2B0yWKYSrBfLH24H9zjJiBdTrtD+hLlfnXEQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + '@opentelemetry/otlp-grpc-exporter-base@0.49.1': resolution: {integrity: sha512-DNDNUWmOqtKTFJAyOyHHKotVox0NQ/09ETX8fUOeEtyNVHoGekAVtBbvIA3AtK+JflP7LC0PTjlLfruPM3Wy6w==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': ^1.0.0 + '@opentelemetry/otlp-grpc-exporter-base@0.52.1': + resolution: {integrity: sha512-zo/YrSDmKMjG+vPeA9aBBrsQM9Q/f2zo6N04WMB3yNldJRsgpRBeLLwvAt/Ba7dpehDLOEFBd1i2JCoaFtpCoQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': ^1.0.0 + '@opentelemetry/otlp-proto-exporter-base@0.49.1': resolution: {integrity: sha512-x1qB4EUC7KikUl2iNuxCkV8yRzrSXSyj4itfpIO674H7dhI7Zv37SFaOJTDN+8Z/F50gF2ISFH9CWQ4KCtGm2A==} engines: {node: '>=14'} @@ -1159,12 +1226,24 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.9.0' + '@opentelemetry/otlp-transformer@0.52.1': + resolution: {integrity: sha512-I88uCZSZZtVa0XniRqQWKbjAUm73I8tpEy/uJYPPYw5d7BRdVk0RfTBQw8kSUl01oVWEuqxLDa802222MYyWHg==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + '@opentelemetry/propagator-b3@1.22.0': resolution: {integrity: sha512-qBItJm9ygg/jCB5rmivyGz1qmKZPsL/sX715JqPMFgq++Idm0x+N9sLQvWFHFt2+ZINnCSojw7FVBgFW6izcXA==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + 
'@opentelemetry/propagator-b3@1.25.1': + resolution: {integrity: sha512-p6HFscpjrv7//kE+7L+3Vn00VEDUJB0n6ZrjkTYHrJ58QZ8B3ajSJhRbCcY6guQ3PDjTbxWklyvIN2ojVbIb1A==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/propagator-b3@1.26.0': resolution: {integrity: sha512-vvVkQLQ/lGGyEy9GT8uFnI047pajSOVnZI2poJqVGD3nJ+B9sFGdlHNnQKophE3lHfnIH0pw2ubrCTjZCgIj+Q==} engines: {node: '>=14'} @@ -1177,6 +1256,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/propagator-jaeger@1.25.1': + resolution: {integrity: sha512-nBprRf0+jlgxks78G/xq72PipVK+4or9Ypntw0gVZYNTCSK8rg5SeaGV19tV920CMqBD/9UIOiFr23Li/Q8tiA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/propagator-jaeger@1.26.0': resolution: {integrity: sha512-DelFGkCdaxA1C/QA0Xilszfr0t4YbGd3DjxiCDPh34lfnFr+VkkrjV9S8ZTJvAzfdKERXhfOxIKBoGPJwoSz7Q==} engines: {node: '>=14'} @@ -1189,6 +1274,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/resources@1.25.1': + resolution: {integrity: sha512-pkZT+iFYIZsVn6+GzM0kSX+u3MSLCY9md+lIJOoKl/P+gJFfxJte/60Usdp8Ce4rOs8GduUpSPNe1ddGyDT1sQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/resources@1.26.0': resolution: {integrity: sha512-CPNYchBE7MBecCSVy0HKpUISEeJOniWqcHaAHpmasZ3j9o6V3AyBzhRc90jdmemq0HOxDr6ylhUbDhBqqPpeNw==} engines: {node: '>=14'} @@ -1202,24 +1293,48 @@ packages: '@opentelemetry/api': '>=1.4.0 <1.9.0' '@opentelemetry/api-logs': '>=0.39.1' + '@opentelemetry/sdk-logs@0.52.1': + resolution: {integrity: sha512-MBYh+WcPPsN8YpRHRmK1Hsca9pVlyyKd4BxOC4SsgHACnl/bPp4Cri9hWhVm5+2tiQ9Zf4qSc1Jshw9tOLGWQA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.10.0' + '@opentelemetry/sdk-metrics@1.22.0': resolution: {integrity: sha512-k6iIx6H3TZ+BVMr2z8M16ri2OxWaljg5h8ihGJxi/KQWcjign6FEaEzuigXt5bK9wVEhqAcWLCfarSftaNWkkg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.9.0' + '@opentelemetry/sdk-metrics@1.25.1': + resolution: {integrity: sha512-9Mb7q5ioFL4E4dDrc4wC/A3NTHDat44v4I3p2pLPSxRvqUbDIQyMVr9uK+EU69+HWhlET1VaSrRzwdckWqY15Q==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + '@opentelemetry/sdk-node@0.49.1': resolution: {integrity: sha512-feBIT85ndiSHXsQ2gfGpXC/sNeX4GCHLksC4A9s/bfpUbbgbCSl0RvzZlmEpCHarNrkZMwFRi4H0xFfgvJEjrg==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.9.0' + '@opentelemetry/sdk-node@0.52.1': + resolution: {integrity: sha512-uEG+gtEr6eKd8CVWeKMhH2olcCHM9dEK68pe0qE0be32BcCRsvYURhHaD1Srngh1SQcnQzZ4TP324euxqtBOJA==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + '@opentelemetry/sdk-trace-base@1.22.0': resolution: {integrity: sha512-pfTuSIpCKONC6vkTpv6VmACxD+P1woZf4q0K46nSUvXFvOFqjBYKFaAMkKD3M1mlKUUh0Oajwj35qNjMl80m1Q==} engines: {node: '>=14'} peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/sdk-trace-base@1.25.1': + resolution: {integrity: sha512-C8k4hnEbc5FamuZQ92nTOp8X/diCY56XUTnMiv9UTuJitCzaNNHAVsdm5+HLCdI8SLQsLWIrG38tddMxLVoftw==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/sdk-trace-base@1.26.0': resolution: {integrity: sha512-olWQldtvbK4v22ymrKLbIcBi9L2SpMO84sCPY54IVsJhP9fRsxJT194C/AVaAuJzLE30EdhhM1VmvVYR7az+cw==} engines: {node: '>=14'} @@ -1232,6 +1347,12 @@ packages: 
peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.9.0' + '@opentelemetry/sdk-trace-node@1.25.1': + resolution: {integrity: sha512-nMcjFIKxnFqoez4gUmihdBrbpsEnAX/Xj16sGvZm+guceYE0NE00vLhpDVK6f3q8Q4VFI5xG8JjlXKMB/SkTTQ==} + engines: {node: '>=14'} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/sdk-trace-node@1.26.0': resolution: {integrity: sha512-Fj5IVKrj0yeUwlewCRwzOVcr5avTuNnMHWf7GPc1t6WaT78J6CJyF3saZ/0RkZfdeNO8IcBl/bNcWMVZBMRW8Q==} engines: {node: '>=14'} @@ -1242,6 +1363,10 @@ packages: resolution: {integrity: sha512-CAOgFOKLybd02uj/GhCdEeeBjOS0yeoDeo/CA7ASBSmenpZHAKGB3iDm/rv3BQLcabb/OprDEsSQ1y0P8A7Siw==} engines: {node: '>=14'} + '@opentelemetry/semantic-conventions@1.25.1': + resolution: {integrity: sha512-ZDjMJJQRlyk8A1KZFCc+bCbsyrn1wTwdNt56F7twdfUfnHUZUq77/WfONCj8p72NZOyP7pNTdUWSTYC3GTbuuQ==} + engines: {node: '>=14'} + '@opentelemetry/semantic-conventions@1.27.0': resolution: {integrity: sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==} engines: {node: '>=14'} @@ -1256,8 +1381,8 @@ packages: '@popperjs/core@2.11.8': resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==} - '@prisma/client@5.9.1': - resolution: {integrity: sha512-caSOnG4kxcSkhqC/2ShV7rEoWwd3XrftokxJqOCMVvia4NYV/TPtJlS9C2os3Igxw/Qyxumj9GBQzcStzECvtQ==} + '@prisma/client@5.19.1': + resolution: {integrity: sha512-x30GFguInsgt+4z5I4WbkZP2CGpotJMUXy+Gl/aaUjHn2o1DnLYNTA+q9XdYmAQZM8fIIkvUiA2NpgosM3fneg==} engines: {node: '>=16.13'} peerDependencies: prisma: '*' @@ -1265,20 +1390,20 @@ packages: prisma: optional: true - '@prisma/debug@5.14.0': - resolution: {integrity: sha512-iq56qBZuFfX3fCxoxT8gBX33lQzomBU0qIUaEj1RebsKVz1ob/BVH1XSBwwwvRVtZEV1b7Fxx2eVu34Ge/mg3w==} + '@prisma/debug@5.19.1': + resolution: {integrity: sha512-lAG6A6QnG2AskAukIEucYJZxxcSqKsMK74ZFVfCTOM/7UiyJQi48v6TQ47d6qKG3LbMslqOvnTX25dj/qvclGg==} - '@prisma/engines-version@5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48': - resolution: {integrity: sha512-ip6pNkRo1UxWv+6toxNcYvItNYaqQjXdFNGJ+Nuk2eYtRoEdoF13wxo7/jsClJFFenMPVNVqXQDV0oveXnR1cA==} + '@prisma/engines-version@5.19.1-2.69d742ee20b815d88e17e54db4a2a7a3b30324e3': + resolution: {integrity: sha512-xR6rt+z5LnNqTP5BBc+8+ySgf4WNMimOKXRn6xfNRDSpHvbOEmd7+qAOmzCrddEc4Cp8nFC0txU14dstjH7FXA==} - '@prisma/engines@5.14.0': - resolution: {integrity: sha512-lgxkKZ6IEygVcw6IZZUlPIfLQ9hjSYAtHjZ5r64sCLDgVzsPFCi2XBBJgzPMkOQ5RHzUD4E/dVdpn9+ez8tk1A==} + '@prisma/engines@5.19.1': + resolution: {integrity: sha512-kR/PoxZDrfUmbbXqqb8SlBBgCjvGaJYMCOe189PEYzq9rKqitQ2fvT/VJ8PDSe8tTNxhc2KzsCfCAL+Iwm/7Cg==} - '@prisma/fetch-engine@5.14.0': - resolution: {integrity: sha512-VrheA9y9DMURK5vu8OJoOgQpxOhas3qF0IBHJ8G/0X44k82kc8E0w98HCn2nhnbOOMwbWsJWXfLC2/F8n5u0gQ==} + '@prisma/fetch-engine@5.19.1': + resolution: {integrity: sha512-pCq74rtlOVJfn4pLmdJj+eI4P7w2dugOnnTXpRilP/6n5b2aZiA4ulJlE0ddCbTPkfHmOL9BfaRgA8o+1rfdHw==} - '@prisma/get-platform@5.14.0': - resolution: {integrity: sha512-/yAyBvcEjRv41ynZrhdrPtHgk47xLRRq/o5eWGcUpBJ1YrUZTYB8EoPiopnP7iQrMATK8stXQdPOoVlrzuTQZw==} + '@prisma/get-platform@5.19.1': + resolution: {integrity: sha512-sCeoJ+7yt0UjnR+AXZL7vXlg5eNxaFOwC23h0KvW1YIXUoa7+W2ZcAUhoEQBmJTW4GrFqCuZ8YSP0mkDa4k3Zg==} '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -2094,6 +2219,10 @@ packages: resolution: {integrity: 
sha512-b4ZeH1k5sy4NISWgkZGmVxspe2r5vFpZ+KdNpYxiobQ/ameM/979OYXNRceeimWKuEdEEp1SMcMJNfZ0Pt5L1A==} engines: {node: '>=18.0.0'} + '@trigger.dev/core@3.0.5': + resolution: {integrity: sha512-k8zFGrlHNXPkBb9RptuLl/UsmzHGeHL8xv6fWF6NySS61Y3yLtdBMSLrX8hCxlW9j7nm9elhiK3PgrQeozf0tg==} + engines: {node: '>=18.20.0'} + '@trigger.dev/nextjs@3.0.0-beta.56': resolution: {integrity: sha512-b46yKUgQYdx8p32hXdYQATJdoX9SYePYTF/P6Wo9DYJYra9pUKkeiBvXLKFqOjZpgX1Vpb7qtaSoJFyUARbJhg==} engines: {node: '>=18.0.0'} @@ -2244,6 +2373,11 @@ packages: peerDependencies: acorn: ^8 + acorn-import-attributes@1.9.5: + resolution: {integrity: sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==} + peerDependencies: + acorn: ^8 + acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: @@ -2934,6 +3068,10 @@ packages: resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} engines: {node: '>=10'} + execa@8.0.1: + resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} + engines: {node: '>=16.17'} + extend@3.0.2: resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} @@ -3058,6 +3196,10 @@ packages: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} + get-stream@8.0.1: + resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} + engines: {node: '>=16'} + get-symbol-description@1.0.2: resolution: {integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==} engines: {node: '>= 0.4'} @@ -3216,6 +3358,10 @@ packages: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} + human-signals@5.0.0: + resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} + engines: {node: '>=16.17.0'} + humanize-duration@3.32.1: resolution: {integrity: sha512-inh5wue5XdfObhu/IGEMiA1nUXigSGcaKNemcbLRKa7jXYGDZXr3LoT9pTIzq2hPEbld7w/qv9h+ikWGz8fL1g==} @@ -3232,6 +3378,9 @@ packages: resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} engines: {node: '>=6'} + import-in-the-middle@1.11.0: + resolution: {integrity: sha512-5DimNQGoe0pLUHbR9qK84iWaWjjbsxiqXnw6Qz64+azRgleqv9k2kTt5fw7QsOpmaGYtuxxursnPPsnTKEx10Q==} + import-in-the-middle@1.7.1: resolution: {integrity: sha512-1LrZPDtW+atAxH42S6288qyDFNQ2YCty+2mxEPRtfazH6Z5QwkaBSTS2ods7hnVJioF6rkRfNoA6A/MstpFXLg==} @@ -3388,6 +3537,10 @@ packages: resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} engines: {node: '>=8'} + is-stream@3.0.0: + resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + is-string@1.0.7: resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} engines: {node: '>= 0.4'} @@ -3790,6 +3943,10 @@ packages: resolution: {integrity: 
sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} + mimic-fn@4.0.0: + resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} + engines: {node: '>=12'} + min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} @@ -3912,6 +4069,10 @@ packages: resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} engines: {node: '>=8'} + npm-run-path@5.3.0: + resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + oauth@0.9.15: resolution: {integrity: sha512-a5ERWK1kh38ExDEfoO6qUHJb32rd7aYmPHuyCu3Fta/cnICvYmgd2uhuKXvPD+PXB+gCEYYEaQdIRAjCOwAKNA==} @@ -3973,6 +4134,10 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} + onetime@6.0.0: + resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} + engines: {node: '>=12'} + openid-client@5.6.5: resolution: {integrity: sha512-5P4qO9nGJzB5PI0LFlhj4Dzg3m4odt0qsJTfyEtZyOlkgpILwEioOhVVJOrS1iVH494S4Ee5OCjjg6Bf5WOj3w==} @@ -4032,6 +4197,10 @@ packages: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} + path-key@4.0.0: + resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} + engines: {node: '>=12'} + path-parse@1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} @@ -4182,8 +4351,8 @@ packages: pretty-format@3.8.0: resolution: {integrity: sha512-WuxUnVtlWL1OfZFQFuqvnvs6MiAGk9UNsBostyBOB0Is9wb5uRESevA6rnl/rkksXaGX3GzZhPup5d6Vp1nFew==} - prisma@5.14.0: - resolution: {integrity: sha512-gCNZco7y5XtjrnQYeDJTiVZmT/ncqCr5RY1/Cf8X2wgLRmyh9ayPAGBNziI4qEE4S6SxCH5omQLVo9lmURaJ/Q==} + prisma@5.19.1: + resolution: {integrity: sha512-c5K9MiDaa+VAAyh1OiYk76PXOme9s3E992D7kvvIOhCrNsBQfy2mP2QAQtX0WNj140IgG++12kwZpYB9iIydNQ==} engines: {node: '>=16.13'} hasBin: true @@ -4650,6 +4819,10 @@ packages: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} engines: {node: '>=6'} + strip-final-newline@3.0.0: + resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} + engines: {node: '>=12'} + strip-indent@3.0.0: resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} engines: {node: '>=8'} @@ -5928,7 +6101,7 @@ snapshots: '@google-cloud/precise-date@4.0.0': {} - '@grpc/grpc-js@1.11.2': + '@grpc/grpc-js@1.11.3': dependencies: '@grpc/proto-loader': 0.7.13 '@js-sdsl/ordered-map': 4.4.2 @@ -6100,9 +6273,9 @@ snapshots: outvariant: 1.4.3 strict-event-emitter: 0.5.1 - '@next-auth/prisma-adapter@1.0.7(@prisma/client@5.9.1(prisma@5.14.0))(next-auth@4.24.7(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': + 
'@next-auth/prisma-adapter@1.0.7(@prisma/client@5.19.1(prisma@5.19.1))(next-auth@4.24.7(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': dependencies: - '@prisma/client': 5.9.1(prisma@5.14.0) + '@prisma/client': 5.19.1(prisma@5.19.1) next-auth: 4.24.7(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@next/env@14.1.0': {} @@ -6345,12 +6518,20 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs@0.52.1': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api@1.9.0': {} '@opentelemetry/context-async-hooks@1.22.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6360,6 +6541,11 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.22.0 + '@opentelemetry/core@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.25.1 + '@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6374,9 +6560,18 @@ snapshots: '@opentelemetry/otlp-transformer': 0.49.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-logs': 0.49.1(@opentelemetry/api-logs@0.49.1)(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-logs-otlp-http@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.52.1 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc@0.49.1(@opentelemetry/api@1.9.0)': dependencies: - '@grpc/grpc-js': 1.11.2 + '@grpc/grpc-js': 1.11.3 '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/otlp-grpc-exporter-base': 0.49.1(@opentelemetry/api@1.9.0) @@ -6384,6 +6579,16 @@ snapshots: '@opentelemetry/resources': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.3 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http@0.49.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6393,6 +6598,15 @@ snapshots: '@opentelemetry/resources': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 
0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto@0.49.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6403,6 +6617,15 @@ snapshots: '@opentelemetry/resources': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-zipkin@1.22.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6411,6 +6634,14 @@ snapshots: '@opentelemetry/sdk-trace-base': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.22.0 + '@opentelemetry/exporter-zipkin@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + '@opentelemetry/instrumentation@0.49.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6423,19 +6654,45 @@ snapshots: transitivePeerDependencies: - supports-color + '@opentelemetry/instrumentation@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.52.1 + '@types/shimmer': 1.2.0 + import-in-the-middle: 1.11.0 + require-in-the-middle: 7.4.0 + semver: 7.6.2 + shimmer: 1.2.1 + transitivePeerDependencies: + - supports-color + '@opentelemetry/otlp-exporter-base@0.49.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base@0.49.1(@opentelemetry/api@1.9.0)': dependencies: - '@grpc/grpc-js': 1.11.2 + '@grpc/grpc-js': 1.11.3 '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/otlp-exporter-base': 0.49.1(@opentelemetry/api@1.9.0) protobufjs: 7.4.0 + '@opentelemetry/otlp-grpc-exporter-base@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.11.3 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-proto-exporter-base@0.49.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6453,11 +6710,27 @@ snapshots: '@opentelemetry/sdk-metrics': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.52.1 + 
'@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + protobufjs: 7.4.0 + '@opentelemetry/propagator-b3@1.22.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-b3@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-b3@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6468,6 +6741,11 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-jaeger@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-jaeger@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6479,6 +6757,12 @@ snapshots: '@opentelemetry/core': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.22.0 + '@opentelemetry/resources@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + '@opentelemetry/resources@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6499,6 +6783,13 @@ snapshots: '@opentelemetry/core': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.22.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.52.1 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics@1.22.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6506,6 +6797,13 @@ snapshots: '@opentelemetry/resources': 1.22.0(@opentelemetry/api@1.9.0) lodash.merge: 4.6.2 + '@opentelemetry/sdk-metrics@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + '@opentelemetry/sdk-node@0.49.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6525,6 +6823,25 @@ snapshots: transitivePeerDependencies: - supports-color + '@opentelemetry/sdk-node@0.52.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.52.1 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-zipkin': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + 
'@opentelemetry/sdk-trace-node': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + transitivePeerDependencies: + - supports-color + '@opentelemetry/sdk-trace-base@1.22.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6532,6 +6849,13 @@ snapshots: '@opentelemetry/resources': 1.22.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.22.0 + '@opentelemetry/sdk-trace-base@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + '@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6549,6 +6873,16 @@ snapshots: '@opentelemetry/sdk-trace-base': 1.22.0(@opentelemetry/api@1.9.0) semver: 7.6.2 + '@opentelemetry/sdk-trace-node@1.25.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-b3': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/propagator-jaeger': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + semver: 7.6.2 + '@opentelemetry/sdk-trace-node@1.26.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -6561,6 +6895,8 @@ snapshots: '@opentelemetry/semantic-conventions@1.22.0': {} + '@opentelemetry/semantic-conventions@1.25.1': {} + '@opentelemetry/semantic-conventions@1.27.0': {} '@panva/hkdf@1.1.1': {} @@ -6570,30 +6906,30 @@ snapshots: '@popperjs/core@2.11.8': {} - '@prisma/client@5.9.1(prisma@5.14.0)': + '@prisma/client@5.19.1(prisma@5.19.1)': optionalDependencies: - prisma: 5.14.0 + prisma: 5.19.1 - '@prisma/debug@5.14.0': {} + '@prisma/debug@5.19.1': {} - '@prisma/engines-version@5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48': {} + '@prisma/engines-version@5.19.1-2.69d742ee20b815d88e17e54db4a2a7a3b30324e3': {} - '@prisma/engines@5.14.0': + '@prisma/engines@5.19.1': dependencies: - '@prisma/debug': 5.14.0 - '@prisma/engines-version': 5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48 - '@prisma/fetch-engine': 5.14.0 - '@prisma/get-platform': 5.14.0 + '@prisma/debug': 5.19.1 + '@prisma/engines-version': 5.19.1-2.69d742ee20b815d88e17e54db4a2a7a3b30324e3 + '@prisma/fetch-engine': 5.19.1 + '@prisma/get-platform': 5.19.1 - '@prisma/fetch-engine@5.14.0': + '@prisma/fetch-engine@5.19.1': dependencies: - '@prisma/debug': 5.14.0 - '@prisma/engines-version': 5.14.0-25.e9771e62de70f79a5e1c604a2d7c8e2a0a874b48 - '@prisma/get-platform': 5.14.0 + '@prisma/debug': 5.19.1 + '@prisma/engines-version': 5.19.1-2.69d742ee20b815d88e17e54db4a2a7a3b30324e3 + '@prisma/get-platform': 5.19.1 - '@prisma/get-platform@5.14.0': + '@prisma/get-platform@5.19.1': dependencies: - '@prisma/debug': 5.14.0 + '@prisma/debug': 5.19.1 '@protobufjs/aspromise@1.1.2': {} @@ -7585,10 +7921,36 @@ snapshots: - supports-color - utf-8-validate + '@trigger.dev/core@3.0.5': + dependencies: + '@google-cloud/precise-date': 4.0.0 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.52.1 + '@opentelemetry/exporter-logs-otlp-http': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 
1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-node': 0.52.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-node': 1.25.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.25.1 + execa: 8.0.1 + humanize-duration: 3.32.1 + socket.io-client: 4.7.5 + superjson: 2.2.1 + zod: 3.22.3 + zod-error: 1.5.0 + zod-validation-error: 1.5.0(zod@3.22.3) + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + '@trigger.dev/nextjs@3.0.0-beta.56(@trigger.dev/sdk@3.0.0-beta.56(typescript@5.4.5))(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))': dependencies: '@trigger.dev/sdk': 3.0.0-beta.56(typescript@5.4.5) - debug: 4.3.7 + debug: 4.3.4 next: 14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) transitivePeerDependencies: - supports-color @@ -7596,8 +7958,8 @@ snapshots: '@trigger.dev/react@3.0.0-beta.56(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@tanstack/react-query': 5.0.0-beta.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@trigger.dev/core': 3.0.0-beta.56 - debug: 4.3.7 + '@trigger.dev/core': 3.0.5 + debug: 4.3.4 react: 18.3.1 zod: 3.22.3 transitivePeerDependencies: @@ -7776,6 +8138,10 @@ snapshots: dependencies: acorn: 8.11.3 + acorn-import-attributes@1.9.5(acorn@8.11.3): + dependencies: + acorn: 8.11.3 + acorn-jsx@5.3.2(acorn@8.11.3): dependencies: acorn: 8.11.3 @@ -8293,7 +8659,7 @@ snapshots: engine.io-client@6.5.4: dependencies: '@socket.io/component-emitter': 3.1.2 - debug: 4.3.7 + debug: 4.3.4 engine.io-parser: 5.2.3 ws: 8.17.1 xmlhttprequest-ssl: 2.0.0 @@ -8423,7 +8789,7 @@ snapshots: eslint: 8.57.0 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0) - eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) + eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) eslint-plugin-jsx-a11y: 6.8.0(eslint@8.57.0) eslint-plugin-react: 7.34.1(eslint@8.57.0) eslint-plugin-react-hooks: 4.6.2(eslint@8.57.0) @@ -8451,7 +8817,7 @@ snapshots: enhanced-resolve: 5.16.1 eslint: 8.57.0 eslint-module-utils: 2.8.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) - eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) + eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) fast-glob: 3.3.2 
get-tsconfig: 4.7.5 is-core-module: 2.13.1 @@ -8473,7 +8839,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0): + eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0): dependencies: array-includes: 3.1.8 array.prototype.findlastindex: 1.2.5 @@ -8632,6 +8998,18 @@ snapshots: signal-exit: 3.0.7 strip-final-newline: 2.0.0 + execa@8.0.1: + dependencies: + cross-spawn: 7.0.3 + get-stream: 8.0.1 + human-signals: 5.0.0 + is-stream: 3.0.0 + merge-stream: 2.0.0 + npm-run-path: 5.3.0 + onetime: 6.0.0 + signal-exit: 4.1.0 + strip-final-newline: 3.0.0 + extend@3.0.2: {} fast-deep-equal@3.1.3: {} @@ -8744,6 +9122,8 @@ snapshots: get-stream@6.0.1: {} + get-stream@8.0.1: {} + get-symbol-description@1.0.2: dependencies: call-bind: 1.0.7 @@ -8940,6 +9320,8 @@ snapshots: human-signals@2.1.0: {} + human-signals@5.0.0: {} + humanize-duration@3.32.1: {} husky@9.0.11: {} @@ -8951,6 +9333,13 @@ snapshots: parent-module: 1.0.1 resolve-from: 4.0.0 + import-in-the-middle@1.11.0: + dependencies: + acorn: 8.11.3 + acorn-import-attributes: 1.9.5(acorn@8.11.3) + cjs-module-lexer: 1.4.1 + module-details-from-path: 1.0.3 + import-in-the-middle@1.7.1: dependencies: acorn: 8.11.3 @@ -9085,6 +9474,8 @@ snapshots: is-stream@2.0.1: {} + is-stream@3.0.0: {} + is-string@1.0.7: dependencies: has-tostringtag: 1.0.2 @@ -9605,7 +9996,7 @@ snapshots: micromark@3.2.0: dependencies: '@types/debug': 4.1.12 - debug: 4.3.7 + debug: 4.3.4 decode-named-character-reference: 1.0.2 micromark-core-commonmark: 1.1.0 micromark-factory-space: 1.1.0 @@ -9637,6 +10028,8 @@ snapshots: mimic-fn@2.1.0: {} + mimic-fn@4.0.0: {} + min-indent@1.0.1: {} minimal-polyfills@2.2.3: {} @@ -9776,6 +10169,10 @@ snapshots: dependencies: path-key: 3.1.1 + npm-run-path@5.3.0: + dependencies: + path-key: 4.0.0 + oauth@0.9.15: {} object-assign@4.1.1: {} @@ -9849,6 +10246,10 @@ snapshots: dependencies: mimic-fn: 2.1.0 + onetime@6.0.0: + dependencies: + mimic-fn: 4.0.0 + openid-client@5.6.5: dependencies: jose: 4.15.5 @@ -9915,6 +10316,8 @@ snapshots: path-key@3.1.1: {} + path-key@4.0.0: {} + path-parse@1.0.7: {} path-scurry@1.11.1: @@ -9998,9 +10401,11 @@ snapshots: pretty-format@3.8.0: {} - prisma@5.14.0: + prisma@5.19.1: dependencies: - '@prisma/engines': 5.14.0 + '@prisma/engines': 5.19.1 + optionalDependencies: + fsevents: 2.3.3 prismjs@1.27.0: {} @@ -10503,7 +10908,7 @@ snapshots: socket.io-parser@4.2.4: dependencies: '@socket.io/component-emitter': 3.1.2 - debug: 4.3.7 + debug: 4.3.4 transitivePeerDependencies: - supports-color @@ -10603,6 +11008,8 @@ snapshots: strip-final-newline@2.0.0: {} + strip-final-newline@3.0.0: {} + strip-indent@3.0.0: dependencies: min-indent: 1.0.1 From 3fa9fbfe77ae8599be1ea38ed221d2d3571a73a9 Mon Sep 17 00:00:00 2001 From: Johannes Date: Wed, 25 Sep 2024 16:59:00 +0200 Subject: [PATCH 2/7] remove var --- env.mjs | 2 -- lib/constants.ts | 2 -- 2 files changed, 4 deletions(-) diff --git a/env.mjs b/env.mjs index 09b1441..f1e4d5b 100644 --- a/env.mjs +++ b/env.mjs @@ -31,7 +31,6 @@ export const env = createEnv({ TREMENDOUS_CAMPAIGN_ID: z.string().min(1), DISCORD_BOT_TOKEN: z.string(), DISCORD_CHANNEL_ID: z.string(), - 
OSS_GG_REPO_ID: z.string().min(1), }, client: { NEXT_PUBLIC_APP_URL: z.string().min(1), @@ -64,6 +63,5 @@ export const env = createEnv({ TREMENDOUS_CAMPAIGN_ID: process.env.TREMENDOUS_CAMPAIGN_ID, DISCORD_BOT_TOKEN: process.env.DISCORD_BOT_TOKEN, DISCORD_CHANNEL_ID: process.env.DISCORD_CHANNEL_ID, - OSS_GG_REPO_ID: process.env.OSS_GG_REPO_ID, }, }); diff --git a/lib/constants.ts b/lib/constants.ts index 1cac30a..7adda27 100644 --- a/lib/constants.ts +++ b/lib/constants.ts @@ -74,5 +74,3 @@ export const DISCORD_CHANNEL_ID = env.DISCORD_CHANNEL_ID; export const DISCORD_BOT_TOKEN = env.DISCORD_BOT_TOKEN; export const DISCORD_AWARD_POINTS_MESSAGE = (username: string, points: number) => `Way to go, ${username} 🎉 You've just earned ${points} points. Your contribution is invaluable to our community 🙌 Keep up the fantastic work and let's keep pushing forward! đŸ’Ē`; - -export const OSS_GG_REPO_ID = env.OSS_GG_REPO_ID; From db67bfe9ecbd239dbaf221a5d2e9bfa17e70f8cd Mon Sep 17 00:00:00 2001 From: Johannes Date: Wed, 25 Sep 2024 18:07:09 +0200 Subject: [PATCH 3/7] add logging to installation --- lib/github/hooks/installation.ts | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/lib/github/hooks/installation.ts b/lib/github/hooks/installation.ts index a11eb96..2ecb5e3 100644 --- a/lib/github/hooks/installation.ts +++ b/lib/github/hooks/installation.ts @@ -1,4 +1,4 @@ -import { EmitterWebhookEvent, Webhooks } from "@octokit/webhooks"; +import { EmitterWebhookEvent } from "@octokit/webhooks"; import { sendInstallationDetails } from "../services/user"; @@ -12,9 +12,19 @@ type GitHubRepository = { }; export const onInstallationCreated = async (payload: EmitterWebhookEvent<"installation">["payload"]) => { + console.log("onInstallationCreated called with payload:", JSON.stringify(payload, null, 2)); + const installationId = payload.installation.id; const appId = payload.installation.app_id; const repos = payload.repositories as GitHubRepository[]; - await sendInstallationDetails(installationId, appId, repos, payload.installation); + console.log(`Processing installation: ${installationId}, appId: ${appId}, repos: ${repos.length}`); + + try { + await sendInstallationDetails(installationId, appId, repos, payload.installation); + console.log("sendInstallationDetails completed successfully"); + } catch (error) { + console.error("Error in sendInstallationDetails:", error); + throw error; + } }; From 4e92726253c433f5426ad1a96535f17642dcb5df Mon Sep 17 00:00:00 2001 From: Johannes Date: Wed, 25 Sep 2024 18:19:17 +0200 Subject: [PATCH 4/7] add more logging --- lib/github/services/user.ts | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/lib/github/services/user.ts b/lib/github/services/user.ts index 6fdcdb3..3fda4ab 100644 --- a/lib/github/services/user.ts +++ b/lib/github/services/user.ts @@ -18,7 +18,12 @@ export const sendInstallationDetails = async ( | undefined, installation: any ): Promise => { + console.log(`Starting sendInstallationDetails with installationId: ${installationId}, appId: ${appId}`); + console.log(`Repos:`, JSON.stringify(repos, null, 2)); + console.log(`Installation:`, JSON.stringify(installation, null, 2)); + try { + console.log(`Creating App instance with appId: ${appId}`); const app = new App({ appId, privateKey: GITHUB_APP_PRIVATE_KEY, @@ -26,9 +31,16 @@ export const sendInstallationDetails = async ( secret: GITHUB_APP_WEBHOOK_SECRET!, }, }); + console.log(`App instance created successfully`); + + 
console.log(`Getting installation Octokit for installationId: ${installationId}`); const octokit = await app.getInstallationOctokit(installationId); + console.log(`Octokit instance obtained successfully`); await db.$transaction(async (tx) => { + console.log(`Starting database transaction`); + + console.log(`Upserting installation with githubId: ${installationId}`); const installationPrisma = await tx.installation.upsert({ where: { githubId: installationId }, update: { type: installation?.account?.type.toLowerCase() }, @@ -37,16 +49,22 @@ export const sendInstallationDetails = async ( type: installation?.account?.type.toLowerCase(), }, }); + console.log(`Installation upserted successfully:`, installationPrisma); const userType = installation?.account?.type.toLowerCase(); + console.log(`User type: ${userType}`); + if (userType === "organization") { + console.log(`Processing organization members`); const membersOfOrg = await octokit.rest.orgs.listMembers({ org: installation?.account?.login, role: "all", }); + console.log(`Found ${membersOfOrg.data.length} members in the organization`); await Promise.all( membersOfOrg.data.map(async (member) => { + console.log(`Processing member: ${member.login}`); const newUser = await tx.user.upsert({ where: { githubId: member.id }, update: {}, @@ -58,6 +76,7 @@ export const sendInstallationDetails = async ( avatarUrl: member.avatar_url, }, }); + console.log(`User upserted:`, newUser); await tx.membership.upsert({ where: { @@ -73,9 +92,11 @@ export const sendInstallationDetails = async ( role: "member", }, }); + console.log(`Membership upserted for user: ${newUser.login}`); }) ); } else { + console.log(`Processing individual user`); const user = installation.account; const newUser = await tx.user.upsert({ where: { githubId: user.id }, @@ -88,6 +109,7 @@ export const sendInstallationDetails = async ( avatarUrl: user.avatar_url, }, }); + console.log(`User upserted:`, newUser); await tx.membership.upsert({ where: { @@ -103,13 +125,18 @@ export const sendInstallationDetails = async ( role: "owner", }, }); + console.log(`Membership upserted for user: ${newUser.login}`); } if (repos) { + console.log(`Processing ${repos.length} repositories`); await Promise.all( repos.map(async (repo) => { + console.log(`Processing repository: ${repo.name}`); const defaultBranch = await getRepositoryDefaultBranch(installation.account.login, repo.name); + console.log(`Default branch for ${repo.name}: ${defaultBranch}`); const readme = await getRepositoryReadme(installation.account.login, repo.name, defaultBranch); + console.log(`README fetched for ${repo.name}, length: ${readme.length}`); await tx.repository.upsert({ where: { githubId: repo.id }, @@ -124,12 +151,18 @@ export const sendInstallationDetails = async ( projectDescription: readme, }, }); + console.log(`Repository upserted: ${repo.name}`); }) ); } + + console.log(`Database transaction completed successfully`); }); + + console.log(`sendInstallationDetails completed successfully`); } catch (error) { - console.error(`Failed to post installation details: ${error}`); + console.error(`Failed to post installation details:`, error); + console.error(`Error stack:`, error instanceof Error ? 
error.stack : "No stack trace available"); throw new Error(`Failed to post installation details: ${error}`); } }; From a47fc0165a7b16d0a0b3348fd8c5322f6c158787 Mon Sep 17 00:00:00 2001 From: Johannes Date: Wed, 25 Sep 2024 18:44:17 +0200 Subject: [PATCH 5/7] even more logging --- lib/github/services/user.ts | 228 ++++++++++++++++++------------------ 1 file changed, 117 insertions(+), 111 deletions(-) diff --git a/lib/github/services/user.ts b/lib/github/services/user.ts index 3fda4ab..db6f3a3 100644 --- a/lib/github/services/user.ts +++ b/lib/github/services/user.ts @@ -37,127 +37,133 @@ export const sendInstallationDetails = async ( const octokit = await app.getInstallationOctokit(installationId); console.log(`Octokit instance obtained successfully`); - await db.$transaction(async (tx) => { - console.log(`Starting database transaction`); - - console.log(`Upserting installation with githubId: ${installationId}`); - const installationPrisma = await tx.installation.upsert({ - where: { githubId: installationId }, - update: { type: installation?.account?.type.toLowerCase() }, - create: { - githubId: installationId, - type: installation?.account?.type.toLowerCase(), - }, - }); - console.log(`Installation upserted successfully:`, installationPrisma); - - const userType = installation?.account?.type.toLowerCase(); - console.log(`User type: ${userType}`); - - if (userType === "organization") { - console.log(`Processing organization members`); - const membersOfOrg = await octokit.rest.orgs.listMembers({ - org: installation?.account?.login, - role: "all", + console.log("About to start database transaction"); + await db + .$transaction(async (tx) => { + console.log(`Starting database transaction`); + + console.log(`Upserting installation with githubId: ${installationId}`); + const installationPrisma = await tx.installation.upsert({ + where: { githubId: installationId }, + update: { type: installation?.account?.type.toLowerCase() }, + create: { + githubId: installationId, + type: installation?.account?.type.toLowerCase(), + }, }); - console.log(`Found ${membersOfOrg.data.length} members in the organization`); - - await Promise.all( - membersOfOrg.data.map(async (member) => { - console.log(`Processing member: ${member.login}`); - const newUser = await tx.user.upsert({ - where: { githubId: member.id }, - update: {}, - create: { - githubId: member.id, - login: member.login, - name: member.name, - email: member.email, - avatarUrl: member.avatar_url, - }, - }); - console.log(`User upserted:`, newUser); - - await tx.membership.upsert({ - where: { - userId_installationId: { + console.log(`Installation upserted successfully:`, installationPrisma); + + const userType = installation?.account?.type.toLowerCase(); + console.log(`User type: ${userType}`); + + if (userType === "organization") { + console.log(`Processing organization members`); + const membersOfOrg = await octokit.rest.orgs.listMembers({ + org: installation?.account?.login, + role: "all", + }); + console.log(`Found ${membersOfOrg.data.length} members in the organization`); + + await Promise.all( + membersOfOrg.data.map(async (member) => { + console.log(`Processing member: ${member.login}`); + const newUser = await tx.user.upsert({ + where: { githubId: member.id }, + update: {}, + create: { + githubId: member.id, + login: member.login, + name: member.name, + email: member.email, + avatarUrl: member.avatar_url, + }, + }); + console.log(`User upserted:`, newUser); + + await tx.membership.upsert({ + where: { + userId_installationId: { + userId: 
newUser.id, + installationId: installationPrisma.id, + }, + }, + update: {}, + create: { userId: newUser.id, installationId: installationPrisma.id, + role: "member", }, - }, - update: {}, - create: { + }); + console.log(`Membership upserted for user: ${newUser.login}`); + }) + ); + } else { + console.log(`Processing individual user`); + const user = installation.account; + const newUser = await tx.user.upsert({ + where: { githubId: user.id }, + update: {}, + create: { + githubId: user.id, + login: user.login, + name: user.name, + email: user.email, + avatarUrl: user.avatar_url, + }, + }); + console.log(`User upserted:`, newUser); + + await tx.membership.upsert({ + where: { + userId_installationId: { userId: newUser.id, installationId: installationPrisma.id, - role: "member", }, - }); - console.log(`Membership upserted for user: ${newUser.login}`); - }) - ); - } else { - console.log(`Processing individual user`); - const user = installation.account; - const newUser = await tx.user.upsert({ - where: { githubId: user.id }, - update: {}, - create: { - githubId: user.id, - login: user.login, - name: user.name, - email: user.email, - avatarUrl: user.avatar_url, - }, - }); - console.log(`User upserted:`, newUser); - - await tx.membership.upsert({ - where: { - userId_installationId: { + }, + update: {}, + create: { userId: newUser.id, installationId: installationPrisma.id, + role: "owner", }, - }, - update: {}, - create: { - userId: newUser.id, - installationId: installationPrisma.id, - role: "owner", - }, - }); - console.log(`Membership upserted for user: ${newUser.login}`); - } - - if (repos) { - console.log(`Processing ${repos.length} repositories`); - await Promise.all( - repos.map(async (repo) => { - console.log(`Processing repository: ${repo.name}`); - const defaultBranch = await getRepositoryDefaultBranch(installation.account.login, repo.name); - console.log(`Default branch for ${repo.name}: ${defaultBranch}`); - const readme = await getRepositoryReadme(installation.account.login, repo.name, defaultBranch); - console.log(`README fetched for ${repo.name}, length: ${readme.length}`); - - await tx.repository.upsert({ - where: { githubId: repo.id }, - update: {}, - create: { - githubId: repo.id, - name: repo.name, - owner: repo.full_name.split("/")[0], - installationId: installationPrisma.id, - logoUrl: `https://avatars.githubusercontent.com/u/${installation.account.id}?s=200&v=4`, - default_branch: defaultBranch, - projectDescription: readme, - }, - }); - console.log(`Repository upserted: ${repo.name}`); - }) - ); - } - - console.log(`Database transaction completed successfully`); - }); + }); + console.log(`Membership upserted for user: ${newUser.login}`); + } + + if (repos) { + console.log(`Processing ${repos.length} repositories`); + await Promise.all( + repos.map(async (repo) => { + console.log(`Processing repository: ${repo.name}`); + const defaultBranch = await getRepositoryDefaultBranch(installation.account.login, repo.name); + console.log(`Default branch for ${repo.name}: ${defaultBranch}`); + const readme = await getRepositoryReadme(installation.account.login, repo.name, defaultBranch); + console.log(`README fetched for ${repo.name}, length: ${readme.length}`); + + await tx.repository.upsert({ + where: { githubId: repo.id }, + update: {}, + create: { + githubId: repo.id, + name: repo.name, + owner: repo.full_name.split("/")[0], + installationId: installationPrisma.id, + logoUrl: `https://avatars.githubusercontent.com/u/${installation.account.id}?s=200&v=4`, + default_branch: 
defaultBranch, + projectDescription: readme, + }, + }); + console.log(`Repository upserted: ${repo.name}`); + }) + ); + } + + console.log("Database transaction completed successfully"); + }) + .catch((error) => { + console.error("Error in database transaction:", error); + throw error; + }); console.log(`sendInstallationDetails completed successfully`); } catch (error) { From 9ad24bb91b0db1818c5825c243f924c276ab4c5a Mon Sep 17 00:00:00 2001 From: Johannes Date: Wed, 25 Sep 2024 19:00:06 +0200 Subject: [PATCH 6/7] revalidate cache --- lib/github/services/user.ts | 3 +++ lib/repository/service.ts | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/github/services/user.ts b/lib/github/services/user.ts index db6f3a3..6b02a67 100644 --- a/lib/github/services/user.ts +++ b/lib/github/services/user.ts @@ -1,6 +1,7 @@ import { GITHUB_APP_PRIVATE_KEY, GITHUB_APP_WEBHOOK_SECRET } from "@/lib/constants"; import { db } from "@/lib/db"; import { getRepositoryDefaultBranch, getRepositoryReadme } from "@/lib/github/services/repo"; +import { repositoryCache } from "@/lib/repository/cache"; import { App } from "octokit"; export const sendInstallationDetails = async ( @@ -94,6 +95,7 @@ export const sendInstallationDetails = async ( role: "member", }, }); + repositoryCache.revalidate({ userId: newUser.id, id: installationPrisma.id }); console.log(`Membership upserted for user: ${newUser.login}`); }) ); @@ -128,6 +130,7 @@ export const sendInstallationDetails = async ( }, }); console.log(`Membership upserted for user: ${newUser.login}`); + repositoryCache.revalidate({ userId: newUser.id, id: installationPrisma.id }); } if (repos) { diff --git a/lib/repository/service.ts b/lib/repository/service.ts index c21159b..a252eb4 100644 --- a/lib/repository/service.ts +++ b/lib/repository/service.ts @@ -149,6 +149,8 @@ export const fetchRepoDetails = (id: string) => export const getRepositoriesForUser = (userId: string) => unstable_cache( async (): Promise => { + console.log(`getRepositoriesForUser called with userId: ${userId}`); + console.log(`Starting database transaction`); try { const userRepositories = await db.repository.findMany({ where: { @@ -161,7 +163,7 @@ export const getRepositoriesForUser = (userId: string) => }, }, }); - + console.log(`Database transaction completed successfully`); return userRepositories as TRepository[]; } catch (error) { console.error(`Failed to get repositories for user: ${error}`); From 7d07814dae3c5dad34ebcdcc6548f4557a871cb4 Mon Sep 17 00:00:00 2001 From: chronark Date: Thu, 26 Sep 2024 09:25:16 +0200 Subject: [PATCH 7/7] chore: invalidate all older sessions A new cookie name is used for sessions, so all older sessions will no longer be valid. 
I could not really test this because there are a lot of required env variables, but I checked the NextAuth source and it should work :tm: --- lib/auth.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/lib/auth.ts b/lib/auth.ts index caa54d6..ab373c3 100644 --- a/lib/auth.ts +++ b/lib/auth.ts @@ -17,6 +17,15 @@ export const authOptions: NextAuthOptions = { clientSecret: env.GITHUB_APP_CLIENT_SECRET, }), ], + cookies: { + sessionToken: { + name: "hackathon-2024.session-token", + // Looks like this isn't optional, but it will merge with the default + // cookies from nextauth + // https://github.com/nextauthjs/next-auth/blob/5e5a7fc5b41ea2e7e687f5c6e6d89c7967609dcb/packages/core/src/lib/utils/cookie.ts#L58 + options: {}, + } + }, callbacks: { async signIn({ user, account, profile, ...rest }: any) { if (account.type !== "oauth") {
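For context on why the rename signs everyone out: NextAuth only reads the cookie name configured under cookies.sessionToken, so the old next-auth.session-token cookie is simply ignored after this change. Below is a minimal sketch of the knock-on effect, assuming next-auth v4's getToken helper and a hypothetical middleware.ts; neither is part of this patch, and I have not checked whether this repo reads the session JWT anywhere outside of authOptions.

import { getToken } from "next-auth/jwt";
import { NextRequest, NextResponse } from "next/server";

// Hypothetical middleware sketch: getToken defaults to the "next-auth.session-token"
// cookie, so after the rename it has to be pointed at the new cookie name explicitly.
export async function middleware(req: NextRequest) {
  const token = await getToken({
    req,
    secret: process.env.NEXTAUTH_SECRET,
    cookieName: "hackathon-2024.session-token", // must match authOptions.cookies.sessionToken.name
  });

  if (!token) {
    // Requests that only carry the old cookie land here, i.e. they are effectively signed out.
    // "/login" is an illustrative path, not one taken from this repo.
    return NextResponse.redirect(new URL("/login", req.url));
  }

  return NextResponse.next();
}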