diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors._index/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors._index/route.tsx
index 2459a067902..9b6a54e0e21 100644
--- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors._index/route.tsx
+++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors._index/route.tsx
@@ -1,8 +1,11 @@
-import { XMarkIcon } from "@heroicons/react/20/solid";
-import { Form, type MetaFunction } from "@remix-run/react";
+import * as Ariakit from "@ariakit/react";
+import { BellAlertIcon, XMarkIcon } from "@heroicons/react/20/solid";
+import { Form, useRevalidator, type MetaFunction } from "@remix-run/react";
import { type LoaderFunctionArgs } from "@remix-run/server-runtime";
+import { IconBugFilled } from "@tabler/icons-react";
import { ErrorId } from "@trigger.dev/core/v3/isomorphic";
-import { Suspense, useMemo } from "react";
+import { type ErrorGroupStatus } from "@trigger.dev/database";
+import { Suspense, useCallback, useMemo, type ReactNode } from "react";
import {
Bar,
BarChart,
@@ -13,30 +16,43 @@ import {
type TooltipProps,
} from "recharts";
import { TypedAwait, typeddefer, useTypedLoaderData } from "remix-typedjson";
+import { ErrorStatusBadge } from "~/components/errors/ErrorStatusBadge";
import { PageBody } from "~/components/layout/AppLayout";
-import { SearchInput } from "~/components/primitives/SearchInput";
+import { ListPagination } from "~/components/ListPagination";
import { LogsTaskFilter } from "~/components/logs/LogsTaskFilter";
-import { Button } from "~/components/primitives/Buttons";
+import { LogsVersionFilter } from "~/components/logs/LogsVersionFilter";
+import { AppliedFilter } from "~/components/primitives/AppliedFilter";
+import { Button, LinkButton } from "~/components/primitives/Buttons";
import { Callout } from "~/components/primitives/Callout";
import { formatDateTime, RelativeDateTime } from "~/components/primitives/DateTime";
import { Header3 } from "~/components/primitives/Headers";
import { NavBar, PageTitle } from "~/components/primitives/PageHeader";
import { Paragraph } from "~/components/primitives/Paragraph";
+import { SearchInput } from "~/components/primitives/SearchInput";
+import {
+ ComboBox,
+ SelectItem,
+ SelectList,
+ SelectPopover,
+ SelectProvider,
+ SelectTrigger,
+} from "~/components/primitives/Select";
import { Spinner } from "~/components/primitives/Spinner";
import {
CopyableTableCell,
Table,
TableBody,
TableCell,
- TableCellChevron,
TableHeader,
TableHeaderCell,
TableRow,
} from "~/components/primitives/Table";
import TooltipPortal from "~/components/primitives/TooltipPortal";
-import { TimeFilter } from "~/components/runs/v3/SharedFilters";
+import { appliedSummary, FilterMenuProvider, TimeFilter } from "~/components/runs/v3/SharedFilters";
import { $replica } from "~/db.server";
+import { useInterval } from "~/hooks/useInterval";
import { useOptimisticLocation } from "~/hooks/useOptimisticLocation";
+import { useSearchParams } from "~/hooks/useSearchParam";
import { findProjectBySlug } from "~/models/project.server";
import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server";
import {
@@ -49,7 +65,6 @@ import {
import { logsClickhouseClient } from "~/services/clickhouseInstance.server";
import { getCurrentPlan } from "~/services/platform.v3.server";
import { requireUser } from "~/services/session.server";
-import { ListPagination } from "~/components/ListPagination";
import { formatNumberCompact } from "~/utils/numberFormatter";
import { EnvironmentParamSchema, v3ErrorPath } from "~/utils/pathBuilder";
import { ServiceValidationError } from "~/v3/services/baseService.server";
@@ -80,6 +95,12 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const url = new URL(request.url);
const tasks = url.searchParams.getAll("tasks").filter((t) => t.length > 0);
+ const versions = url.searchParams.getAll("versions").filter((v) => v.length > 0);
+ const statuses = url.searchParams
+ .getAll("status")
+ .filter(
+ (s): s is ErrorGroupStatus => s === "UNRESOLVED" || s === "RESOLVED" || s === "IGNORED"
+ );
const search = url.searchParams.get("search") ?? undefined;
const period = url.searchParams.get("period") ?? undefined;
const fromStr = url.searchParams.get("from");
@@ -101,6 +122,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
userId,
projectId: project.id,
tasks: tasks.length > 0 ? tasks : undefined,
+ versions: versions.length > 0 ? versions : undefined,
+ statuses: statuses.length > 0 ? statuses : undefined,
search,
period,
from,
@@ -153,6 +176,24 @@ export default function Page() {
envParam,
  } = useTypedLoaderData<typeof loader>();
+ const revalidator = useRevalidator();
+ useInterval({
+ interval: 60_000,
+ onLoad: false,
+ callback: useCallback(() => {
+ if (revalidator.state === "idle") {
+ revalidator.revalidate();
+ }
+ }, [revalidator]),
+ });
+
+ const location = useOptimisticLocation();
+ const alertsHref = useMemo(() => {
+ const params = new URLSearchParams(location.search);
+ params.set("alerts", "true");
+ return `?${params.toString()}`;
+ }, [location.search]);
+
return (
<>
@@ -177,7 +218,11 @@ export default function Page() {
resolve={data}
errorElement={
-
+
Unable to load errors. Please refresh the page or try again in a moment.
@@ -193,6 +238,7 @@ export default function Page() {
@@ -208,6 +254,7 @@ export default function Page() {
list={result}
defaultPeriod={defaultPeriod}
retentionLimitDays={retentionLimitDays}
+ alertsHref={alertsHref}
/>
;
+const statusShortcut = { key: "s" };
+
+function StatusFilter() {
+ const { values, del } = useSearchParams();
+ const selectedStatuses = values("status");
+
+ if (selectedStatuses.length === 0 || selectedStatuses.every((v) => v === "")) {
+ return (
+
+ {(search, setSearch) => (
+
+ Status
+
+ }
+ searchValue={search}
+ clearSearchValue={() => setSearch("")}
+ />
+ )}
+
+ );
+ }
+
+ return (
+
+ {(search, setSearch) => (
+ }>
+ {
+ const opt = errorStatusOptions.find((o) => o.value === s);
+ return opt ? opt.label : s;
+ })
+ )}
+ onRemove={() => del(["status", "cursor", "direction"])}
+ variant="secondary/small"
+ />
+
+ }
+ searchValue={search}
+ clearSearchValue={() => setSearch("")}
+ />
+ )}
+
+ );
+}
+
+function ErrorStatusDropdown({
+ trigger,
+ clearSearchValue,
+ onClose,
+}: {
+ trigger: ReactNode;
+ clearSearchValue: () => void;
+ searchValue: string;
+ onClose?: () => void;
+}) {
+ const { values, replace } = useSearchParams();
+
+ const handleChange = (values: string[]) => {
+ clearSearchValue();
+ replace({
+ status: values.length > 0 ? values : undefined,
+ cursor: undefined,
+ direction: undefined,
+ });
+ };
+
+ return (
+
+ {trigger}
+ {
+ if (onClose) {
+ onClose();
+ return false;
+ }
+ return true;
+ }}
+ >
+
+ {errorStatusOptions.map((item) => (
+
+
+
+ ))}
+
+
+
+ );
+}
+
function FiltersBar({
list,
defaultPeriod,
retentionLimitDays,
+ alertsHref,
}: {
list?: ErrorsListData;
defaultPeriod?: string;
retentionLimitDays: number;
+ alertsHref: string;
}) {
const location = useOptimisticLocation();
const searchParams = new URLSearchParams(location.search);
const hasFilters =
+ searchParams.has("status") ||
searchParams.has("tasks") ||
- searchParams.has("search") ||
- searchParams.has("period") ||
- searchParams.has("from") ||
- searchParams.has("to");
+ searchParams.has("versions") ||
+ searchParams.has("search");
return (
{list ? (
<>
+
+
) : (
<>
+
+
{hasFilters && (
@@ -283,7 +445,17 @@ function FiltersBar({
>
)}
- {list &&
}
+
+
+ Configure alerts
+
+ {list && }
+
);
}
@@ -303,22 +475,21 @@ function ErrorsList({
}) {
if (errorGroups.length === 0) {
return (
-
-
-
No errors found
-
- No errors have been recorded in the selected time period.
-
-
+
+
+
+ No errors found for this time period.
+
);
}
return (
-
+
ID
+ Status
Task
Error
Occurrences
@@ -373,6 +544,9 @@ function ErrorGroupRow({
if (period) carry.set("period", period);
if (from) carry.set("from", from);
if (to) carry.set("to", to);
+ for (const v of searchParams.getAll("versions")) {
+ if (v) carry.append("versions", v);
+ }
const qs = carry.toString();
return qs ? `${base}?${qs}` : base;
}, [organizationSlug, projectParam, envParam, errorGroup.fingerprint, searchParams.toString()]);
@@ -384,11 +558,16 @@ function ErrorGroupRow({
{errorGroup.fingerprint.slice(-8)}
+
+
+
{errorGroup.taskIdentifier}
- {errorMessage}
+ {errorMessage.length > 128 ? `${errorMessage.slice(0, 128)}…` : errorMessage}
- {errorGroup.count.toLocaleString()}
+
+ {errorGroup.count.toLocaleString()}
+
}>
}>
@@ -403,10 +582,10 @@ function ErrorGroupRow({
-
+
-
+
@@ -418,12 +597,12 @@ function ErrorActivityGraph({ activity }: { activity: ErrorOccurrenceActivity })
return (
-
+
}
allowEscapeViewBox={{ x: true, y: true }}
wrapperStyle={{ zIndex: 1000 }}
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors.connect-to-slack.ts b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors.connect-to-slack.ts
new file mode 100644
index 00000000000..cb9ba373bc5
--- /dev/null
+++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors.connect-to-slack.ts
@@ -0,0 +1,47 @@
+import { type LoaderFunctionArgs } from "@remix-run/server-runtime";
+import { prisma } from "~/db.server";
+import { redirectWithSuccessMessage } from "~/models/message.server";
+import { OrgIntegrationRepository } from "~/models/orgIntegration.server";
+import { findProjectBySlug } from "~/models/project.server";
+import { requireUserId } from "~/services/session.server";
+import {
+ EnvironmentParamSchema,
+ v3ErrorsPath,
+ v3ErrorsConnectToSlackPath,
+} from "~/utils/pathBuilder";
+
+export async function loader({ request, params }: LoaderFunctionArgs) {
+ const userId = await requireUserId(request);
+ const { organizationSlug, projectParam, envParam } = EnvironmentParamSchema.parse(params);
+
+ const url = new URL(request.url);
+ const shouldReinstall = url.searchParams.get("reinstall") === "true";
+
+ const project = await findProjectBySlug(organizationSlug, projectParam, userId);
+
+ if (!project) {
+ throw new Response("Project not found", { status: 404 });
+ }
+
+ const integration = await prisma.organizationIntegration.findFirst({
+ where: {
+ service: "SLACK",
+ organizationId: project.organizationId,
+ },
+ });
+
+ if (integration && !shouldReinstall) {
+ return redirectWithSuccessMessage(
+ `${v3ErrorsPath({ slug: organizationSlug }, project, { slug: envParam })}?alerts`,
+ request,
+ "Successfully connected your Slack workspace"
+ );
+ }
+
+ return await OrgIntegrationRepository.redirectToAuthService(
+ "SLACK",
+ project.organizationId,
+ request,
+ v3ErrorsConnectToSlackPath({ slug: organizationSlug }, project, { slug: envParam })
+ );
+}
diff --git a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors/route.tsx b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors/route.tsx
index f6723ddebaa..07105f3f963 100644
--- a/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors/route.tsx
+++ b/apps/webapp/app/routes/_app.orgs.$organizationSlug.projects.$projectParam.env.$envParam.errors/route.tsx
@@ -1,10 +1,202 @@
-import { Outlet } from "@remix-run/react";
+import { parse } from "@conform-to/zod";
+import { Outlet, useNavigate } from "@remix-run/react";
+import { type ActionFunctionArgs, type LoaderFunctionArgs, json } from "@remix-run/server-runtime";
+import { useCallback } from "react";
+import { typedjson, useTypedLoaderData } from "remix-typedjson";
import { PageContainer } from "~/components/layout/AppLayout";
+import {
+ ConfigureErrorAlerts,
+ ErrorAlertsFormSchema,
+} from "~/components/errors/ConfigureErrorAlerts";
+import { Sheet, SheetContent } from "~/components/primitives/SheetV3";
+import { prisma } from "~/db.server";
+import { ErrorAlertChannelPresenter } from "~/presenters/v3/ErrorAlertChannelPresenter.server";
+import { findProjectBySlug } from "~/models/project.server";
+import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server";
+import { requireUserId } from "~/services/session.server";
+import { env } from "~/env.server";
+import {
+ EnvironmentParamSchema,
+ v3ErrorsConnectToSlackPath,
+ v3ErrorsPath,
+} from "~/utils/pathBuilder";
+import {
+ type CreateAlertChannelOptions,
+ CreateAlertChannelService,
+} from "~/v3/services/alerts/createAlertChannel.server";
+import { useOptimisticLocation } from "~/hooks/useOptimisticLocation";
+import { useSearchParams } from "~/hooks/useSearchParam";
+
+export const loader = async ({ request, params }: LoaderFunctionArgs) => {
+ const userId = await requireUserId(request);
+ const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params);
+
+ const project = await findProjectBySlug(organizationSlug, projectParam, userId);
+ if (!project) {
+ throw new Response("Project not found", { status: 404 });
+ }
+
+ const environment = await findEnvironmentBySlug(project.id, envParam, userId);
+ if (!environment) {
+ throw new Response("Environment not found", { status: 404 });
+ }
+
+ const presenter = new ErrorAlertChannelPresenter();
+ const alertData = await presenter.call(project.id, environment.type);
+
+ const connectToSlackHref = v3ErrorsConnectToSlackPath({ slug: organizationSlug }, project, {
+ slug: envParam,
+ });
+
+ const errorsPath = v3ErrorsPath({ slug: organizationSlug }, project, { slug: envParam });
+
+ return typedjson({
+ alertData,
+ projectRef: project.externalRef,
+ projectId: project.id,
+ environmentType: environment.type,
+ connectToSlackHref,
+ errorsPath,
+ });
+};
+
+export const action = async ({ request, params }: ActionFunctionArgs) => {
+ const userId = await requireUserId(request);
+ const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params);
+
+ if (request.method.toUpperCase() !== "POST") {
+ return json({ status: 405, error: "Method Not Allowed" }, { status: 405 });
+ }
+
+ const project = await findProjectBySlug(organizationSlug, projectParam, userId);
+ if (!project) {
+ return json({ error: "Project not found" }, { status: 404 });
+ }
+
+ const environment = await findEnvironmentBySlug(project.id, envParam, userId);
+ if (!environment) {
+ return json({ error: "Environment not found" }, { status: 404 });
+ }
+
+ const formData = await request.formData();
+ const submission = parse(formData, { schema: ErrorAlertsFormSchema });
+
+ if (!submission.value) {
+ return json(submission);
+ }
+
+ const { emails, webhooks, slackChannel, slackIntegrationId } = submission.value;
+
+ const existingChannels = await prisma.projectAlertChannel.findMany({
+ where: {
+ projectId: project.id,
+ alertTypes: { has: "ERROR_GROUP" },
+ environmentTypes: { has: environment.type },
+ },
+ });
+
+ const service = new CreateAlertChannelService();
+ const environmentTypes = [environment.type];
+  const processedChannelIds = new Set<string>();
+
+ for (const email of emails) {
+ const options: CreateAlertChannelOptions = {
+ name: `Error alert to ${email}`,
+ alertTypes: ["ERROR_GROUP"],
+ environmentTypes,
+ deduplicationKey: `error-email:${email}:${environment.type}`,
+ channel: { type: "EMAIL", email },
+ };
+ const channel = await service.call(project.externalRef, userId, options);
+ processedChannelIds.add(channel.id);
+ }
+
+ if (slackChannel) {
+ const [channelId, channelName] = slackChannel.split("/");
+ if (channelId && channelName) {
+ const options: CreateAlertChannelOptions = {
+ name: `Error alert to #${channelName}`,
+ alertTypes: ["ERROR_GROUP"],
+ environmentTypes,
+ deduplicationKey: `error-slack:${environment.type}`,
+ channel: {
+ type: "SLACK",
+ channelId,
+ channelName,
+ integrationId: slackIntegrationId,
+ },
+ };
+ const channel = await service.call(project.externalRef, userId, options);
+ processedChannelIds.add(channel.id);
+ }
+ }
+
+ for (const url of webhooks) {
+ const options: CreateAlertChannelOptions = {
+ name: `Error alert to ${new URL(url).hostname}`,
+ alertTypes: ["ERROR_GROUP"],
+ environmentTypes,
+ deduplicationKey: `error-webhook:${url}:${environment.type}`,
+ channel: { type: "WEBHOOK", url },
+ };
+ const channel = await service.call(project.externalRef, userId, options);
+ processedChannelIds.add(channel.id);
+ }
+
+ const editableTypes = new Set(["WEBHOOK"]);
+ if (env.ALERT_FROM_EMAIL !== undefined && env.ALERT_RESEND_API_KEY !== undefined) {
+ editableTypes.add("EMAIL");
+ }
+ if (slackIntegrationId) {
+ editableTypes.add("SLACK");
+ }
+
+ const channelsToDelete = existingChannels.filter(
+ (ch) =>
+ !processedChannelIds.has(ch.id) &&
+ editableTypes.has(ch.type) &&
+ ch.alertTypes.length === 1 &&
+ ch.alertTypes[0] === "ERROR_GROUP"
+ );
+
+ for (const ch of channelsToDelete) {
+ await prisma.projectAlertChannel.delete({ where: { id: ch.id } });
+ }
+
+ return json({ ok: true });
+};
export default function Page() {
+  const { alertData, connectToSlackHref, errorsPath } = useTypedLoaderData<typeof loader>();
+ const { has } = useSearchParams();
+ const showAlerts = has("alerts") ?? false;
+ const navigate = useNavigate();
+ const location = useOptimisticLocation();
+
+ const closeAlerts = useCallback(() => {
+ const params = new URLSearchParams(location.search);
+ params.delete("alerts");
+ const qs = params.toString();
+ navigate(qs ? `?${qs}` : location.pathname, { replace: true });
+ }, [location.search, location.pathname, navigate]);
+
return (
+
+ !open && closeAlerts()}>
+ e.preventDefault()}
+ >
+
+
+
);
}
diff --git a/apps/webapp/app/routes/storybook.unordered-list/route.tsx b/apps/webapp/app/routes/storybook.unordered-list/route.tsx
new file mode 100644
index 00000000000..b17bb2dda11
--- /dev/null
+++ b/apps/webapp/app/routes/storybook.unordered-list/route.tsx
@@ -0,0 +1,67 @@
+import { Header2 } from "~/components/primitives/Headers";
+import { Paragraph, type ParagraphVariant } from "~/components/primitives/Paragraph";
+import { UnorderedList } from "~/components/primitives/UnorderedList";
+
+const sampleItems = [
+ "A new issue is seen for the first time",
+ "A resolved issue re-occurs",
+ "An ignored issue re-occurs depending on the settings you configured",
+];
+
+const variantGroups: { label: string; variants: ParagraphVariant[] }[] = [
+ {
+ label: "Base",
+ variants: ["base", "base/bright"],
+ },
+ {
+ label: "Small",
+ variants: ["small", "small/bright", "small/dimmed"],
+ },
+ {
+ label: "Extra small",
+ variants: [
+ "extra-small",
+ "extra-small/bright",
+ "extra-small/dimmed",
+ "extra-small/mono",
+ "extra-small/bright/mono",
+ "extra-small/dimmed/mono",
+ "extra-small/caps",
+ "extra-small/bright/caps",
+ ],
+ },
+ {
+ label: "Extra extra small",
+ variants: [
+ "extra-extra-small",
+ "extra-extra-small/bright",
+ "extra-extra-small/caps",
+ "extra-extra-small/bright/caps",
+ "extra-extra-small/dimmed/caps",
+ ],
+ },
+];
+
+export default function Story() {
+ return (
+
+ {variantGroups.map((group) => (
+
+
{group.label}
+ {group.variants.map((variant) => (
+
+
{variant}
+
This is a paragraph before the list.
+
+ {sampleItems.map((item) => (
+ {item}
+ ))}
+
+
This is a paragraph after the list.
+
+ ))}
+
+ ))}
+
+ );
+}
diff --git a/apps/webapp/app/routes/storybook/route.tsx b/apps/webapp/app/routes/storybook/route.tsx
index 83d455c2a55..bcaee62d6b0 100644
--- a/apps/webapp/app/routes/storybook/route.tsx
+++ b/apps/webapp/app/routes/storybook/route.tsx
@@ -136,6 +136,10 @@ const stories: Story[] = [
name: "Typography",
slug: "typography",
},
+ {
+ name: "Unordered list",
+ slug: "unordered-list",
+ },
{
name: "Usage",
slug: "usage",
diff --git a/apps/webapp/app/utils/pathBuilder.ts b/apps/webapp/app/utils/pathBuilder.ts
index f73f4139a01..7a151053f5a 100644
--- a/apps/webapp/app/utils/pathBuilder.ts
+++ b/apps/webapp/app/utils/pathBuilder.ts
@@ -584,6 +584,14 @@ export function v3ErrorsPath(
return `${v3EnvironmentPath(organization, project, environment)}/errors`;
}
+export function v3ErrorsConnectToSlackPath(
+ organization: OrgForPath,
+ project: ProjectForPath,
+ environment: EnvironmentForPath
+) {
+ return `${v3ErrorsPath(organization, project, environment)}/connect-to-slack`;
+}
+
export function v3ErrorPath(
organization: OrgForPath,
project: ProjectForPath,
diff --git a/apps/webapp/app/v3/alertsWorker.server.ts b/apps/webapp/app/v3/alertsWorker.server.ts
index 46670887a75..693b16b738a 100644
--- a/apps/webapp/app/v3/alertsWorker.server.ts
+++ b/apps/webapp/app/v3/alertsWorker.server.ts
@@ -1,10 +1,12 @@
import { Logger } from "@trigger.dev/core/logger";
-import { Worker as RedisWorker } from "@trigger.dev/redis-worker";
+import { CronSchema, Worker as RedisWorker } from "@trigger.dev/redis-worker";
import { z } from "zod";
import { env } from "~/env.server";
import { logger } from "~/services/logger.server";
import { singleton } from "~/utils/singleton";
import { DeliverAlertService } from "./services/alerts/deliverAlert.server";
+import { DeliverErrorGroupAlertService } from "./services/alerts/deliverErrorGroupAlert.server";
+import { ErrorAlertEvaluator } from "./services/alerts/errorAlertEvaluator.server";
import { PerformDeploymentAlertsService } from "./services/alerts/performDeploymentAlerts.server";
import { PerformTaskRunAlertsService } from "./services/alerts/performTaskRunAlerts.server";
@@ -55,6 +57,42 @@ function initializeWorker() {
},
logErrors: false,
},
+ "v3.evaluateErrorAlerts": {
+ schema: z.object({
+ projectId: z.string(),
+ scheduledAt: z.number(),
+ }),
+ visibilityTimeoutMs: 60_000 * 5,
+ retry: {
+ maxAttempts: 3,
+ },
+ logErrors: true,
+ },
+ "v3.deliverErrorGroupAlert": {
+ schema: z.object({
+ channelId: z.string(),
+ projectId: z.string(),
+ classification: z.enum(["new_issue", "regression", "unignored"]),
+ error: z.object({
+ fingerprint: z.string(),
+ environmentId: z.string(),
+ environmentSlug: z.string(),
+ environmentName: z.string(),
+ taskIdentifier: z.string(),
+ errorType: z.string(),
+ errorMessage: z.string(),
+ sampleStackTrace: z.string(),
+ firstSeen: z.string(),
+ lastSeen: z.string(),
+ occurrenceCount: z.number(),
+ }),
+ }),
+ visibilityTimeoutMs: 60_000,
+ retry: {
+ maxAttempts: 3,
+ },
+ logErrors: true,
+ },
},
concurrency: {
workers: env.ALERTS_WORKER_CONCURRENCY_WORKERS,
@@ -80,6 +118,14 @@ function initializeWorker() {
const service = new PerformTaskRunAlertsService();
await service.call(payload.runId);
},
+ "v3.evaluateErrorAlerts": async ({ payload }) => {
+ const evaluator = new ErrorAlertEvaluator();
+ await evaluator.evaluate(payload.projectId, payload.scheduledAt);
+ },
+ "v3.deliverErrorGroupAlert": async ({ payload }) => {
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+ },
},
});
diff --git a/apps/webapp/app/v3/otlpExporter.server.ts b/apps/webapp/app/v3/otlpExporter.server.ts
index 5fe2624557d..7505693e3ab 100644
--- a/apps/webapp/app/v3/otlpExporter.server.ts
+++ b/apps/webapp/app/v3/otlpExporter.server.ts
@@ -1194,4 +1194,4 @@ function initializeOTLPExporter() {
? parseInt(process.env.SERVER_OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT, 10)
: 8192
);
-}
+}
\ No newline at end of file
diff --git a/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts b/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts
index b2bbb423983..c87218f2bfc 100644
--- a/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts
+++ b/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts
@@ -7,6 +7,7 @@ import { nanoid } from "nanoid";
import { env } from "~/env.server";
import { findProjectByRef } from "~/models/project.server";
import { encryptSecret } from "~/services/secrets/secretStore.server";
+import { alertsWorker } from "~/v3/alertsWorker.server";
import { generateFriendlyId } from "~/v3/friendlyIdentifiers";
import { BaseService, ServiceValidationError } from "../baseService.server";
@@ -60,7 +61,7 @@ export class CreateAlertChannelService extends BaseService {
: undefined;
if (existingAlertChannel) {
- return await this._prisma.projectAlertChannel.update({
+ const updated = await this._prisma.projectAlertChannel.update({
where: { id: existingAlertChannel.id },
data: {
name: options.name,
@@ -70,6 +71,12 @@ export class CreateAlertChannelService extends BaseService {
environmentTypes,
},
});
+
+ if (options.alertTypes.includes("ERROR_GROUP")) {
+ await this.#scheduleErrorAlertEvaluation(project.id);
+ }
+
+ return updated;
}
const alertChannel = await this._prisma.projectAlertChannel.create({
@@ -87,9 +94,24 @@ export class CreateAlertChannelService extends BaseService {
},
});
+ if (options.alertTypes.includes("ERROR_GROUP")) {
+ await this.#scheduleErrorAlertEvaluation(project.id);
+ }
+
return alertChannel;
}
+  async #scheduleErrorAlertEvaluation(projectId: string): Promise<void> {
+ await alertsWorker.enqueue({
+ id: `evaluateErrorAlerts:${projectId}`,
+ job: "v3.evaluateErrorAlerts",
+ payload: {
+ projectId,
+ scheduledAt: Date.now(),
+ },
+ });
+ }
+
async #createProperties(channel: CreateAlertChannelOptions["channel"]) {
switch (channel.type) {
case "EMAIL":
diff --git a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts
index 8b922f91e9f..5ab99bf8046 100644
--- a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts
+++ b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts
@@ -319,6 +319,9 @@ export class DeliverAlertService extends BaseService {
break;
}
+ case "ERROR_GROUP": {
+ break;
+ }
default: {
assertNever(alert.type);
}
@@ -657,6 +660,9 @@ export class DeliverAlertService extends BaseService {
break;
}
+ case "ERROR_GROUP": {
+ break;
+ }
default: {
assertNever(alert.type);
}
@@ -913,6 +919,9 @@ export class DeliverAlertService extends BaseService {
return;
}
}
+ case "ERROR_GROUP": {
+ break;
+ }
default: {
assertNever(alert.type);
}
diff --git a/apps/webapp/app/v3/services/alerts/deliverErrorGroupAlert.server.ts b/apps/webapp/app/v3/services/alerts/deliverErrorGroupAlert.server.ts
new file mode 100644
index 00000000000..9b59d8fab3c
--- /dev/null
+++ b/apps/webapp/app/v3/services/alerts/deliverErrorGroupAlert.server.ts
@@ -0,0 +1,406 @@
+import {
+ type ChatPostMessageArguments,
+ ErrorCode,
+ type WebAPIHTTPError,
+ type WebAPIPlatformError,
+ type WebAPIRateLimitedError,
+ type WebAPIRequestError,
+} from "@slack/web-api";
+import { type ProjectAlertChannelType } from "@trigger.dev/database";
+import assertNever from "assert-never";
+import { prisma } from "~/db.server";
+import { env } from "~/env.server";
+import { v3ErrorPath } from "~/utils/pathBuilder";
+import {
+ isIntegrationForService,
+ type OrganizationIntegrationForService,
+ OrgIntegrationRepository,
+} from "~/models/orgIntegration.server";
+import {
+ ProjectAlertEmailProperties,
+ ProjectAlertSlackProperties,
+ ProjectAlertWebhookProperties,
+} from "~/models/projectAlert.server";
+import { sendAlertEmail } from "~/services/email.server";
+import { logger } from "~/services/logger.server";
+import { decryptSecret } from "~/services/secrets/secretStore.server";
+import { subtle } from "crypto";
+import { generateErrorGroupWebhookPayload } from "./errorGroupWebhook.server";
+
+// Why an alert fired: brand-new error, a resolved error that came back, or an
+// ignored error whose ignore condition was breached.
+type ErrorAlertClassification = "new_issue" | "regression" | "unignored";
+
+// Payload of the "v3.deliverErrorGroupAlert" job enqueued by ErrorAlertEvaluator.
+interface ErrorAlertPayload {
+  channelId: string;
+  projectId: string;
+  classification: ErrorAlertClassification;
+  error: {
+    fingerprint: string;
+    environmentId: string;
+    environmentSlug: string;
+    environmentName: string;
+    taskIdentifier: string;
+    errorType: string;
+    errorMessage: string;
+    sampleStackTrace: string;
+    // firstSeen/lastSeen are epoch-millisecond values serialized as strings
+    // (they are converted with Number(...) before use) — TODO confirm at the producer.
+    firstSeen: string;
+    lastSeen: string;
+    occurrenceCount: number;
+  };
+}
+
+// Thrown by senders to signal a permanent failure: the delivery is logged and
+// dropped instead of being retried by the worker.
+class SkipRetryError extends Error {}
+
+export class DeliverErrorGroupAlertService {
+  /**
+   * Delivers an ERROR_GROUP alert to the channel's configured destination
+   * (email, Slack or webhook). A SkipRetryError thrown by a sender is
+   * swallowed so the worker does not retry a permanently-failing delivery.
+   */
+  async call(payload: ErrorAlertPayload): Promise<void> {
+    const channel = await prisma.projectAlertChannel.findFirst({
+      where: { id: payload.channelId, enabled: true },
+      include: {
+        project: {
+          include: {
+            organization: true,
+          },
+        },
+      },
+    });
+
+    if (!channel) {
+      logger.warn("[DeliverErrorGroupAlert] Channel not found or disabled", {
+        channelId: payload.channelId,
+      });
+      return;
+    }
+
+    const errorLink = this.#buildErrorLink(channel.project.organization, channel.project, payload.error);
+
+    try {
+      switch (channel.type) {
+        case "EMAIL":
+          await this.#sendEmail(channel, payload, errorLink);
+          break;
+        case "SLACK":
+          await this.#sendSlack(channel, payload, errorLink);
+          break;
+        case "WEBHOOK":
+          await this.#sendWebhook(channel, payload, errorLink);
+          break;
+        default:
+          assertNever(channel.type);
+      }
+    } catch (error) {
+      if (error instanceof SkipRetryError) {
+        // instanceof already narrows to Error — no cast needed.
+        logger.warn("[DeliverErrorGroupAlert] Skipping retry", { reason: error.message });
+        return;
+      }
+      throw error;
+    }
+  }
+
+  /** Dashboard deep-link to the error group, anchored at the app origin. */
+  #buildErrorLink(
+    organization: { slug: string },
+    project: { slug: string },
+    error: ErrorAlertPayload["error"]
+  ): string {
+    return `${env.APP_ORIGIN}${v3ErrorPath(organization, project, { slug: error.environmentSlug }, { fingerprint: error.fingerprint })}`;
+  }
+
+  /** Human-readable label for the alert classification. */
+  #classificationLabel(classification: ErrorAlertClassification): string {
+    switch (classification) {
+      case "new_issue":
+        return "New error";
+      case "regression":
+        return "Regression";
+      case "unignored":
+        return "Error resurfaced";
+    }
+  }
+
+  /** Sends the alert via email; invalid channel properties are logged and dropped. */
+  async #sendEmail(
+    channel: { type: ProjectAlertChannelType; properties: unknown; project: { name: string; organization: { title: string } } },
+    payload: ErrorAlertPayload,
+    errorLink: string
+  ): Promise<void> {
+    const emailProperties = ProjectAlertEmailProperties.safeParse(channel.properties);
+    if (!emailProperties.success) {
+      logger.error("[DeliverErrorGroupAlert] Failed to parse email properties", {
+        issues: emailProperties.error.issues,
+      });
+      return;
+    }
+
+    await sendAlertEmail({
+      email: "alert-error-group",
+      to: emailProperties.data.email,
+      classification: payload.classification,
+      taskIdentifier: payload.error.taskIdentifier,
+      environment: payload.error.environmentName,
+      error: {
+        message: payload.error.errorMessage,
+        type: payload.error.errorType,
+        stackTrace: payload.error.sampleStackTrace || undefined,
+      },
+      occurrenceCount: payload.error.occurrenceCount,
+      errorLink,
+      organization: channel.project.organization.title,
+      project: channel.project.name,
+    });
+  }
+
+  /**
+   * Sends the alert to Slack. Uses the explicitly configured integration when
+   * one is set, otherwise falls back to the organization's newest SLACK
+   * integration.
+   */
+  async #sendSlack(
+    channel: {
+      type: ProjectAlertChannelType;
+      properties: unknown;
+      project: { organizationId: string; name: string; organization: { title: string } };
+    },
+    payload: ErrorAlertPayload,
+    errorLink: string
+  ): Promise<void> {
+    const slackProperties = ProjectAlertSlackProperties.safeParse(channel.properties);
+    if (!slackProperties.success) {
+      logger.error("[DeliverErrorGroupAlert] Failed to parse slack properties", {
+        issues: slackProperties.error.issues,
+      });
+      return;
+    }
+
+    const integration = slackProperties.data.integrationId
+      ? await prisma.organizationIntegration.findFirst({
+          where: {
+            id: slackProperties.data.integrationId,
+            organizationId: channel.project.organizationId,
+          },
+          include: { tokenReference: true },
+        })
+      : await prisma.organizationIntegration.findFirst({
+          where: {
+            service: "SLACK",
+            organizationId: channel.project.organizationId,
+          },
+          orderBy: { createdAt: "desc" },
+          include: { tokenReference: true },
+        });
+
+    if (!integration || !isIntegrationForService(integration, "SLACK")) {
+      logger.error("[DeliverErrorGroupAlert] Slack integration not found");
+      return;
+    }
+
+    const message = this.#buildErrorGroupSlackMessage(
+      payload,
+      errorLink,
+      channel.project.name
+    );
+
+    await this.#postSlackMessage(integration, {
+      channel: slackProperties.data.channelId,
+      ...message,
+    } as ChatPostMessageArguments);
+  }
+
+  /**
+   * Sends the alert to a customer webhook, signing the JSON body with
+   * HMAC-SHA256 (hex) in the `x-trigger-signature-hmacsha256` header.
+   * Non-2xx responses throw so the worker retries the delivery.
+   */
+  async #sendWebhook(
+    channel: {
+      type: ProjectAlertChannelType;
+      properties: unknown;
+      project: { id: string; externalRef: string; slug: string; name: string; organizationId: string; organization: { slug: string; title: string } };
+    },
+    payload: ErrorAlertPayload,
+    errorLink: string
+  ): Promise<void> {
+    const webhookProperties = ProjectAlertWebhookProperties.safeParse(channel.properties);
+    if (!webhookProperties.success) {
+      logger.error("[DeliverErrorGroupAlert] Failed to parse webhook properties", {
+        issues: webhookProperties.error.issues,
+      });
+      return;
+    }
+
+    const webhookPayload = generateErrorGroupWebhookPayload({
+      classification: payload.classification,
+      error: payload.error,
+      organization: {
+        id: channel.project.organizationId,
+        slug: channel.project.organization.slug,
+        name: channel.project.organization.title,
+      },
+      project: {
+        id: channel.project.id,
+        externalRef: channel.project.externalRef,
+        slug: channel.project.slug,
+        name: channel.project.name,
+      },
+      dashboardUrl: errorLink,
+    });
+
+    const rawPayload = JSON.stringify(webhookPayload);
+    const hashPayload = Buffer.from(rawPayload, "utf-8");
+    // The webhook secret is stored encrypted; decrypt before signing.
+    const secret = await decryptSecret(env.ENCRYPTION_KEY, webhookProperties.data.secret);
+    const hmacSecret = Buffer.from(secret, "utf-8");
+    const key = await subtle.importKey(
+      "raw",
+      hmacSecret,
+      { name: "HMAC", hash: "SHA-256" },
+      false,
+      ["sign"]
+    );
+    const signature = await subtle.sign("HMAC", key, hashPayload);
+    const signatureHex = Buffer.from(signature).toString("hex");
+
+    const response = await fetch(webhookProperties.data.url, {
+      method: "POST",
+      headers: {
+        "content-type": "application/json",
+        "x-trigger-signature-hmacsha256": signatureHex,
+      },
+      body: rawPayload,
+      // Bound slow endpoints so the worker is not held up indefinitely.
+      signal: AbortSignal.timeout(5000),
+    });
+
+    if (!response.ok) {
+      logger.info("[DeliverErrorGroupAlert] Failed to send webhook", {
+        status: response.status,
+        statusText: response.statusText,
+        url: webhookProperties.data.url,
+      });
+      throw new Error(`Failed to send error group alert webhook to ${webhookProperties.data.url}`);
+    }
+  }
+
+  /**
+   * Posts a message via the integration's bot token. Maps Slack errors:
+   * rate limits rethrow (retryable); invalid_blocks / account_inactive become
+   * SkipRetryError (permanent).
+   */
+  async #postSlackMessage(
+    integration: OrganizationIntegrationForService<"SLACK">,
+    message: ChatPostMessageArguments
+  ) {
+    const client = await OrgIntegrationRepository.getAuthenticatedClientForIntegration(
+      integration,
+      { forceBotToken: true }
+    );
+
+    try {
+      return await client.chat.postMessage({
+        ...message,
+        unfurl_links: false,
+        unfurl_media: false,
+      });
+    } catch (error) {
+      if (isWebAPIRateLimitedError(error)) {
+        throw new Error("Slack rate limited");
+      }
+      if (isWebAPIPlatformError(error)) {
+        // The guard narrows `error`, so the data payload is directly accessible.
+        if (
+          error.data.error === "invalid_blocks" ||
+          error.data.error === "account_inactive"
+        ) {
+          throw new SkipRetryError(`Slack: ${error.data.error}`);
+        }
+        throw new Error("Slack platform error");
+      }
+      throw error;
+    }
+  }
+
+  /** Builds the Slack message: headline section plus a red attachment with details. */
+  #buildErrorGroupSlackMessage(
+    payload: ErrorAlertPayload,
+    errorLink: string,
+    projectName: string
+  ): { text: string; blocks: object[]; attachments: object[] } {
+    const label = this.#classificationLabel(payload.classification);
+    const errorType = payload.error.errorType || "Error";
+    const task = payload.error.taskIdentifier;
+    const envName = payload.error.environmentName;
+
+    return {
+      text: `${label}: ${errorType} in ${task} [${envName}]`,
+      blocks: [
+        {
+          type: "section",
+          text: {
+            type: "mrkdwn",
+            text: `*${label} in ${task} [${envName}]*`,
+          },
+        },
+      ],
+      attachments: [
+        {
+          color: "danger",
+          blocks: [
+            {
+              type: "section",
+              text: {
+                type: "mrkdwn",
+                text: this.#wrapInCodeBlock(
+                  payload.error.sampleStackTrace || payload.error.errorMessage
+                ),
+              },
+            },
+            {
+              type: "section",
+              fields: [
+                {
+                  type: "mrkdwn",
+                  text: `*Task:*\n${task}`,
+                },
+                {
+                  type: "mrkdwn",
+                  text: `*Environment:*\n${envName}`,
+                },
+                {
+                  type: "mrkdwn",
+                  text: `*Project:*\n${projectName}`,
+                },
+                {
+                  type: "mrkdwn",
+                  text: `*Occurrences:*\n${payload.error.occurrenceCount}`,
+                },
+                {
+                  type: "mrkdwn",
+                  text: `*Last seen:*\n${this.#formatTimestamp(new Date(Number(payload.error.lastSeen)))}`,
+                },
+              ],
+            },
+            {
+              type: "actions",
+              elements: [
+                {
+                  type: "button",
+                  text: { type: "plain_text", text: "Investigate" },
+                  url: errorLink,
+                  style: "primary",
+                },
+              ],
+            },
+          ],
+        },
+      ],
+    };
+  }
+
+  /**
+   * Wraps text in a Slack code block, truncating so the whole block (including
+   * the ``` fences) stays within maxLength.
+   */
+  #wrapInCodeBlock(text: string, maxLength = 3000) {
+    const wrapperLength = 6; // ``` prefix + ``` suffix
+    const truncationSuffix = "\n\n...truncated — check dashboard for full error";
+    const innerMax = maxLength - wrapperLength;
+
+    const truncated =
+      text.length > innerMax
+        ? text.slice(0, innerMax - truncationSuffix.length) + truncationSuffix
+        : text;
+    return `\`\`\`${truncated}\`\`\``;
+  }
+
+  /** Formats a timestamp like "Jan 5, 2025, 3:04:05 PM" (en-US, 12-hour). */
+  #formatTimestamp(date: Date): string {
+    return new Intl.DateTimeFormat("en-US", {
+      month: "short",
+      day: "numeric",
+      year: "numeric",
+      hour: "numeric",
+      minute: "2-digit",
+      second: "2-digit",
+      hour12: true,
+    }).format(date);
+  }
+}
+
+/** Type guard for Slack platform errors; safe to call with null/undefined. */
+function isWebAPIPlatformError(error: unknown): error is WebAPIPlatformError {
+  // Guard object-ness first: reading `.code` off null/undefined would throw.
+  return (
+    typeof error === "object" &&
+    error !== null &&
+    (error as WebAPIPlatformError).code === ErrorCode.PlatformError
+  );
+}
+
+/** Type guard for Slack rate-limit errors; safe to call with null/undefined. */
+function isWebAPIRateLimitedError(error: unknown): error is WebAPIRateLimitedError {
+  // Guard object-ness first: reading `.code` off null/undefined would throw.
+  return (
+    typeof error === "object" &&
+    error !== null &&
+    (error as WebAPIRateLimitedError).code === ErrorCode.RateLimitedError
+  );
+}
diff --git a/apps/webapp/app/v3/services/alerts/errorAlertEvaluator.server.ts b/apps/webapp/app/v3/services/alerts/errorAlertEvaluator.server.ts
new file mode 100644
index 00000000000..78e51b3d847
--- /dev/null
+++ b/apps/webapp/app/v3/services/alerts/errorAlertEvaluator.server.ts
@@ -0,0 +1,437 @@
+import { type ActiveErrorsSinceQueryResult, type ClickHouse } from "@internal/clickhouse";
+import {
+ type ErrorGroupState,
+ type PrismaClientOrTransaction,
+ type ProjectAlertChannel,
+ type RuntimeEnvironmentType,
+} from "@trigger.dev/database";
+import { $replica, prisma } from "~/db.server";
+import { ErrorAlertConfig } from "~/models/projectAlert.server";
+import { clickhouseClient } from "~/services/clickhouseInstance.server";
+import { logger } from "~/services/logger.server";
+import { alertsWorker } from "~/v3/alertsWorker.server";
+
+// Why an error is alertable: brand-new, a resolved error that returned, or an
+// ignored error whose ignore condition was breached.
+type ErrorClassification = "new_issue" | "regression" | "unignored";
+
+// An error from ClickHouse paired with its classification and the display
+// metadata of the environment it occurred in.
+interface AlertableError {
+  classification: ErrorClassification;
+  error: ActiveErrorsSinceQueryResult;
+  environmentSlug: string;
+  environmentName: string;
+}
+
+// Minimal environment projection used for matching channels and labeling alerts.
+interface ResolvedEnvironment {
+  id: string;
+  slug: string;
+  type: RuntimeEnvironmentType;
+  // Branch name when set, otherwise the environment slug.
+  displayName: string;
+}
+
+// Fallback evaluation interval (5 minutes) when no channel configures one.
+const DEFAULT_INTERVAL_MS = 300_000;
+
+/**
+ * For a project, evaluates whether to send error alerts
+ *
+ * Alerts are sent if an error is
+ * 1. A new issue
+ * 2. A regression (was resolved and now back)
+ * 3. Unignored (was ignored and is no longer)
+ *
+ * Unignored happens in 3 situations
+ * 1. It was ignored with a future date, and that's now in the past
+ * 2. It was ignored until reaching an error rate (e.g. 10/minute) and that has been exceeded
+ * 3. It was ignored until reaching a total occurrence count (e.g. 1,000) and that has been exceeded
+ */
+export class ErrorAlertEvaluator {
+ constructor(
+ protected readonly _prisma: PrismaClientOrTransaction = prisma,
+ protected readonly _replica: PrismaClientOrTransaction = $replica,
+ protected readonly _clickhouse: ClickHouse = clickhouseClient
+ ) {}
+
+ async evaluate(projectId: string, scheduledAt: number): Promise {
+ const nextScheduledAt = Date.now();
+
+ const channels = await this.resolveChannels(projectId);
+ if (channels.length === 0) {
+ logger.info("[ErrorAlertEvaluator] No active ERROR_GROUP channels, self-terminating", {
+ projectId,
+ });
+ return;
+ }
+
+ const minIntervalMs = this.computeMinInterval(channels);
+ const windowMs = nextScheduledAt - scheduledAt;
+
+ if (windowMs > minIntervalMs * 2) {
+ logger.info("[ErrorAlertEvaluator] Large evaluation window (gap detected)", {
+ projectId,
+ scheduledAt,
+ nextScheduledAt,
+ windowMs,
+ minIntervalMs,
+ });
+ }
+
+ const allEnvTypes = this.collectEnvironmentTypes(channels);
+ const environments = await this.resolveEnvironments(projectId, allEnvTypes);
+
+ if (environments.length === 0) {
+ logger.info("[ErrorAlertEvaluator] No matching environments found", { projectId });
+ await this.selfChain(projectId, nextScheduledAt, minIntervalMs);
+ return;
+ }
+
+ const envIds = environments.map((e) => e.id);
+ const envMap = new Map(environments.map((e) => [e.id, e]));
+ const channelsByEnvId = this.buildChannelsByEnvId(channels, environments);
+
+ const activeErrors = await this.getActiveErrors(projectId, envIds, scheduledAt);
+
+ if (activeErrors.length === 0) {
+ await this.selfChain(projectId, nextScheduledAt, minIntervalMs);
+ return;
+ }
+
+ const states = await this.getErrorGroupStates(activeErrors);
+ const stateMap = this.buildStateMap(states);
+
+ const occurrenceCounts = await this.getOccurrenceCountsSince(projectId, envIds, scheduledAt);
+ const occurrenceMap = this.buildOccurrenceMap(occurrenceCounts);
+
+ const alertableErrors: AlertableError[] = [];
+
+ for (const error of activeErrors) {
+ const key = `${error.environment_id}:${error.task_identifier}:${error.error_fingerprint}`;
+ const state = stateMap.get(key);
+ const env = envMap.get(error.environment_id);
+ const firstSeenMs = Number(error.first_seen);
+
+ const classification = this.classifyError(error, state, firstSeenMs, scheduledAt, {
+ occurrencesSince: occurrenceMap.get(key) ?? 0,
+ windowMs,
+ totalOccurrenceCount: error.occurrence_count,
+ });
+
+ if (classification) {
+ alertableErrors.push({
+ classification,
+ error,
+ environmentSlug: env?.slug ?? "",
+ environmentName: env?.displayName ?? error.environment_id,
+ });
+ }
+ }
+
+ const stateUpdates = alertableErrors.filter(
+ (a) => a.classification === "regression" || a.classification === "unignored"
+ );
+ await this.updateErrorGroupStates(stateUpdates, stateMap);
+
+ for (const alertable of alertableErrors) {
+ const envChannels = channelsByEnvId.get(alertable.error.environment_id) ?? [];
+ for (const channel of envChannels) {
+ await alertsWorker.enqueue({
+ job: "v3.deliverErrorGroupAlert",
+ payload: {
+ channelId: channel.id,
+ projectId,
+ classification: alertable.classification,
+ error: {
+ fingerprint: alertable.error.error_fingerprint,
+ environmentId: alertable.error.environment_id,
+ environmentSlug: alertable.environmentSlug,
+ environmentName: alertable.environmentName,
+ taskIdentifier: alertable.error.task_identifier,
+ errorType: alertable.error.error_type,
+ errorMessage: alertable.error.error_message,
+ sampleStackTrace: alertable.error.sample_stack_trace,
+ firstSeen: alertable.error.first_seen,
+ lastSeen: alertable.error.last_seen,
+ occurrenceCount: alertable.error.occurrence_count,
+ },
+ },
+ });
+ }
+ }
+
+ logger.info("[ErrorAlertEvaluator] Evaluation complete", {
+ projectId,
+ activeErrors: activeErrors.length,
+ alertableErrors: alertableErrors.length,
+ deliveryJobsEnqueued: alertableErrors.reduce(
+ (sum, a) => sum + (channelsByEnvId.get(a.error.environment_id)?.length ?? 0),
+ 0
+ ),
+ });
+
+ await this.selfChain(projectId, nextScheduledAt, minIntervalMs);
+ }
+
+ private classifyError(
+ error: ActiveErrorsSinceQueryResult,
+ state: ErrorGroupState | undefined,
+ firstSeenMs: number,
+ scheduledAt: number,
+ thresholdContext: { occurrencesSince: number; windowMs: number; totalOccurrenceCount: number }
+ ): ErrorClassification | null {
+ if (!state) {
+ return firstSeenMs > scheduledAt ? "new_issue" : null;
+ }
+
+ switch (state.status) {
+ case "UNRESOLVED":
+ return null;
+
+ case "RESOLVED": {
+ if (!state.resolvedAt) return null;
+ const lastSeenMs = Number(error.last_seen);
+ return lastSeenMs > state.resolvedAt.getTime() ? "regression" : null;
+ }
+
+ case "IGNORED":
+ return this.isIgnoreBreached(state, thresholdContext) ? "unignored" : null;
+
+ default:
+ return null;
+ }
+ }
+
+ private isIgnoreBreached(
+ state: ErrorGroupState,
+ context: { occurrencesSince: number; windowMs: number; totalOccurrenceCount: number }
+ ): boolean {
+ if (state.ignoredUntil && state.ignoredUntil.getTime() <= Date.now()) {
+ return true;
+ }
+
+ if (
+ state.ignoredUntilOccurrenceRate !== null &&
+ state.ignoredUntilOccurrenceRate !== undefined
+ ) {
+ const windowMinutes = Math.max(context.windowMs / 60_000, 1);
+ const rate = context.occurrencesSince / windowMinutes;
+ if (rate > state.ignoredUntilOccurrenceRate) {
+ return true;
+ }
+ }
+
+ if (
+ state.ignoredUntilTotalOccurrences != null &&
+ state.ignoredAtOccurrenceCount != null
+ ) {
+ const occurrencesSinceIgnored =
+ context.totalOccurrenceCount - Number(state.ignoredAtOccurrenceCount);
+ if (occurrencesSinceIgnored >= state.ignoredUntilTotalOccurrences) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ private async resolveChannels(projectId: string): Promise {
+ return this._replica.projectAlertChannel.findMany({
+ where: {
+ projectId,
+ alertTypes: { has: "ERROR_GROUP" },
+ enabled: true,
+ },
+ });
+ }
+
+ private computeMinInterval(channels: ProjectAlertChannel[]): number {
+ let min = DEFAULT_INTERVAL_MS;
+ for (const ch of channels) {
+ const config = ErrorAlertConfig.safeParse(ch.errorAlertConfig);
+ if (config.success) {
+ min = Math.min(min, config.data.evaluationIntervalMs);
+ }
+ }
+ return min;
+ }
+
+ private collectEnvironmentTypes(channels: ProjectAlertChannel[]): RuntimeEnvironmentType[] {
+ const types = new Set();
+ for (const ch of channels) {
+ for (const t of ch.environmentTypes) {
+ types.add(t);
+ }
+ }
+ return Array.from(types);
+ }
+
+ private async resolveEnvironments(
+ projectId: string,
+ types: RuntimeEnvironmentType[]
+ ): Promise {
+ const envs = await this._replica.runtimeEnvironment.findMany({
+ where: {
+ projectId,
+ type: { in: types },
+ },
+ select: {
+ id: true,
+ type: true,
+ slug: true,
+ branchName: true,
+ },
+ });
+
+ return envs.map((e) => ({
+ id: e.id,
+ slug: e.slug,
+ type: e.type,
+ displayName: e.branchName ?? e.slug,
+ }));
+ }
+
+ private buildChannelsByEnvId(
+ channels: ProjectAlertChannel[],
+ environments: ResolvedEnvironment[]
+ ): Map {
+ const result = new Map();
+ for (const env of environments) {
+ const matching = channels.filter((ch) => ch.environmentTypes.includes(env.type));
+ if (matching.length > 0) {
+ result.set(env.id, matching);
+ }
+ }
+ return result;
+ }
+
+ private async getActiveErrors(
+ projectId: string,
+ envIds: string[],
+ scheduledAt: number
+ ): Promise {
+ const qb = this._clickhouse.errors.activeErrorsSinceQueryBuilder();
+ qb.where("project_id = {projectId: String}", { projectId });
+ qb.where("environment_id IN {envIds: Array(String)}", { envIds });
+ qb.groupBy("environment_id, task_identifier, error_fingerprint");
+ qb.having("toInt64(last_seen) > {scheduledAt: Int64}", {
+ scheduledAt,
+ });
+
+ const [err, results] = await qb.execute();
+ if (err) {
+ logger.error("[ErrorAlertEvaluator] Failed to query active errors", { error: err });
+ return [];
+ }
+ return results ?? [];
+ }
+
+ private async getErrorGroupStates(
+ activeErrors: ActiveErrorsSinceQueryResult[]
+ ): Promise {
+ if (activeErrors.length === 0) return [];
+
+ return this._replica.errorGroupState.findMany({
+ where: {
+ OR: activeErrors.map((e) => ({
+ environmentId: e.environment_id,
+ taskIdentifier: e.task_identifier,
+ errorFingerprint: e.error_fingerprint,
+ })),
+ },
+ });
+ }
+
+ private buildStateMap(states: ErrorGroupState[]): Map {
+ const map = new Map();
+ for (const s of states) {
+ map.set(`${s.environmentId}:${s.taskIdentifier}:${s.errorFingerprint}`, s);
+ }
+ return map;
+ }
+
+ private async getOccurrenceCountsSince(
+ projectId: string,
+ envIds: string[],
+ scheduledAt: number
+ ): Promise<
+ Array<{
+ environment_id: string;
+ task_identifier: string;
+ error_fingerprint: string;
+ occurrences_since: number;
+ }>
+ > {
+ const qb = this._clickhouse.errors.occurrenceCountsSinceQueryBuilder();
+ qb.where("project_id = {projectId: String}", { projectId });
+ qb.where("environment_id IN {envIds: Array(String)}", { envIds });
+ qb.where("minute >= toStartOfMinute(fromUnixTimestamp64Milli({scheduledAt: Int64}))", {
+ scheduledAt,
+ });
+ qb.groupBy("environment_id, task_identifier, error_fingerprint");
+
+ const [err, results] = await qb.execute();
+ if (err) {
+ logger.error("[ErrorAlertEvaluator] Failed to query occurrence counts", { error: err });
+ return [];
+ }
+ return results ?? [];
+ }
+
+ private buildOccurrenceMap(
+ counts: Array<{
+ environment_id: string;
+ task_identifier: string;
+ error_fingerprint: string;
+ occurrences_since: number;
+ }>
+ ): Map {
+ const map = new Map();
+ for (const c of counts) {
+ map.set(
+ `${c.environment_id}:${c.task_identifier}:${c.error_fingerprint}`,
+ c.occurrences_since
+ );
+ }
+ return map;
+ }
+
+ private async updateErrorGroupStates(
+ alertableErrors: AlertableError[],
+ stateMap: Map
+ ): Promise {
+ for (const alertable of alertableErrors) {
+ const key = `${alertable.error.environment_id}:${alertable.error.task_identifier}:${alertable.error.error_fingerprint}`;
+ const state = stateMap.get(key);
+ if (!state) continue;
+
+ await this._prisma.errorGroupState.update({
+ where: { id: state.id },
+ data: {
+ status: "UNRESOLVED",
+ ignoredUntil: null,
+ ignoredUntilOccurrenceRate: null,
+ ignoredUntilTotalOccurrences: null,
+ ignoredAtOccurrenceCount: null,
+ ignoredAt: null,
+ ignoredReason: null,
+ ignoredByUserId: null,
+ resolvedAt: null,
+ resolvedInVersion: null,
+ resolvedBy: null,
+ },
+ });
+ }
+ }
+
+ private async selfChain(
+ projectId: string,
+ nextScheduledAt: number,
+ intervalMs: number
+ ): Promise {
+ await alertsWorker.enqueue({
+ id: `evaluateErrorAlerts:${projectId}`,
+ job: "v3.evaluateErrorAlerts",
+ payload: {
+ projectId,
+ scheduledAt: nextScheduledAt,
+ },
+ availableAt: new Date(nextScheduledAt + intervalMs),
+ });
+ }
+}
diff --git a/apps/webapp/app/v3/services/alerts/errorGroupWebhook.server.ts b/apps/webapp/app/v3/services/alerts/errorGroupWebhook.server.ts
new file mode 100644
index 00000000000..cfcb38ee4ca
--- /dev/null
+++ b/apps/webapp/app/v3/services/alerts/errorGroupWebhook.server.ts
@@ -0,0 +1,74 @@
+import { nanoid } from "nanoid";
+import type { ErrorWebhook } from "@trigger.dev/core/v3/schemas";
+
+// Why the alert fired: new error, regression, or a breached ignore.
+export type ErrorAlertClassification = "new_issue" | "regression" | "unignored";
+
+// Input for generateErrorGroupWebhookPayload: the alert classification plus
+// the error, organization and project details to embed in the webhook body.
+export interface ErrorGroupAlertData {
+  classification: ErrorAlertClassification;
+  error: {
+    fingerprint: string;
+    environmentId: string;
+    environmentName: string;
+    taskIdentifier: string;
+    errorType: string;
+    errorMessage: string;
+    sampleStackTrace: string;
+    // firstSeen/lastSeen are epoch-millisecond values serialized as strings;
+    // they are converted with Number(...) when building the payload.
+    firstSeen: string;
+    lastSeen: string;
+    occurrenceCount: number;
+  };
+  organization: {
+    id: string;
+    slug: string;
+    name: string;
+  };
+  project: {
+    id: string;
+    // Emitted as `ref` in the webhook payload.
+    externalRef: string;
+    slug: string;
+    name: string;
+  };
+  // Deep link to the error group in the dashboard.
+  dashboardUrl: string;
+}
+
+/**
+ * Generates a webhook payload for an error group alert that conforms to the
+ * ErrorWebhook schema from @trigger.dev/core/v3/schemas
+ */
+export function generateErrorGroupWebhookPayload(data: ErrorGroupAlertData): ErrorWebhook {
+ return {
+ id: nanoid(),
+ created: new Date(),
+ webhookVersion: "2025-01-01",
+ type: "alert.error" as const,
+ object: {
+ classification: data.classification,
+ error: {
+ fingerprint: data.error.fingerprint,
+ type: data.error.errorType,
+ message: data.error.errorMessage,
+ stackTrace: data.error.sampleStackTrace || undefined,
+ firstSeen: new Date(Number(data.error.firstSeen)),
+ lastSeen: new Date(Number(data.error.lastSeen)),
+ occurrenceCount: data.error.occurrenceCount,
+ taskIdentifier: data.error.taskIdentifier,
+ },
+ environment: {
+ id: data.error.environmentId,
+ name: data.error.environmentName,
+ },
+ organization: {
+ id: data.organization.id,
+ slug: data.organization.slug,
+ name: data.organization.name,
+ },
+ project: {
+ id: data.project.id,
+ ref: data.project.externalRef,
+ slug: data.project.slug,
+ name: data.project.name,
+ },
+ dashboardUrl: data.dashboardUrl,
+ },
+ };
+}
diff --git a/apps/webapp/app/v3/services/errorGroupActions.server.ts b/apps/webapp/app/v3/services/errorGroupActions.server.ts
new file mode 100644
index 00000000000..c026efe2aba
--- /dev/null
+++ b/apps/webapp/app/v3/services/errorGroupActions.server.ts
@@ -0,0 +1,144 @@
+import { type PrismaClientOrTransaction, prisma } from "~/db.server";
+
+type ErrorGroupIdentifier = {
+ organizationId: string;
+ projectId: string;
+ environmentId: string;
+ taskIdentifier: string;
+ errorFingerprint: string;
+};
+
+/**
+ * Triage actions on an error group's state row (resolve / ignore / unresolve).
+ * All actions upsert against the composite unique key
+ * (environmentId, taskIdentifier, errorFingerprint).
+ */
+export class ErrorGroupActions {
+  constructor(private readonly _prisma: PrismaClientOrTransaction = prisma) {}
+
+  /** Composite unique `where` clause shared by every action. */
+  #uniqueWhere(identifier: ErrorGroupIdentifier) {
+    return {
+      environmentId_taskIdentifier_errorFingerprint: {
+        environmentId: identifier.environmentId,
+        taskIdentifier: identifier.taskIdentifier,
+        errorFingerprint: identifier.errorFingerprint,
+      },
+    };
+  }
+
+  /** Identity columns used when creating a fresh state row. */
+  #createIdentity(identifier: ErrorGroupIdentifier) {
+    return {
+      organizationId: identifier.organizationId,
+      projectId: identifier.projectId,
+      environmentId: identifier.environmentId,
+      taskIdentifier: identifier.taskIdentifier,
+      errorFingerprint: identifier.errorFingerprint,
+    };
+  }
+
+  /** Fields cleared when a group leaves the IGNORED state. */
+  #clearedIgnoreFields() {
+    return {
+      ignoredUntil: null,
+      ignoredUntilOccurrenceRate: null,
+      ignoredUntilTotalOccurrences: null,
+      ignoredAtOccurrenceCount: null,
+      ignoredAt: null,
+      ignoredReason: null,
+      ignoredByUserId: null,
+    };
+  }
+
+  /** Fields cleared when a group leaves the RESOLVED state. */
+  #clearedResolutionFields() {
+    return {
+      resolvedAt: null,
+      resolvedInVersion: null,
+      resolvedBy: null,
+    };
+  }
+
+  /**
+   * Marks an error group RESOLVED (creating the state row if missing),
+   * recording who resolved it and optionally the version it was fixed in.
+   * Any active ignore metadata is cleared.
+   */
+  async resolveError(
+    identifier: ErrorGroupIdentifier,
+    params: {
+      userId: string;
+      resolvedInVersion?: string;
+    }
+  ) {
+    const now = new Date();
+    const resolution = {
+      status: "RESOLVED" as const,
+      resolvedAt: now,
+      resolvedInVersion: params.resolvedInVersion ?? null,
+      resolvedBy: params.userId,
+    };
+
+    return this._prisma.errorGroupState.upsert({
+      where: this.#uniqueWhere(identifier),
+      update: {
+        ...resolution,
+        ...this.#clearedIgnoreFields(),
+      },
+      create: {
+        ...this.#createIdentity(identifier),
+        ...resolution,
+      },
+    });
+  }
+
+  /**
+   * Marks an error group IGNORED. The ignore can expire after `duration` ms,
+   * or be breached by an occurrence-rate or total-occurrence threshold
+   * (evaluated elsewhere). Any resolution metadata is cleared.
+   */
+  async ignoreError(
+    identifier: ErrorGroupIdentifier,
+    params: {
+      userId: string;
+      duration?: number;
+      occurrenceRateThreshold?: number;
+      totalOccurrencesThreshold?: number;
+      occurrenceCountAtIgnoreTime?: number;
+      reason?: string;
+    }
+  ) {
+    const now = new Date();
+    // A falsy duration (undefined or 0) means "no expiry".
+    const ignoredUntil = params.duration ? new Date(now.getTime() + params.duration) : null;
+
+    const data = {
+      status: "IGNORED" as const,
+      ignoredAt: now,
+      ignoredUntil,
+      ignoredUntilOccurrenceRate: params.occurrenceRateThreshold ?? null,
+      ignoredUntilTotalOccurrences: params.totalOccurrencesThreshold ?? null,
+      ignoredAtOccurrenceCount: params.occurrenceCountAtIgnoreTime ?? null,
+      ignoredReason: params.reason ?? null,
+      ignoredByUserId: params.userId,
+      ...this.#clearedResolutionFields(),
+    };
+
+    return this._prisma.errorGroupState.upsert({
+      where: this.#uniqueWhere(identifier),
+      update: data,
+      create: {
+        ...this.#createIdentity(identifier),
+        ...data,
+      },
+    });
+  }
+
+  /** Returns an error group to UNRESOLVED, clearing all triage metadata. */
+  async unresolveError(identifier: ErrorGroupIdentifier) {
+    return this._prisma.errorGroupState.upsert({
+      where: this.#uniqueWhere(identifier),
+      update: {
+        status: "UNRESOLVED",
+        ...this.#clearedResolutionFields(),
+        ...this.#clearedIgnoreFields(),
+      },
+      create: {
+        ...this.#createIdentity(identifier),
+        status: "UNRESOLVED",
+      },
+    });
+  }
+}
diff --git a/apps/webapp/test/errorGroupWebhook.test.ts b/apps/webapp/test/errorGroupWebhook.test.ts
new file mode 100644
index 00000000000..a7e797685ae
--- /dev/null
+++ b/apps/webapp/test/errorGroupWebhook.test.ts
@@ -0,0 +1,248 @@
+import { describe, test, expect } from "vitest";
+import { Webhook } from "@trigger.dev/core/v3/schemas";
+import {
+ generateErrorGroupWebhookPayload,
+ type ErrorGroupAlertData,
+} from "~/v3/services/alerts/errorGroupWebhook.server";
+
+function createMockAlertData(overrides: Partial<ErrorGroupAlertData> = {}): ErrorGroupAlertData {
+  const now = Date.now();
+  const earlier = now - 3600000; // 1 hour ago
+
+  return {
+    classification: "new_issue",
+    error: {
+      fingerprint: "fp_test_12345",
+      environmentId: "env_abc123",
+      environmentName: "Production",
+      taskIdentifier: "process-payment",
+      errorType: "TypeError",
+      errorMessage: "Cannot read property 'id' of undefined",
+      sampleStackTrace: `TypeError: Cannot read property 'id' of undefined
+    at processPayment (src/tasks/payment.ts:42:15)
+    at Object.run (src/tasks/payment.ts:15:20)`,
+      firstSeen: String(earlier),
+      lastSeen: String(now),
+      occurrenceCount: 5,
+    },
+    organization: {
+      id: "org_xyz789",
+      slug: "acme-corp",
+      name: "Acme Corp",
+    },
+    project: {
+      id: "proj_123",
+      externalRef: "proj_abc",
+      slug: "my-project",
+      name: "My Project",
+    },
+    dashboardUrl:
+      "https://cloud.trigger.dev/orgs/acme-corp/projects/my-project/errors/fp_test_12345",
+    ...overrides,
+  };
+}
+
+describe("generateErrorGroupWebhookPayload", () => {
+ test("generates a valid webhook payload", () => {
+ const alertData = createMockAlertData();
+ const payload = generateErrorGroupWebhookPayload(alertData);
+
+ expect(payload).toMatchObject({
+ type: "alert.error",
+ object: {
+ classification: "new_issue",
+ error: {
+ fingerprint: "fp_test_12345",
+ type: "TypeError",
+ message: "Cannot read property 'id' of undefined",
+ taskIdentifier: "process-payment",
+ occurrenceCount: 5,
+ },
+ environment: {
+ id: "env_abc123",
+ name: "Production",
+ },
+ organization: {
+ id: "org_xyz789",
+ slug: "acme-corp",
+ name: "Acme Corp",
+ },
+ project: {
+ id: "proj_123",
+ ref: "proj_abc",
+ slug: "my-project",
+ name: "My Project",
+ },
+ dashboardUrl:
+ "https://cloud.trigger.dev/orgs/acme-corp/projects/my-project/errors/fp_test_12345",
+ },
+ });
+
+ expect(payload.id).toBeDefined();
+ expect(payload.created).toBeInstanceOf(Date);
+ expect(payload.webhookVersion).toBe("2025-01-01");
+ });
+
+ test("payload is valid according to Webhook schema", () => {
+ const alertData = createMockAlertData();
+ const payload = generateErrorGroupWebhookPayload(alertData);
+
+ const parsed = Webhook.parse(payload);
+ expect(parsed.type).toBe("alert.error");
+ });
+
+ test("payload can be serialized and deserialized", () => {
+ const alertData = createMockAlertData();
+ const payload = generateErrorGroupWebhookPayload(alertData);
+
+ // Serialize to JSON (simulating sending over HTTP)
+ const serialized = JSON.stringify(payload);
+ const deserialized = JSON.parse(serialized);
+
+ // Verify it can still be parsed by the schema
+ const parsed = Webhook.parse(deserialized);
+ expect(parsed.type).toBe("alert.error");
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("new_issue");
+ expect(parsed.object.error.fingerprint).toBe("fp_test_12345");
+ }
+ });
+
+ test("handles new_issue classification", () => {
+ const alertData = createMockAlertData({ classification: "new_issue" });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("new_issue");
+ }
+ });
+
+ test("handles regression classification", () => {
+ const alertData = createMockAlertData({ classification: "regression" });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("regression");
+ }
+ });
+
+ test("handles unignored classification", () => {
+ const alertData = createMockAlertData({ classification: "unignored" });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("unignored");
+ }
+ });
+
+ test("handles empty stack trace", () => {
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ sampleStackTrace: "",
+ },
+ });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.stackTrace).toBeUndefined();
+ }
+ });
+
+ test("includes stack trace when present", () => {
+ const stackTrace = "Error at line 42";
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ sampleStackTrace: stackTrace,
+ },
+ });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.stackTrace).toBe(stackTrace);
+ }
+ });
+
+ test("preserves date fields correctly", () => {
+ const firstSeen = new Date("2024-01-01T00:00:00Z");
+ const lastSeen = new Date("2024-01-02T12:00:00Z");
+
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ firstSeen: String(firstSeen.getTime()),
+ lastSeen: String(lastSeen.getTime()),
+ },
+ });
+
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.firstSeen).toEqual(firstSeen);
+ expect(parsed.object.error.lastSeen).toEqual(lastSeen);
+ }
+ });
+
+ test("handles special characters in error messages", () => {
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ errorMessage: "Unexpected token `<` in JSON at position 0",
+ sampleStackTrace: `SyntaxError: Unexpected token \`<\` in JSON
+ at JSON.parse ()
+ at fetch("https://api.example.com/data?query=test&limit=10")`,
+ },
+ });
+
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const serialized = JSON.stringify(payload);
+ const deserialized = JSON.parse(serialized);
+ const parsed = Webhook.parse(deserialized);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.message).toBe("Unexpected token `<` in JSON at position 0");
+ }
+ });
+
+ test("handles unicode and emoji in error messages", () => {
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ errorMessage: "Failed to process emoji 🔥 in message: Hello 世界",
+ },
+ });
+
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const serialized = JSON.stringify(payload);
+ const deserialized = JSON.parse(serialized);
+ const parsed = Webhook.parse(deserialized);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.message).toBe("Failed to process emoji 🔥 in message: Hello 世界");
+ }
+ });
+
+ test("handles large occurrence counts", () => {
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ occurrenceCount: 999999,
+ },
+ });
+
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.occurrenceCount).toBe(999999);
+ }
+ });
+});
diff --git a/apps/webapp/test/slackErrorAlerts.test.ts b/apps/webapp/test/slackErrorAlerts.test.ts
new file mode 100644
index 00000000000..f0d26d02b2b
--- /dev/null
+++ b/apps/webapp/test/slackErrorAlerts.test.ts
@@ -0,0 +1,383 @@
+import { describe, test, expect, beforeAll, afterAll } from "vitest";
+import { DeliverErrorGroupAlertService } from "../app/v3/services/alerts/deliverErrorGroupAlert.server.js";
+import { prisma } from "../app/db.server.js";
+import { getSecretStore } from "../app/services/secrets/secretStore.server.js";
+
+// Helper type matching the service's ErrorAlertPayload
+type ErrorAlertPayload = {
+ channelId: string;
+ projectId: string;
+ classification: "new_issue" | "regression" | "unignored";
+ error: {
+ fingerprint: string;
+ environmentId: string;
+ environmentSlug: string;
+ environmentName: string;
+ taskIdentifier: string;
+ errorType: string;
+ errorMessage: string;
+ sampleStackTrace: string;
+ firstSeen: string;
+ lastSeen: string;
+ occurrenceCount: number;
+ };
+};
+
+// Test context for database setup
+let testChannelId: string;
+let testProjectId: string;
+let testOrganizationId: string;
+
+// Helper to create mock error payloads
+function createMockErrorPayload(
+  overrides: Partial<Omit<ErrorAlertPayload, "error">> & {
+    error?: Partial<ErrorAlertPayload["error"]>;
+  } = {}
+): ErrorAlertPayload {
+  const { error: errorOverrides, ...payloadOverrides } = overrides;
+
+  const defaultError: ErrorAlertPayload["error"] = {
+    fingerprint: "fp_test_" + Date.now(),
+    environmentId: "env_test_dev",
+    environmentSlug: "dev",
+    environmentName: "Development",
+    taskIdentifier: "process-payment",
+    errorType: "TypeError",
+    errorMessage: "Cannot read property 'id' of undefined",
+    sampleStackTrace: `TypeError: Cannot read property 'id' of undefined
+    at processPayment (src/tasks/payment.ts:42:15)
+    at Object.run (src/tasks/payment.ts:15:20)
+    at TaskExecutor.execute (node_modules/@trigger.dev/core/dist/index.js:234:18)`,
+    firstSeen: Date.now().toString(),
+    lastSeen: Date.now().toString(),
+    occurrenceCount: 42,
+    ...errorOverrides,
+  };
+
+  return {
+    channelId: testChannelId,
+    projectId: testProjectId,
+    classification: "new_issue",
+    ...payloadOverrides,
+    error: defaultError,
+  };
+}
+
+// Skip tests if Slack credentials not configured
+const hasSlackCredentials =
+ !!process.env.TEST_SLACK_CHANNEL_ID &&
+ !!process.env.TEST_SLACK_BOT_TOKEN;
+
+describe.skipIf(!hasSlackCredentials)("Slack Error Alert Visual Tests", () => {
+ beforeAll(async () => {
+ // Create test organization
+ const organization = await prisma.organization.create({
+ data: {
+ title: "Slack Test Org",
+ slug: "slack-test-org-" + Date.now(),
+ },
+ });
+ testOrganizationId = organization.id;
+
+ // Create test project
+ const project = await prisma.project.create({
+ data: {
+ name: "Slack Test Project",
+ slug: "slack-test-project-" + Date.now(),
+ externalRef: "proj_slack_test_" + Date.now(),
+ organizationId: organization.id,
+ },
+ });
+ testProjectId = project.id;
+
+ // Create secret reference for Slack token
+ const secretStore = getSecretStore("DATABASE");
+ const secretKey = `slack-test-token-${Date.now()}`;
+
+ await secretStore.setSecret(secretKey, {
+ botAccessToken: process.env.TEST_SLACK_BOT_TOKEN!,
+ });
+
+ const secretReference = await prisma.secretReference.create({
+ data: {
+ key: secretKey,
+ provider: "DATABASE",
+ },
+ });
+
+ // Create Slack organization integration
+ const integration = await prisma.organizationIntegration.create({
+ data: {
+ friendlyId: "integration_test_" + Date.now(),
+ organizationId: organization.id,
+ service: "SLACK",
+ integrationData: {},
+ tokenReferenceId: secretReference.id,
+ },
+ });
+
+ // Create alert channel
+ const channel = await prisma.projectAlertChannel.create({
+ data: {
+ friendlyId: "channel_test_" + Date.now(),
+ name: "Test Slack Channel",
+ type: "SLACK",
+ enabled: true,
+ projectId: project.id,
+ integrationId: integration.id,
+ properties: {
+ channelId: process.env.TEST_SLACK_CHANNEL_ID!,
+ channelName: "test-slack-alerts",
+ integrationId: integration.id,
+ },
+ },
+ });
+ testChannelId = channel.id;
+ });
+
+ afterAll(async () => {
+ // Clean up test data
+ if (testChannelId) {
+ await prisma.projectAlertChannel.deleteMany({
+ where: { id: testChannelId },
+ });
+ }
+ if (testProjectId) {
+ await prisma.project.deleteMany({
+ where: { id: testProjectId },
+ });
+ }
+ if (testOrganizationId) {
+ await prisma.organizationIntegration.deleteMany({
+ where: { organizationId: testOrganizationId },
+ });
+ await prisma.organization.deleteMany({
+ where: { id: testOrganizationId },
+ });
+ }
+ });
+
+ test("new_issue classification", async () => {
+ const payload = createMockErrorPayload({
+ classification: "new_issue",
+ error: {
+ taskIdentifier: "process-order",
+ errorMessage: "Failed to process order due to invalid payment method",
+ errorType: "PaymentError",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ // Message sent - check Slack channel visually
+ expect(true).toBe(true);
+ });
+
+ test("regression classification", async () => {
+ const payload = createMockErrorPayload({
+ classification: "regression",
+ error: {
+ taskIdentifier: "send-email",
+ errorMessage: "SMTP connection timeout after 30 seconds",
+ errorType: "TimeoutError",
+ occurrenceCount: 156,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("unignored (resurfaced) classification", async () => {
+ const payload = createMockErrorPayload({
+ classification: "unignored",
+ error: {
+ taskIdentifier: "sync-database",
+ errorMessage: "Connection pool exhausted",
+ errorType: "DatabaseError",
+ occurrenceCount: 99,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("short error message", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: "Not found",
+ errorType: "NotFoundError",
+ sampleStackTrace: "NotFoundError: Not found\n at findUser (src/db.ts:10:5)",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("long stack trace", async () => {
+ const longStackTrace = `ReferenceError: processData is not defined
+ at handler (src/tasks/data-processor.ts:125:15)
+ at async TaskRunner.execute (node_modules/@trigger.dev/sdk/dist/runner.js:89:12)
+ at async WorkerThread.processTask (node_modules/@trigger.dev/sdk/dist/worker.js:234:18)
+ at async WorkerPool.run (src/lib/worker-pool.ts:56:10)
+ at async TaskQueue.dequeue (src/lib/queue.ts:142:8)
+ at async Orchestrator.processNextTask (src/orchestrator.ts:98:5)
+ at async Orchestrator.start (src/orchestrator.ts:45:7)
+ at async main (src/index.ts:12:3)
+ at Object. (src/index.ts:20:1)
+ at Module._compile (node:internal/modules/cjs/loader:1376:14)
+ at Module._extensions..js (node:internal/modules/cjs/loader:1435:10)
+ at Module.load (node:internal/modules/cjs/loader:1207:32)
+ at Module._load (node:internal/modules/cjs/loader:1023:12)
+ at Function.executeUserEntryPoint [as runMain] (node:internal/modules/run_main:135:12)
+ at node:internal/main/run_main_module:28:49`;
+
+ const payload = createMockErrorPayload({
+ error: {
+ errorType: "ReferenceError",
+ errorMessage: "processData is not defined",
+ sampleStackTrace: longStackTrace,
+ taskIdentifier: "data-processor",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("very long error message (triggers truncation)", async () => {
+ // Create a message that's over 3000 characters
+ const longMessage = "x".repeat(3500);
+ const longStackTrace = `Error: ${longMessage}
+ at verylongfunctionname (src/tasks/long-task.ts:1:1)
+ ${" at stackframe (file.ts:1:1)\n".repeat(100)}`;
+
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: longMessage,
+ sampleStackTrace: longStackTrace,
+ taskIdentifier: "long-error-task",
+ errorType: "Error",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ // Should see truncation message in Slack
+ expect(true).toBe(true);
+ });
+
+ test("special characters in error", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: "Unexpected token `<` in JSON at position 0",
+ errorType: "SyntaxError",
+ sampleStackTrace: `SyntaxError: Unexpected token \`<\` in JSON at position 0
+ at JSON.parse ()
+ at parseResponse (src/api/client.ts:42:15)
+ at fetch("https://api.example.com/data?query=test&limit=10")`,
+ taskIdentifier: "api-fetch-task",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("unicode and emoji in error", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: "Failed to process emoji 🔥 in message: Hello 世界",
+ errorType: "EncodingError",
+ sampleStackTrace: `EncodingError: Failed to process emoji 🔥 in message: Hello 世界
+ at encodeMessage (src/utils/encoding.ts:15:10)
+ at sendMessage (src/tasks/messaging.ts:42:8)`,
+ taskIdentifier: "messaging-task",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("different error types - TypeError", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorType: "TypeError",
+ errorMessage: "Cannot call method 'map' on undefined",
+ sampleStackTrace: `TypeError: Cannot call method 'map' on undefined
+ at transformData (src/transformers/data.ts:18:25)`,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("different error types - ReferenceError", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorType: "ReferenceError",
+ errorMessage: "userConfig is not defined",
+ sampleStackTrace: `ReferenceError: userConfig is not defined
+ at initializeApp (src/app.ts:32:10)`,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("different error types - Custom Error", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorType: "InvalidConfigurationError",
+ errorMessage: "API key is missing or invalid",
+ sampleStackTrace: `InvalidConfigurationError: API key is missing or invalid
+ at validateConfig (src/config/validator.ts:45:11)
+ at loadConfig (src/config/loader.ts:23:5)`,
+ taskIdentifier: "config-loader",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("error with no stack trace", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: "An unknown error occurred",
+ errorType: "Error",
+ sampleStackTrace: "",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+});
diff --git a/apps/webapp/test/webhookErrorAlerts.test.ts b/apps/webapp/test/webhookErrorAlerts.test.ts
new file mode 100644
index 00000000000..f147d470fab
--- /dev/null
+++ b/apps/webapp/test/webhookErrorAlerts.test.ts
@@ -0,0 +1,332 @@
+import { describe, test, expect, beforeAll, afterAll } from "vitest";
+import { DeliverErrorGroupAlertService } from "~/v3/services/alerts/deliverErrorGroupAlert.server";
+import { prisma } from "~/db.server";
+import { getSecretStore } from "~/services/secrets/secretStore.server";
+import { Webhook } from "@trigger.dev/core/v3/schemas";
+
+type ErrorAlertPayload = {
+ channelId: string;
+ projectId: string;
+ classification: "new_issue" | "regression" | "unignored";
+ error: {
+ fingerprint: string;
+ environmentId: string;
+ environmentSlug: string;
+ environmentName: string;
+ taskIdentifier: string;
+ errorType: string;
+ errorMessage: string;
+ sampleStackTrace: string;
+ firstSeen: string;
+ lastSeen: string;
+ occurrenceCount: number;
+ };
+};
+
+let testChannelId: string;
+let testProjectId: string;
+let testOrganizationId: string;
+let webhookServer: ReturnType<typeof createWebhookServer> | null = null;
+
+interface WebhookCall {
+ payload: unknown;
+ signature: string;
+}
+
+function createWebhookServer() {
+ const calls: WebhookCall[] = [];
+
+ return {
+ calls,
+ handler: async (request: Request) => {
+ const signature = request.headers.get("x-trigger-signature-hmacsha256");
+ const payload = await request.json();
+
+ calls.push({
+ payload,
+ signature: signature || "",
+ });
+
+ return new Response(JSON.stringify({ success: true }), {
+ status: 200,
+ headers: { "content-type": "application/json" },
+ });
+ },
+ };
+}
+
+function createMockErrorPayload(
+  overrides: Partial<Omit<ErrorAlertPayload, "error">> & {
+    error?: Partial<ErrorAlertPayload["error"]>;
+  } = {}
+): ErrorAlertPayload {
+  const { error: errorOverrides, ...payloadOverrides } = overrides;
+
+  const defaultError: ErrorAlertPayload["error"] = {
+    fingerprint: "fp_test_" + Date.now(),
+    environmentId: "env_test_dev",
+    environmentSlug: "dev",
+    environmentName: "Development",
+    taskIdentifier: "process-payment",
+    errorType: "TypeError",
+    errorMessage: "Cannot read property 'id' of undefined",
+    sampleStackTrace: `TypeError: Cannot read property 'id' of undefined
+    at processPayment (src/tasks/payment.ts:42:15)
+    at Object.run (src/tasks/payment.ts:15:20)
+    at TaskExecutor.execute (node_modules/@trigger.dev/core/dist/index.js:234:18)`,
+    firstSeen: new Date().toISOString(),
+    lastSeen: new Date().toISOString(),
+    occurrenceCount: 42,
+    ...errorOverrides,
+  };
+
+  return {
+    channelId: testChannelId,
+    projectId: testProjectId,
+    classification: "new_issue",
+    ...payloadOverrides,
+    error: defaultError,
+  };
+}
+
+describe("Webhook Error Alert Tests", () => {
+ beforeAll(async () => {
+ // Create test organization
+ const organization = await prisma.organization.create({
+ data: {
+ title: "Webhook Test Org",
+ slug: "webhook-test-org-" + Date.now(),
+ },
+ });
+ testOrganizationId = organization.id;
+
+ // Create test project
+ const project = await prisma.project.create({
+ data: {
+ name: "Webhook Test Project",
+ slug: "webhook-test-project-" + Date.now(),
+ externalRef: "proj_webhook_test_" + Date.now(),
+ organizationId: organization.id,
+ },
+ });
+ testProjectId = project.id;
+
+ // Create webhook server for testing
+ webhookServer = createWebhookServer();
+
+ // We'll use a mock webhook URL in the tests
+ // In a real integration test, you'd start a local server
+ // For now, we'll just test that the payload is constructed correctly
+ });
+
+ afterAll(async () => {
+ // Clean up test data
+ if (testChannelId) {
+ await prisma.projectAlertChannel.deleteMany({
+ where: { id: testChannelId },
+ });
+ }
+ if (testProjectId) {
+ await prisma.project.deleteMany({
+ where: { id: testProjectId },
+ });
+ }
+ if (testOrganizationId) {
+ await prisma.organization.deleteMany({
+ where: { id: testOrganizationId },
+ });
+ }
+ });
+
+ test("webhook payload structure is valid", async () => {
+ // This test verifies the payload structure without actually sending it
+ const mockPayload = createMockErrorPayload({
+ classification: "new_issue",
+ });
+
+ // Import the function to generate the payload
+ const { generateErrorGroupWebhookPayload } = await import(
+ "~/v3/services/alerts/errorGroupWebhook.server"
+ );
+
+ const webhookPayload = generateErrorGroupWebhookPayload({
+ classification: mockPayload.classification,
+ error: mockPayload.error,
+ organization: {
+ id: testOrganizationId,
+ slug: "webhook-test-org",
+ name: "Webhook Test Org",
+ },
+ project: {
+ id: testProjectId,
+ externalRef: "proj_webhook_test",
+ slug: "webhook-test-project",
+ name: "Webhook Test Project",
+ },
+ dashboardUrl: "https://cloud.trigger.dev/test",
+ });
+
+ // Verify it can be parsed by the Webhook schema
+ const parsed = Webhook.parse(webhookPayload);
+ expect(parsed.type).toBe("alert.error");
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("new_issue");
+ expect(parsed.object.error.type).toBe("TypeError");
+ expect(parsed.object.organization.slug).toBe("webhook-test-org");
+ expect(parsed.object.project.ref).toBe("proj_webhook_test");
+ }
+ });
+
+ test("webhook payload can be serialized and deserialized", async () => {
+ const mockPayload = createMockErrorPayload({
+ classification: "regression",
+ });
+
+ const { generateErrorGroupWebhookPayload } = await import(
+ "~/v3/services/alerts/errorGroupWebhook.server"
+ );
+
+ const webhookPayload = generateErrorGroupWebhookPayload({
+ classification: mockPayload.classification,
+ error: mockPayload.error,
+ organization: {
+ id: testOrganizationId,
+ slug: "webhook-test-org",
+ name: "Webhook Test Org",
+ },
+ project: {
+ id: testProjectId,
+ externalRef: "proj_webhook_test",
+ slug: "webhook-test-project",
+ name: "Webhook Test Project",
+ },
+ dashboardUrl: "https://cloud.trigger.dev/test",
+ });
+
+ // Serialize to JSON (simulating HTTP transmission)
+ const serialized = JSON.stringify(webhookPayload);
+ const deserialized = JSON.parse(serialized);
+
+ // Verify it can still be parsed
+ const parsed = Webhook.parse(deserialized);
+ expect(parsed.type).toBe("alert.error");
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("regression");
+ expect(parsed.object.error.fingerprint).toBe(mockPayload.error.fingerprint);
+ }
+ });
+
+ test("webhook payload includes all classifications", async () => {
+ const classifications = ["new_issue", "regression", "unignored"] as const;
+
+ const { generateErrorGroupWebhookPayload } = await import(
+ "~/v3/services/alerts/errorGroupWebhook.server"
+ );
+
+ for (const classification of classifications) {
+ const mockPayload = createMockErrorPayload({ classification });
+
+ const webhookPayload = generateErrorGroupWebhookPayload({
+ classification: mockPayload.classification,
+ error: mockPayload.error,
+ organization: {
+ id: testOrganizationId,
+ slug: "webhook-test-org",
+ name: "Webhook Test Org",
+ },
+ project: {
+ id: testProjectId,
+ externalRef: "proj_webhook_test",
+ slug: "webhook-test-project",
+ name: "Webhook Test Project",
+ },
+ dashboardUrl: "https://cloud.trigger.dev/test",
+ });
+
+ const parsed = Webhook.parse(webhookPayload);
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe(classification);
+ }
+ }
+ });
+
+ test("webhook payload includes error details", async () => {
+ const mockPayload = createMockErrorPayload({
+ error: {
+ fingerprint: "fp_custom_123",
+ errorType: "CustomError",
+ errorMessage: "Custom error message",
+ sampleStackTrace: "CustomError: at line 42",
+ taskIdentifier: "my-custom-task",
+ occurrenceCount: 999,
+ } as any,
+ });
+
+ const { generateErrorGroupWebhookPayload } = await import(
+ "~/v3/services/alerts/errorGroupWebhook.server"
+ );
+
+ const webhookPayload = generateErrorGroupWebhookPayload({
+ classification: mockPayload.classification,
+ error: mockPayload.error,
+ organization: {
+ id: testOrganizationId,
+ slug: "webhook-test-org",
+ name: "Webhook Test Org",
+ },
+ project: {
+ id: testProjectId,
+ externalRef: "proj_webhook_test",
+ slug: "webhook-test-project",
+ name: "Webhook Test Project",
+ },
+ dashboardUrl: "https://cloud.trigger.dev/test",
+ });
+
+ const parsed = Webhook.parse(webhookPayload);
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.fingerprint).toBe("fp_custom_123");
+ expect(parsed.object.error.type).toBe("CustomError");
+ expect(parsed.object.error.message).toBe("Custom error message");
+ expect(parsed.object.error.stackTrace).toBe("CustomError: at line 42");
+ expect(parsed.object.error.taskIdentifier).toBe("my-custom-task");
+ expect(parsed.object.error.occurrenceCount).toBe(999);
+ }
+ });
+
+ test("webhook payload handles empty stack trace", async () => {
+ const mockPayload = createMockErrorPayload({
+ error: {
+ sampleStackTrace: "",
+ } as any,
+ });
+
+ const { generateErrorGroupWebhookPayload } = await import(
+ "~/v3/services/alerts/errorGroupWebhook.server"
+ );
+
+ const webhookPayload = generateErrorGroupWebhookPayload({
+ classification: mockPayload.classification,
+ error: mockPayload.error,
+ organization: {
+ id: testOrganizationId,
+ slug: "webhook-test-org",
+ name: "Webhook Test Org",
+ },
+ project: {
+ id: testProjectId,
+ externalRef: "proj_webhook_test",
+ slug: "webhook-test-project",
+ name: "Webhook Test Project",
+ },
+ dashboardUrl: "https://cloud.trigger.dev/test",
+ });
+
+ const parsed = Webhook.parse(webhookPayload);
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.stackTrace).toBeUndefined();
+ }
+ });
+});
diff --git a/internal-packages/clickhouse/src/errors.ts b/internal-packages/clickhouse/src/errors.ts
index c93efbcaf1f..4b13ce18c80 100644
--- a/internal-packages/clickhouse/src/errors.ts
+++ b/internal-packages/clickhouse/src/errors.ts
@@ -94,8 +94,8 @@ export function getErrorGroups(ch: ClickhouseReader, settings?: ClickHouseSettin
AND project_id = {projectId: String}
AND environment_id = {environmentId: String}
GROUP BY error_fingerprint, task_identifier
- HAVING max(last_seen) >= now() - INTERVAL {days: Int64} DAY
- ORDER BY last_seen DESC
+ HAVING toInt64(last_seen) >= toInt64(toUnixTimestamp(now() - INTERVAL {days: Int64} DAY)) * 1000
+ ORDER BY toInt64(last_seen) DESC
LIMIT {limit: Int64}
OFFSET {offset: Int64}
`,
@@ -314,3 +314,148 @@ export function createErrorOccurrencesQueryBuilder(
settings
);
}
+
+export const ErrorOccurrencesByVersionQueryResult = z.object({
+ error_fingerprint: z.string(),
+ task_version: z.string(),
+ bucket_epoch: z.number(),
+ count: z.number(),
+});
+
+export type ErrorOccurrencesByVersionQueryResult = z.infer<
+ typeof ErrorOccurrencesByVersionQueryResult
+>;
+
+/**
+ * Creates a query builder for bucketed error occurrence counts grouped by task_version.
+ * Used for stacked-by-version activity charts on the error detail page.
+ */
+export function createErrorOccurrencesByVersionQueryBuilder(
+ ch: ClickhouseReader,
+ intervalExpr: string,
+ settings?: ClickHouseSettings
+): ClickhouseQueryBuilder {
+ return new ClickhouseQueryBuilder(
+ "getErrorOccurrencesByVersion",
+ `
+ SELECT
+ error_fingerprint,
+ task_version,
+ toUnixTimestamp(toStartOfInterval(minute, ${intervalExpr})) as bucket_epoch,
+ sum(count) as count
+ FROM trigger_dev.error_occurrences_v1
+ `,
+ ch,
+ ErrorOccurrencesByVersionQueryResult,
+ settings
+ );
+}
+
+// ---------------------------------------------------------------------------
+// Alert evaluator – active errors since a timestamp
+// ---------------------------------------------------------------------------
+
+export const ActiveErrorsSinceQueryResult = z.object({
+  environment_id: z.string(),
+  task_identifier: z.string(),
+  error_fingerprint: z.string(),
+  error_type: z.string(),
+  error_message: z.string(),
+  sample_stack_trace: z.string(),
+  first_seen: z.string(),
+  last_seen: z.string(),
+  occurrence_count: z.number(),
+});
+
+export type ActiveErrorsSinceQueryResult = z.infer<typeof ActiveErrorsSinceQueryResult>;
+
+/**
+ * Query builder for fetching all errors active since a given timestamp.
+ * Returns errors with last_seen > scheduledAt, grouped by env/task/fingerprint.
+ * Used by the error alert evaluator to find new issues, regressions, and un-ignored errors.
+ */
+export function getActiveErrorsSinceQueryBuilder(
+ ch: ClickhouseReader,
+ settings?: ClickHouseSettings
+) {
+ return ch.queryBuilder({
+ name: "getActiveErrorsSince",
+ baseQuery: `
+ SELECT
+ environment_id,
+ task_identifier,
+ error_fingerprint,
+ any(error_type) as error_type,
+ any(error_message) as error_message,
+ any(sample_stack_trace) as sample_stack_trace,
+ toString(toUnixTimestamp64Milli(min(first_seen))) as first_seen,
+ toString(toUnixTimestamp64Milli(max(last_seen))) as last_seen,
+ toUInt64(sumMerge(occurrence_count)) as occurrence_count
+ FROM trigger_dev.errors_v1
+ `,
+ schema: ActiveErrorsSinceQueryResult,
+ settings,
+ });
+}
+
+export const OccurrenceCountsSinceQueryResult = z.object({
+  environment_id: z.string(),
+  task_identifier: z.string(),
+  error_fingerprint: z.string(),
+  occurrences_since: z.number(),
+});
+
+export type OccurrenceCountsSinceQueryResult = z.infer<typeof OccurrenceCountsSinceQueryResult>;
+
+/**
+ * Query builder for occurrence counts since a given timestamp, grouped by error.
+ * Used by the alert evaluator to check ignore thresholds.
+ */
+export function getOccurrenceCountsSinceQueryBuilder(
+ ch: ClickhouseReader,
+ settings?: ClickHouseSettings
+) {
+ return ch.queryBuilder({
+ name: "getOccurrenceCountsSince",
+ baseQuery: `
+ SELECT
+ environment_id,
+ task_identifier,
+ error_fingerprint,
+ sum(count) as occurrences_since
+ FROM trigger_dev.error_occurrences_v1
+ `,
+ schema: OccurrenceCountsSinceQueryResult,
+ settings,
+ });
+}
+
+// ---------------------------------------------------------------------------
+// Alert evaluator helpers – occurrence rate & count since timestamp
+// ---------------------------------------------------------------------------
+
+export const ErrorOccurrenceTotalCountResult = z.object({
+  total_count: z.number(),
+});
+
+export type ErrorOccurrenceTotalCountResult = z.infer<typeof ErrorOccurrenceTotalCountResult>;
+
+/**
+ * Query builder for summing occurrences since a given timestamp.
+ * Used by the alert evaluator to check total-count-based ignore thresholds.
+ */
+export function getOccurrenceCountSinceQueryBuilder(
+ ch: ClickhouseReader,
+ settings?: ClickHouseSettings
+) {
+ return ch.queryBuilder({
+ name: "getOccurrenceCountSince",
+ baseQuery: `
+ SELECT
+ sum(count) as total_count
+ FROM trigger_dev.error_occurrences_v1
+ `,
+ schema: ErrorOccurrenceTotalCountResult,
+ settings,
+ });
+}
diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts
index 99d22a5a18e..c6b8858fa9c 100644
--- a/internal-packages/clickhouse/src/index.ts
+++ b/internal-packages/clickhouse/src/index.ts
@@ -40,7 +40,11 @@ import {
getErrorHourlyOccurrences,
getErrorOccurrencesListQueryBuilder,
createErrorOccurrencesQueryBuilder,
+ createErrorOccurrencesByVersionQueryBuilder,
getErrorAffectedVersionsQueryBuilder,
+ getOccurrenceCountSinceQueryBuilder,
+ getActiveErrorsSinceQueryBuilder,
+ getOccurrenceCountsSinceQueryBuilder,
} from "./errors.js";
export { msToClickHouseInterval } from "./intervals.js";
import { Logger, type LogLevel } from "@trigger.dev/core/logger";
@@ -273,6 +277,11 @@ export class ClickHouse {
occurrencesListQueryBuilder: getErrorOccurrencesListQueryBuilder(this.reader),
createOccurrencesQueryBuilder: (intervalExpr: string) =>
createErrorOccurrencesQueryBuilder(this.reader, intervalExpr),
+ createOccurrencesByVersionQueryBuilder: (intervalExpr: string) =>
+ createErrorOccurrencesByVersionQueryBuilder(this.reader, intervalExpr),
+ occurrenceCountSinceQueryBuilder: getOccurrenceCountSinceQueryBuilder(this.reader),
+ activeErrorsSinceQueryBuilder: getActiveErrorsSinceQueryBuilder(this.reader),
+ occurrenceCountsSinceQueryBuilder: getOccurrenceCountsSinceQueryBuilder(this.reader),
};
}
}
diff --git a/internal-packages/database/prisma/migrations/20260306102053_error_group_state/migration.sql b/internal-packages/database/prisma/migrations/20260306102053_error_group_state/migration.sql
new file mode 100644
index 00000000000..0510505b6ae
--- /dev/null
+++ b/internal-packages/database/prisma/migrations/20260306102053_error_group_state/migration.sql
@@ -0,0 +1,53 @@
+-- CreateEnum
+CREATE TYPE "public"."ErrorGroupStatus" AS ENUM ('UNRESOLVED', 'RESOLVED', 'IGNORED');
+
+-- AlterEnum
+-- NOTE(review): ALTER TYPE ... ADD VALUE cannot run inside a transaction block
+-- on Postgres < 12 — confirm the minimum supported Postgres version.
+ALTER TYPE "public"."ProjectAlertType" ADD VALUE IF NOT EXISTS 'ERROR_GROUP';
+
+-- CreateTable
+-- Tracks user interaction state (ignore/resolve) per error group; the error
+-- data itself lives in ClickHouse.
+CREATE TABLE
+  "public"."ErrorGroupState" (
+    "id" TEXT NOT NULL,
+    "organizationId" TEXT NOT NULL,
+    "projectId" TEXT NOT NULL,
+    -- NULL environmentId means the state applies across all environments.
+    "environmentId" TEXT,
+    "taskIdentifier" TEXT NOT NULL,
+    "errorFingerprint" TEXT NOT NULL,
+    "status" "public"."ErrorGroupStatus" NOT NULL DEFAULT 'UNRESOLVED',
+    "ignoredUntil" TIMESTAMP(3),
+    "ignoredUntilOccurrenceRate" INTEGER,
+    "ignoredUntilTotalOccurrences" INTEGER,
+    "ignoredAtOccurrenceCount" BIGINT,
+    "ignoredAt" TIMESTAMP(3),
+    "ignoredReason" TEXT,
+    "ignoredByUserId" TEXT,
+    "resolvedAt" TIMESTAMP(3),
+    "resolvedInVersion" TEXT,
+    "resolvedBy" TEXT,
+    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    "updatedAt" TIMESTAMP(3) NOT NULL,
+    CONSTRAINT "ErrorGroupState_pkey" PRIMARY KEY ("id")
+  );
+
+-- CreateIndex
+-- NOTE(review): "environmentId" is nullable and Postgres unique indexes treat
+-- NULLs as distinct, so this constraint does NOT deduplicate project-wide
+-- (NULL environment) rows for the same task/fingerprint — confirm intended.
+CREATE UNIQUE INDEX "ErrorGroupState_environmentId_taskIdentifier_errorFingerpri_key" ON "public"."ErrorGroupState" (
+  "environmentId",
+  "taskIdentifier",
+  "errorFingerprint"
+);
+
+-- CreateIndex
+CREATE INDEX "ErrorGroupState_environmentId_status_idx" ON "public"."ErrorGroupState" ("environmentId", "status");
+
+-- AddForeignKey
+ALTER TABLE "public"."ErrorGroupState" ADD CONSTRAINT "ErrorGroupState_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "public"."Organization" ("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "public"."ErrorGroupState" ADD CONSTRAINT "ErrorGroupState_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "public"."Project" ("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "public"."ErrorGroupState" ADD CONSTRAINT "ErrorGroupState_environmentId_fkey" FOREIGN KEY ("environmentId") REFERENCES "public"."RuntimeEnvironment" ("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AlterTable
+ALTER TABLE "public"."ProjectAlertChannel"
+ADD COLUMN "errorAlertConfig" JSONB;
\ No newline at end of file
diff --git a/internal-packages/database/prisma/schema.prisma b/internal-packages/database/prisma/schema.prisma
index b60dcd7c9b0..8ae1e749e12 100644
--- a/internal-packages/database/prisma/schema.prisma
+++ b/internal-packages/database/prisma/schema.prisma
@@ -61,11 +61,10 @@ model User {
backupCodes MfaBackupCode[]
bulkActions BulkActionGroup[]
- impersonationsPerformed ImpersonationAuditLog[] @relation("ImpersonationAdmin")
- impersonationsReceived ImpersonationAuditLog[] @relation("ImpersonationTarget")
- customerQueries CustomerQuery[]
- metricsDashboards MetricsDashboard[]
-
+ impersonationsPerformed ImpersonationAuditLog[] @relation("ImpersonationAdmin")
+ impersonationsReceived ImpersonationAuditLog[] @relation("ImpersonationTarget")
+ customerQueries CustomerQuery[]
+ metricsDashboards MetricsDashboard[]
platformNotifications PlatformNotification[]
platformNotificationInteractions PlatformNotificationInteraction[]
}
@@ -233,7 +232,8 @@ model Organization {
metricsDashboards MetricsDashboard[]
prompts Prompt[]
- platformNotifications PlatformNotification[]
+ platformNotifications PlatformNotification[]
+ errorGroupStates ErrorGroupState[]
}
model OrgMember {
@@ -353,6 +353,7 @@ model RuntimeEnvironment {
BulkActionGroup BulkActionGroup[]
customerQueries CustomerQuery[]
prompts Prompt[]
+ errorGroupStates ErrorGroupState[]
@@unique([projectId, slug, orgMemberId])
@@unique([projectId, shortcode])
@@ -426,8 +427,8 @@ model Project {
metricsDashboards MetricsDashboard[]
llmModels LlmModel[]
prompts Prompt[]
-
platformNotifications PlatformNotification[]
+ errorGroupStates ErrorGroupState[]
}
enum ProjectVersion {
@@ -2102,6 +2103,8 @@ model ProjectAlertChannel {
alertTypes ProjectAlertType[]
environmentTypes RuntimeEnvironmentType[] @default([STAGING, PRODUCTION])
+ errorAlertConfig Json?
+
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade)
projectId String
@@ -2156,6 +2159,7 @@ enum ProjectAlertType {
TASK_RUN_ATTEMPT
DEPLOYMENT_FAILURE
DEPLOYMENT_SUCCESS
+ ERROR_GROUP
}
enum ProjectAlertStatus {
@@ -2825,3 +2829,83 @@ model PlatformNotificationInteraction {
@@unique([notificationId, userId])
}
+
+/// Lifecycle status of an error group; mirrors the Postgres "ErrorGroupStatus"
+/// enum created in the error_group_state migration.
+enum ErrorGroupStatus {
+  UNRESOLVED
+  RESOLVED
+  IGNORED
+}
+
+/// Error group state is used to track when a user has interacted with an error
+/// (ignored/resolved). The actual error data is in ClickHouse.
+/// (Converted from /** */ block comments: the Prisma Schema Language only
+/// supports `//` and `///` comments.)
+model ErrorGroupState {
+  id String @id @default(cuid())
+
+  organization   Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade, onUpdate: Cascade)
+  organizationId String
+
+  project   Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade)
+  projectId String
+
+  /// You can ignore/resolve an error across all environments (NULL), or specific ones.
+  /// NOTE(review): Postgres unique indexes treat NULLs as distinct, so the
+  /// @@unique below does not deduplicate NULL-environment rows — confirm
+  /// project-wide state is upserted by id.
+  environment   RuntimeEnvironment? @relation(fields: [environmentId], references: [id], onDelete: Cascade, onUpdate: Cascade)
+  environmentId String?
+
+  taskIdentifier   String
+  errorFingerprint String
+
+  status ErrorGroupStatus @default(UNRESOLVED)
+
+  /// Error is ignored until this date
+  ignoredUntil                 DateTime?
+  /// Error is ignored until this occurrence rate
+  ignoredUntilOccurrenceRate   Int?
+  /// Error is ignored until this total occurrences
+  ignoredUntilTotalOccurrences Int?
+
+  /// Total occurrence count at the time the error was ignored (from ClickHouse).
+  /// Used with ignoredUntilTotalOccurrences to compute occurrences since ignoring.
+  ignoredAtOccurrenceCount BigInt?
+
+  /// Error was ignored at this date
+  ignoredAt       DateTime?
+  /// Reason for ignoring the error
+  ignoredReason   String?
+  /// User who ignored the error
+  ignoredByUserId String?
+
+  /// Error was resolved at this date
+  resolvedAt        DateTime?
+  /// Error was resolved in this version
+  resolvedInVersion String?
+  /// User who resolved the error
+  resolvedBy        String?
+
+  createdAt DateTime @default(now())
+  updatedAt DateTime @updatedAt
+
+  @@unique([environmentId, taskIdentifier, errorFingerprint])
+  @@index([environmentId, status])
+}
diff --git a/internal-packages/emails/emails/alert-error-group.tsx b/internal-packages/emails/emails/alert-error-group.tsx
new file mode 100644
index 00000000000..f584f06edba
--- /dev/null
+++ b/internal-packages/emails/emails/alert-error-group.tsx
@@ -0,0 +1,114 @@
+import {
+ Body,
+ CodeBlock,
+ Container,
+ Head,
+ Html,
+ Link,
+ Preview,
+ Text,
+ dracula,
+} from "@react-email/components";
+import { z } from "zod";
+import { Footer } from "./components/Footer";
+import { Image } from "./components/Image";
+import { anchor, container, h1, main, paragraphLight, paragraphTight } from "./components/styles";
+import React from "react";
+
+// Payload for the "alert-error-group" email template; `email` is the literal
+// discriminator used when this schema joins the DeliverEmailSchema union.
+export const AlertErrorGroupEmailSchema = z.object({
+  email: z.literal("alert-error-group"),
+  // Why the alert fired: brand-new error, regression, or an ignored error that
+  // resurfaced (see classificationLabels for the display strings).
+  classification: z.enum(["new_issue", "regression", "unignored"]),
+  taskIdentifier: z.string(),
+  environment: z.string(),
+  error: z.object({
+    message: z.string(),
+    type: z.string().optional(),
+    stackTrace: z.string().optional(),
+  }),
+  occurrenceCount: z.number(),
+  errorLink: z.string().url(),
+  organization: z.string(),
+  project: z.string(),
+});
+
+// Fixed: both type expressions lost their type arguments (bare `z.infer;` and
+// bare `Record`); restored from the schema's classification enum.
+type AlertErrorGroupEmailProps = z.infer<typeof AlertErrorGroupEmailSchema>;
+
+// Human-readable heading for each alert classification.
+const classificationLabels: Record<AlertErrorGroupEmailProps["classification"], string> = {
+  new_issue: "New error",
+  regression: "Regression",
+  unignored: "Error resurfaced",
+};
+
+// Example props for the react-email preview server; Email() spreads these under
+// the incoming props so any omitted prop falls back to this sample data.
+const previewDefaults: AlertErrorGroupEmailProps = {
+  email: "alert-error-group",
+  classification: "new_issue",
+  taskIdentifier: "my-task",
+  environment: "Production",
+  error: {
+    message: "Cannot read property 'foo' of undefined",
+    type: "TypeError",
+    stackTrace: "TypeError: Cannot read property 'foo' of undefined\n at Object.",
+  },
+  occurrenceCount: 42,
+  errorLink: "https://trigger.dev",
+  organization: "my-organization",
+  project: "my-project",
+};
+
+/**
+ * Renders the error-group alert email. Missing props fall back to
+ * previewDefaults so the template also works in the react-email preview.
+ */
+export default function Email(props: AlertErrorGroupEmailProps) {
+  const {
+    classification,
+    taskIdentifier,
+    environment,
+    error,
+    occurrenceCount,
+    errorLink,
+    organization,
+    project,
+  } = {
+    ...previewDefaults,
+    ...props,
+  };
+
+  const label = classificationLabels[classification] ?? "Error alert";
+
+  // NOTE(review): the JSX markup below was reconstructed — the original tags
+  // were stripped by extraction. Verify against the other alert-* templates.
+  return (
+    <Html>
+      <Head />
+      <Preview>
+        {`${organization}: [${label}] ${error.type ?? "Error"} in ${taskIdentifier} (${environment})`}
+      </Preview>
+      <Body style={main}>
+        <Container style={container}>
+          <Text style={h1}>
+            {label}: {error.type ?? "Error"} in {taskIdentifier}
+          </Text>
+          <Text style={paragraphTight}>Organization: {organization}</Text>
+          <Text style={paragraphTight}>Project: {project}</Text>
+          <Text style={paragraphTight}>Task: {taskIdentifier}</Text>
+          <Text style={paragraphTight}>Environment: {environment}</Text>
+          <Text style={paragraphTight}>Occurrences: {occurrenceCount}</Text>
+
+          <Text style={paragraphLight}>{error.message}</Text>
+          {error.stackTrace && (
+            <CodeBlock code={error.stackTrace} theme={dracula} lineNumbers language="log" />
+          )}
+
+          <Link href={errorLink} style={anchor}>
+            Investigate this error
+          </Link>
+
+          <Image path="/emails/logo-mono.png" width="156" height="28" alt="Trigger.dev" />
+          <Footer />
+        </Container>
+      </Body>
+    </Html>
+  );
+}
diff --git a/internal-packages/emails/src/index.tsx b/internal-packages/emails/src/index.tsx
index e43e60f3f4c..a1bd00d03cf 100644
--- a/internal-packages/emails/src/index.tsx
+++ b/internal-packages/emails/src/index.tsx
@@ -2,6 +2,9 @@ import { ReactElement } from "react";
import { z } from "zod";
import AlertAttemptFailureEmail, { AlertAttemptEmailSchema } from "../emails/alert-attempt-failure";
+import AlertErrorGroupEmail, {
+ AlertErrorGroupEmailSchema,
+} from "../emails/alert-error-group";
import AlertRunFailureEmail, { AlertRunEmailSchema } from "../emails/alert-run-failure";
import { setGlobalBasePath } from "../emails/components/BasePath";
import AlertDeploymentFailureEmail, {
@@ -31,6 +34,7 @@ export const DeliverEmailSchema = z
InviteEmailSchema,
AlertRunEmailSchema,
AlertAttemptEmailSchema,
+ AlertErrorGroupEmailSchema,
AlertDeploymentFailureEmailSchema,
AlertDeploymentSuccessEmailSchema,
MfaEnabledEmailSchema,
@@ -114,6 +118,18 @@ export class EmailClient {
component: ,
};
}
+ case "alert-error-group": {
+ const classLabel =
+ data.classification === "new_issue"
+ ? "New error"
+ : data.classification === "regression"
+ ? "Regression"
+ : "Error resurfaced";
+ return {
+ subject: `[${data.organization}] ${classLabel}: ${data.error.type ?? "Error"} in ${data.taskIdentifier} [${data.environment}]`,
+ component: ,
+ };
+ }
case "alert-deployment-failure": {
return {
subject: `[${data.organization}] Deployment ${data.version} [${data.environment}] failed: ${data.error.name}`,
diff --git a/packages/core/src/v3/schemas/webhooks.ts b/packages/core/src/v3/schemas/webhooks.ts
index 047ea98c4b3..b5ed927602e 100644
--- a/packages/core/src/v3/schemas/webhooks.ts
+++ b/packages/core/src/v3/schemas/webhooks.ts
@@ -190,6 +190,62 @@ export type AlertWebhookDeploymentSuccessObject = z.infer<
>;
export type AlertWebhookDeploymentFailedObject = z.infer;
+/** Represents an error group alert webhook payload */
+export const AlertWebhookErrorGroupObject = z.object({
+  /** Classification of the error alert */
+  classification: z.enum(["new_issue", "regression", "unignored"]),
+  /** Error information */
+  error: z.object({
+    /** Error fingerprint identifier */
+    fingerprint: z.string(),
+    /** Error type */
+    type: z.string(),
+    /** Error message */
+    message: z.string(),
+    /** Sample stack trace */
+    stackTrace: z.string().optional(),
+    /** When the error was first seen */
+    firstSeen: z.coerce.date(),
+    /** When the error was last seen */
+    lastSeen: z.coerce.date(),
+    /** Number of occurrences */
+    occurrenceCount: z.number(),
+    /** Task identifier where the error occurred */
+    taskIdentifier: z.string(),
+  }),
+  /** Environment information */
+  environment: z.object({
+    /** Environment ID */
+    id: z.string(),
+    /** Environment name */
+    name: z.string(),
+  }),
+  /** Organization information */
+  organization: z.object({
+    /** Organization ID */
+    id: z.string(),
+    /** Organization slug */
+    slug: z.string(),
+    /** Organization name */
+    name: z.string(),
+  }),
+  /** Project information */
+  project: z.object({
+    /** Project ID */
+    id: z.string(),
+    /** Project reference */
+    ref: z.string(),
+    /** Project slug */
+    slug: z.string(),
+    /** Project name */
+    name: z.string(),
+  }),
+  /** URL to view the error in the dashboard */
+  dashboardUrl: z.string(),
+});
+
+// Fixed: z.infer requires its type argument (was garbled to bare `z.infer;`).
+export type AlertWebhookErrorGroupObject = z.infer<typeof AlertWebhookErrorGroupObject>;
+
/** Common properties for all webhooks */
const commonProperties = {
/** Webhook ID */
@@ -220,9 +276,16 @@ export const Webhook = z.discriminatedUnion("type", [
type: z.literal("alert.deployment.failed"),
object: AlertWebhookDeploymentFailedObject,
}),
+ /** Error group alert webhook */
+ z.object({
+ ...commonProperties,
+ type: z.literal("alert.error"),
+ object: AlertWebhookErrorGroupObject,
+ }),
]);
// Fixed: these aliases lost their type arguments (bare `z.infer;`/`Extract;`);
// restored from the discriminated-union literals declared above.
export type Webhook = z.infer<typeof Webhook>;
export type RunFailedWebhook = Extract<Webhook, { type: "alert.run.failed" }>;
export type DeploymentSuccessWebhook = Extract<Webhook, { type: "alert.deployment.success" }>;
export type DeploymentFailedWebhook = Extract<Webhook, { type: "alert.deployment.failed" }>;
+export type ErrorWebhook = Extract<Webhook, { type: "alert.error" }>;