5 changes: 5 additions & 0 deletions apps/gateway/src/common/tracer.ts
@@ -5,6 +5,11 @@ tracer.init({
apmTracingEnabled: process.env.NODE_ENV === 'production',
service: 'latitude-llm-gateway',
env: process.env.NODE_ENV,
blocklist: []
})

tracer.use('http', {
blocklist: [/AI_NoOutputGeneratedError/]
})

export default tracer
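For reference, a sketch of what `apps/gateway/src/common/tracer.ts` would look like with the hunk above applied. The `dd-trace` import sits outside the hunk and is assumed here rather than shown in the diff; the `blocklist` keys simply mirror the change, with the per-plugin `http` blocklist understood to exclude matching request URLs from tracing.

```typescript
// Sketch of the resulting file. The import line is assumed (it is outside the
// diff hunk); the blocklist values mirror the hunk above.
import tracer from 'dd-trace'

tracer.init({
  apmTracingEnabled: process.env.NODE_ENV === 'production',
  service: 'latitude-llm-gateway',
  env: process.env.NODE_ENV,
  blocklist: [],
})

// The http integration skips requests whose URL matches a blocklist entry
// (strings or RegExps are accepted).
tracer.use('http', {
  blocklist: [/AI_NoOutputGeneratedError/],
})

export default tracer
```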
9 changes: 0 additions & 9 deletions apps/workers/docker/Dockerfile
@@ -49,18 +49,9 @@ RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install \
COPY --from=pruner /app/out/full/ .

RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
--mount=type=secret,id=SENTRY_AUTH_TOKEN \
BUILDING_CONTAINER=true \
SENTRY_AUTH_TOKEN="$(cat /run/secrets/SENTRY_AUTH_TOKEN 2>/dev/null || echo '')" \
pnpm turbo build --filter="${PROJECT}..."

# Run sentry:sourcemaps if SENTRY_AUTH_TOKEN secret is present and not empty
RUN --mount=type=secret,id=SENTRY_AUTH_TOKEN \
if [ -s /run/secrets/SENTRY_AUTH_TOKEN ]; then \
cd ${PROJECT_PATH} && \
SENTRY_AUTH_TOKEN="$(cat /run/secrets/SENTRY_AUTH_TOKEN)" pnpm sentry:sourcemaps; \
fi

# Since `pnpm prune` doesn't handle recursive dependencies effectively,
# we follow pnpm's recommended approach: remove node_modules entirely
# and perform a fresh production install with --frozen-lockfile
5 changes: 1 addition & 4 deletions apps/workers/package.json
@@ -10,17 +10,14 @@
"dev:debug": "tsx watch --inspect-brk src/server",
"lint": "eslint src/",
"start": "node -r module-alias/register ./dist --env=production",
"tc": "tsc --noEmit",
"sentry:sourcemaps": "sentry-cli sourcemaps inject --org latitude-l5 --project latitude-workers ./dist && sentry-cli sourcemaps upload --org latitude-l5 --project latitude-workers ./dist"
"tc": "tsc --noEmit"
},
"dependencies": {
"@bull-board/api": "6.10.1",
"@bull-board/express": "6.10.1",
"@latitude-data/core": "workspace:^",
"@latitude-data/env": "workspace:^",
"@latitude-data/telemetry": "workspace:*",
"@sentry/cli": "2.37.0",
"@sentry/node": "9.9.0",
"@t3-oss/env-core": "*",
"bullmq": "5.44.4",
"dd-trace": "catalog:",
11 changes: 0 additions & 11 deletions apps/workers/src/server.ts
@@ -5,10 +5,6 @@ import { createBullBoard } from '@bull-board/api'
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter.js'
import { ExpressAdapter } from '@bull-board/express'

import {
captureException,
captureMessage,
} from '@latitude-data/core/utils/workers/sentry'
import { startWorkers } from './workers'
import { setupSchedules } from './workers/schedule'
import { env } from '@latitude-data/env'
@@ -94,10 +90,3 @@ const gracefulShutdown = async (signal: string) => {
process.on('SIGINT', () => gracefulShutdown('SIGINT'))
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'))

process.on('uncaughtException', function (err) {
captureException(err)
})

process.on('unhandledRejection', (reason: string) => {
captureMessage(reason)
})
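The two handlers removed above were the worker entrypoint's only global error hooks. If some process-level visibility is still wanted without Sentry, a console-based stand-in in the spirit of the FIXME fallbacks elsewhere in this PR could look like the sketch below (not part of this change; both events are standard Node process APIs).

```typescript
// Hypothetical console-only fallback, not included in this PR.
// Registering an 'uncaughtException' listener overrides Node's default
// crash-on-throw behavior, so exit explicitly after logging.
process.on('uncaughtException', (err: Error) => {
  console.error('uncaughtException:', err)
  process.exit(1)
})

process.on('unhandledRejection', (reason: unknown) => {
  console.error('unhandledRejection:', reason)
})
```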
5 changes: 0 additions & 5 deletions apps/workers/src/workers/utils/createWorker.ts
@@ -1,6 +1,5 @@
import { Queues } from '@latitude-data/core/queues/types'
import { REDIS_KEY_PREFIX } from '@latitude-data/core/redis'
import { captureException } from '@latitude-data/core/utils/workers/sentry'
import { Worker, WorkerOptions } from 'bullmq'
import { WORKER_OPTIONS } from './connectionConfig'
import { createJobHandler } from './createJobHandler'
@@ -22,10 +21,6 @@ export function createWorker<T extends Record<string, Function>>(
prefix: REDIS_KEY_PREFIX,
})

worker.on('error', (error: Error) => {
captureException(error)
})

// Register worker with job tracker for scale-in protection
jobTracker.registerWorker(worker)

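One caveat when dropping the `worker.on('error', ...)` listener entirely: BullMQ's `Worker` is a Node `EventEmitter`, so an `'error'` event with no listener attached is rethrown as an uncaught exception. A minimal console-based listener (a sketch, not something this PR adds) would be:

```typescript
// Hypothetical replacement for the removed Sentry listener, assuming `worker`
// is the BullMQ Worker instance created above in createWorker().
worker.on('error', (error: Error) => {
  console.error('BullMQ worker error:', error)
})
```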
1 change: 0 additions & 1 deletion docs/guides/self-hosted/production-setup.mdx
@@ -107,7 +107,6 @@ If using [Third-Party Integrations via MCP](/guides/integration/mcp-integrations
- **Logging**: Configure Docker log drivers or orchestrator logging to aggregate logs from all services (web, gateway, worker, etc.).
- **Monitoring**: Use tools like Prometheus/Grafana, Datadog, or cloud provider monitoring services to track resource usage (CPU, memory), error rates, request latency, and queue lengths.
- **Health Checks**: Configure health check endpoints in your load balancer or orchestrator to monitor service availability.
- **Optional Integrations**: Configure Sentry (`SENTRY_DSN`) or PostHog in your `.env` for enhanced error tracking and analytics.

## Next Steps

3 changes: 1 addition & 2 deletions docs/guides/self-hosted/production.mdx
@@ -69,7 +69,7 @@ Make sure this network is created before running the containers with `docker com
- `DISABLE_EMAIL_AUTHENTICATION`: Disable email authentication (optional, default: `false`)

If `MAIL_TRANSPORT` is set to `smtp`, you must provide the following environment variables:

- `SMTP_HOST`: The SMTP server host (e.g., `smtp.gmail.com`)
- `SMTP_PORT`: The SMTP port number (e.g., `465` for SMTPS or `587` for STARTTLS)
- `SMTP_SECURE`: Set to `true` for SMTPS (`465`) or `false` for STARTTLS (`587`)
@@ -157,7 +157,6 @@ Make sure this network is created before running the containers with `docker com
AWS SDK automatically handles credentials from attached IAM roles.

- **Optional Features**:
- Sentry integration for error tracking
- PostHog for analytics

## Starting the Services
10 changes: 1 addition & 9 deletions packages/core/src/events/handlers/undeployDocumentTriggerJob.ts
@@ -1,20 +1,12 @@
import { getPipedreamClient } from '../../services/integrations/pipedream/apps'
import { EventHandler } from '../events'
import { DocumentTriggerUndeployRequestedEvent } from '../events'
import { captureException } from '../../utils/workers/sentry'

export const undeployDocumentTriggerJob: EventHandler<
DocumentTriggerUndeployRequestedEvent
> = async ({ data: event }) => {
const { triggerId, externalUserId } = event.data
const pipedream = getPipedreamClient().unwrap()

try {
await pipedream.deployedTriggers.delete(triggerId, {
externalUserId,
})
} catch (error) {
captureException(error as Error)
throw error
}
await pipedream.deployedTriggers.delete(triggerId, { externalUserId })
}
@@ -8,7 +8,6 @@ import {
EvaluationsV2Repository,
} from '../../../repositories'
import { generateDocumentSuggestion } from '../../../services/documentSuggestions'
import { captureException } from '../../../utils/workers/sentry'

export type GenerateDocumentSuggestionJobData = {
workspaceId: number
@@ -67,7 +66,7 @@ export const generateDocumentSuggestionJob = async (
})
if (result.error) {
if (result.error instanceof UnprocessableEntityError) {
captureException(result.error)
// Ignore
} else throw result.error
}
}
@@ -15,7 +15,6 @@ import {
runEvaluationV2,
} from '../../../services/evaluationsV2/run'
import serializeProviderLog from '../../../services/providerLogs/serialize'
import { captureException } from '../../../utils/workers/sentry'
import { updateExperimentStatus } from '../experiments/shared'

export type RunEvaluationV2JobData = {
@@ -133,8 +132,6 @@ export const runEvaluationV2Job = async (job: Job<RunEvaluationV2JobData>) => {
} catch (error) {
if (isErrorRetryable(error as Error)) throw error

captureException(error as Error)

if (experiment) {
await updateExperimentStatus(
{ workspaceId, experiment },
@@ -6,7 +6,6 @@ import { NotFoundError } from '../../../lib/errors'
import { ExperimentsRepository } from '../../../repositories'
import { isErrorRetryable } from '../../../services/evaluationsV2/run'
import { BACKGROUND } from '../../../telemetry'
import { captureException } from '../../../utils/workers/sentry'
import { queues } from '../../queues'
import { runDocumentAtCommitWithAutoToolResponses } from '../documents/runDocumentAtCommitWithAutoToolResponses'
import {
@@ -89,8 +88,6 @@ export const runDocumentForExperimentJob = async (
} catch (error) {
if (isErrorRetryable(error as Error)) throw error

captureException(error as Error)

await updateExperimentStatus(
{
workspaceId,
19 changes: 2 additions & 17 deletions packages/core/src/jobs/job-definitions/tracing/ingestSpansJob.ts
@@ -4,9 +4,7 @@ import {
SpanIngestionData,
} from '../../../constants'
import { diskFactory } from '../../../lib/disk'
import { UnprocessableEntityError } from '../../../lib/errors'
import { ingestSpans } from '../../../services/tracing/spans/ingest'
import { captureException } from '../../../utils/workers/sentry'

export type IngestSpansJobData = {
ingestionId: string
@@ -28,23 +26,10 @@ export const ingestSpansJob = async (job: Job<IngestSpansJobData>) => {
const payload = await disk.get(key)
data = JSON.parse(payload) as SpanIngestionData
} catch (error) {
captureException(error as Error)
return
}
const { spans } = data

const result = await ingestSpans({ spans, apiKeyId, workspaceId })
if (result.error) {
// @ts-expect-error ingestSpans currently ignores all errors but leaving this for the future
if (result.error instanceof UnprocessableEntityError) {
captureException(result.error)
} else throw result.error
}

try {
await disk.delete(key)
} catch (error) {
captureException(error as Error)
return
}
await ingestSpans({ spans, apiKeyId, workspaceId })
await disk.delete(key)
}
4 changes: 2 additions & 2 deletions packages/core/src/services/copilot/latte/addMessage.ts
@@ -16,7 +16,6 @@ import {
import { ErrorResult, Result } from '../../../lib/Result'
import { PromisedResult } from '../../../lib/Transaction'
import { BACKGROUND, TelemetryContext } from '../../../telemetry'
import { captureException } from '../../../utils/workers/sentry'
import { WebsocketClient } from '../../../websockets/workers'
import { runDocumentAtCommit } from '../../commits'
import { addMessages } from '../../documentLogs/addMessages/index'
@@ -164,7 +163,8 @@ async function generateLatteResponse(args: GenerateLatteResponseArgs) {
}) // Note: failing silently

if (consuming.error) {
captureException(consuming.error)
// FIXME: Add captureException
console.log(consuming.error)
}

if (error && !isAbortError(error)) {
Expand Down
4 changes: 2 additions & 2 deletions packages/core/src/services/copilot/latte/credits/consume.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ import { cache as getCache } from '../../../../cache'
import { Result } from '../../../../lib/Result'
import Transaction from '../../../../lib/Transaction'
import { latteRequests } from '../../../../schema/models/latteRequests'
import { captureException } from '../../../../utils/workers/sentry'
import { WebsocketClient } from '../../../../websockets/workers'
import { computeLatteCredits } from './compute'
import { usageLatteCredits } from './usage'
@@ -66,7 +65,8 @@ export async function consumeLatteCredits(
const key = LATTE_USAGE_CACHE_KEY(workspace.id)
await cache.del(key)
} catch (error) {
captureException(error as Error) // Note: failing silently
// FIXME: Add captureException
console.log(error as Error) // Note: failing silently
}

const counting = await usageLatteCredits({ workspace, fresh: true })
Expand Down
4 changes: 2 additions & 2 deletions packages/core/src/services/copilot/latte/credits/usage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import {
import { Result } from '../../../../lib/Result'
import { LatteRequestsRepository } from '../../../../repositories'
import { Workspace } from '../../../../schema/types'
import { captureException } from '../../../../utils/workers/sentry'
import { computeQuota } from '../../../grants/quota'
import { findWorkspaceSubscription } from '../../../subscriptions/data-access/find'

@@ -69,7 +68,8 @@ export async function usageLatteCredits(
const item = JSON.stringify(usage)
await cache.set(key, item, 'EX', LATTE_USAGE_CACHE_TTL)
} catch (error) {
captureException(error as Error) // Note: failing silently
// FIXME: Add captureException
console.log(error as Error) // Note: failing silently
}

return Result.ok(usage)
@@ -15,7 +15,6 @@ import {
import { reloadComponentProps } from '../../../../integrations/pipedream/components/reloadComponentProps'
import { LatitudeError } from '@latitude-data/constants/errors'
import { PipedreamIntegrationConfiguration } from '../../../../integrations/helpers/schema'
import { captureMessage } from '../../../../../utils/workers/sentry'

export async function validateLattesChoices({
pipedream,
@@ -242,7 +241,8 @@ export function isValidPropType(propType: string, value: any): boolean {

default:
// For unknown types, don't be permissive and capture a warning message
captureMessage(`Unknown Pipedream prop type: ${propType}`, 'warning')
// FIXME: Add captureMessage
console.log(`Unknown Pipedream prop type: ${propType}`, 'warning')
return false
}
}
11 changes: 3 additions & 8 deletions packages/core/src/services/grants/issue.ts
@@ -11,7 +11,6 @@ import { Result } from '../../lib/Result'
import Transaction from '../../lib/Transaction'
import { grants } from '../../schema/models/grants'
import { Workspace } from '../../schema/types'
import { captureException } from '../../utils/workers/sentry'
import { findWorkspaceSubscription } from '../subscriptions/data-access/find'
import { validateGrant } from './validate'

@@ -76,13 +75,9 @@ export async function issueGrant(
} as Grant

if (type === QuotaType.Credits) {
try {
const cache = await getCache()
const key = LATTE_USAGE_CACHE_KEY(workspace.id)
await cache.del(key)
} catch (error) {
captureException(error as Error) // Note: failing silently
}
const cache = await getCache()
const key = LATTE_USAGE_CACHE_KEY(workspace.id)
await cache.del(key)
}

// TODO - runs dont update automatically when granted an issue
6 changes: 0 additions & 6 deletions packages/core/src/services/tracing/spans/ingest.ts
@@ -13,7 +13,6 @@ import {
} from '../../../constants'
import { Result } from '../../../lib/Result'
import { ApiKey, Workspace } from '../../../schema/types'
import { captureException } from '../../../utils/workers/sentry'
import {
convertSpanAttributes,
convertSpanStatus,
@@ -50,14 +49,12 @@ export async function ingestSpans(
for (const span of spans) {
const converting = convertSpanAttributes(span.attributes || [])
if (converting.error) {
captureException(converting.error)
continue
}
const attributes = converting.value

const extracting = extractSpanType(attributes)
if (extracting.error) {
captureException(extracting.error)
continue
}
const type = extracting.value
@@ -68,7 +65,6 @@
db,
)
if (extractingApiKeyAndWorkspace.error) {
captureException(extractingApiKeyAndWorkspace.error)
continue
}
const { apiKey, workspace } = extractingApiKeyAndWorkspace.value
@@ -79,7 +75,6 @@

const enriching = enrichAttributes({ resource, scope, span })
if (enriching.error) {
captureException(enriching.error)
continue
}
span.attributes = enriching.value.filter(
@@ -119,7 +114,6 @@

const processing = await processSpansBulk({ spans, apiKey, workspace })
if (processing.error) {
captureException(processing.error)
continue
}
}