diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index bba0b0d3e3af6..75fec2640b820 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -19,6 +19,6 @@ /apps/studio/components/interfaces/Organization/Documents/ @supabase/security /apps/studio/pages/new/index.tsx @supabase/security -/apps/studio/data/sql/queries/ @supabase/postgres @avallete +/apps/studio/data/**/*.sql.ts @supabase/postgres @avallete /packages/shared-data/compute-disk-limits.ts @supabase/infra diff --git a/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.CleanupNotice.tsx b/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.CleanupNotice.tsx index cb31fc6d2e02e..2f59734a55860 100644 --- a/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.CleanupNotice.tsx +++ b/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.CleanupNotice.tsx @@ -1,4 +1,4 @@ -import { getScheduleDeleteCronJobRunDetailsSql } from 'data/sql/queries/delete-cron-job-run-details' +import { getScheduleDeleteCronJobRunDetailsSql } from 'data/database-cron-jobs/database-cron-jobs.sql' import { CheckCircle2, XCircle } from 'lucide-react' import { Button, diff --git a/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.useCleanupActions.ts b/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.useCleanupActions.ts index 9800370904a33..037dd747a7f4d 100644 --- a/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.useCleanupActions.ts +++ b/apps/studio/components/interfaces/Integrations/CronJobs/CronJobsTab.useCleanupActions.ts @@ -1,11 +1,11 @@ -import { useExecuteSqlMutation } from 'data/sql/execute-sql-mutation' import { CTID_BATCH_PAGE_SIZE, getDeleteOldCronJobRunDetailsByCtidKey, getDeleteOldCronJobRunDetailsByCtidSql, getJobRunDetailsPageCountKey, getJobRunDetailsPageCountSql, -} from 'data/sql/queries/delete-cron-job-run-details' +} from 'data/database-cron-jobs/database-cron-jobs.sql' +import { useExecuteSqlMutation } from 'data/sql/execute-sql-mutation' import { useCallback, useRef, useState } from 'react' import { toast } from 'sonner' diff --git a/apps/studio/components/interfaces/Realtime/RealtimeSettings.tsx b/apps/studio/components/interfaces/Realtime/RealtimeSettings.tsx index f235598abe9ca..a725664e6f352 100644 --- a/apps/studio/components/interfaces/Realtime/RealtimeSettings.tsx +++ b/apps/studio/components/interfaces/Realtime/RealtimeSettings.tsx @@ -20,6 +20,7 @@ import { import { useAsyncCheckPermissions } from 'hooks/misc/useCheckPermissions' import { useSelectedOrganizationQuery } from 'hooks/misc/useSelectedOrganization' import { useSelectedProjectQuery } from 'hooks/misc/useSelectedProject' +import { useCheckEntitlements } from 'hooks/misc/useCheckEntitlements' import { Button, Card, @@ -89,15 +90,38 @@ export const RealtimeSettings = () => { }, }) + const { getEntitlementMax: getEntitledMaxPayloadSize } = useCheckEntitlements( + 'realtime.max_payload_size_in_kb' + ) + const entitledMaxPayloadSize = getEntitledMaxPayloadSize() ?? 3000 + + const { getEntitlementMax: getEntitledMaxConcurrentUsers } = useCheckEntitlements( + 'realtime.max_concurrent_users' + ) + const entitledMaxConcurrentUsers = getEntitledMaxConcurrentUsers() ?? 50000 + + const { getEntitlementMax: getEntitledMaxEventsPerSecond } = useCheckEntitlements( + 'realtime.max_events_per_second' + ) + const entitledMaxEventsPerSecond = getEntitledMaxEventsPerSecond() ?? 
10000 + + const { getEntitlementMax: getEntitledMaxPresenceEventsPerSecond } = useCheckEntitlements( + 'realtime.max_presence_events_per_second' + ) + const entitledMaxPresenceEventsPerSecond = getEntitledMaxPresenceEventsPerSecond() ?? 10000 + const FormSchema = z.object({ connection_pool: z.coerce .number() .min(1) .max(maxConn?.maxConnections ?? 100), - max_concurrent_users: z.coerce.number().min(1).max(50000), - max_events_per_second: z.coerce.number().min(1).max(10000), - max_presence_events_per_second: z.coerce.number().min(1).max(10000), - max_payload_size_in_kb: z.coerce.number().min(1).max(3000), + max_concurrent_users: z.coerce.number().min(1).max(entitledMaxConcurrentUsers), + max_events_per_second: z.coerce.number().min(1).max(entitledMaxEventsPerSecond), + max_presence_events_per_second: z.coerce + .number() + .min(1) + .max(entitledMaxPresenceEventsPerSecond), + max_payload_size_in_kb: z.coerce.number().min(1).max(entitledMaxPayloadSize), suspend: z.boolean(), // [Joshen] These fields are temporarily hidden from the UI // max_bytes_per_second: z.coerce.number().min(1).max(10000000), diff --git a/apps/studio/components/interfaces/Storage/StorageSettings/StorageFileSizeLimitErrorMessage.tsx b/apps/studio/components/interfaces/Storage/StorageSettings/StorageFileSizeLimitErrorMessage.tsx index 9a3f9e8c315a2..7917e07f552b0 100644 --- a/apps/studio/components/interfaces/Storage/StorageSettings/StorageFileSizeLimitErrorMessage.tsx +++ b/apps/studio/components/interfaces/Storage/StorageSettings/StorageFileSizeLimitErrorMessage.tsx @@ -1,9 +1,9 @@ +import { InlineLink } from 'components/ui/InlineLink' +import { LARGEST_SIZE_LIMIT_BUCKETS_COUNT } from 'data/storage/storage.sql' import Link from 'next/link' import { type FieldError } from 'react-hook-form' - -import { InlineLink } from 'components/ui/InlineLink' -import { LARGEST_SIZE_LIMIT_BUCKETS_COUNT } from 'data/sql/queries/get-largest-size-limit-buckets' import { cn, Tooltip, TooltipContent, TooltipTrigger } from 'ui' + import { decodeBucketLimitErrorMessage, formatBytesForDisplay, diff --git a/apps/studio/data/sql/queries/get-user.ts b/apps/studio/data/auth/auth.sql.ts similarity index 100% rename from apps/studio/data/sql/queries/get-user.ts rename to apps/studio/data/auth/auth.sql.ts diff --git a/apps/studio/data/auth/user-query.ts b/apps/studio/data/auth/user-query.ts index 40620107f4b2d..5bd5b0dcd7d14 100644 --- a/apps/studio/data/auth/user-query.ts +++ b/apps/studio/data/auth/user-query.ts @@ -1,11 +1,11 @@ import { useQuery } from '@tanstack/react-query' - -import { UUID_REGEX } from '@/lib/constants' import { executeSql, type ExecuteSqlError } from 'data/sql/execute-sql-query' -import { getUserSQL } from 'data/sql/queries/get-user' import { UseCustomQueryOptions } from 'types' + +import { getUserSQL } from './auth.sql' import { authKeys } from './keys' import { User } from './users-infinite-query' +import { UUID_REGEX } from '@/lib/constants' type UserVariables = { projectRef?: string diff --git a/apps/studio/data/database-cron-jobs/database-cron-jobs-count-estimate-query.ts b/apps/studio/data/database-cron-jobs/database-cron-jobs-count-estimate-query.ts index cf3686d0b8d10..8f645bdff60f1 100644 --- a/apps/studio/data/database-cron-jobs/database-cron-jobs-count-estimate-query.ts +++ b/apps/studio/data/database-cron-jobs/database-cron-jobs-count-estimate-query.ts @@ -1,11 +1,7 @@ import { useQuery } from '@tanstack/react-query' - import type { ConnectionVars } from 'data/common.types' +import { getLiveTupleEstimate, 
getLiveTupleEstimateKey } from 'data/database/database.sql' import { executeSql } from 'data/sql/execute-sql-query' -import { - getLiveTupleEstimate, - getLiveTupleEstimateKey, -} from 'data/sql/queries/get-live-tuple-stats' import type { UseCustomQueryOptions } from 'types' type DatabaseCronJobsCountEstimateVariables = ConnectionVars diff --git a/apps/studio/data/database-cron-jobs/database-cron-jobs-infinite-query.ts b/apps/studio/data/database-cron-jobs/database-cron-jobs-infinite-query.ts index 84cd0ee9c33d9..bf3eb3c2fa51e 100644 --- a/apps/studio/data/database-cron-jobs/database-cron-jobs-infinite-query.ts +++ b/apps/studio/data/database-cron-jobs/database-cron-jobs-infinite-query.ts @@ -2,7 +2,7 @@ import { InfiniteData, useInfiniteQuery } from '@tanstack/react-query' import { COST_THRESHOLD_ERROR, executeSql } from 'data/sql/execute-sql-query' import type { ResponseError, UseCustomInfiniteQueryOptions } from 'types' -import { getCronJobsSql } from '../sql/queries/get-cron-jobs' +import { getCronJobsSql } from './database-cron-jobs.sql' import { databaseCronJobsKeys } from './keys' export const CRON_JOBS_PAGE_LIMIT = 20 diff --git a/apps/studio/data/database-cron-jobs/database-cron-jobs-minimal-infinite-query.ts b/apps/studio/data/database-cron-jobs/database-cron-jobs-minimal-infinite-query.ts index 26106ae581a81..b833564d2e06f 100644 --- a/apps/studio/data/database-cron-jobs/database-cron-jobs-minimal-infinite-query.ts +++ b/apps/studio/data/database-cron-jobs/database-cron-jobs-minimal-infinite-query.ts @@ -2,12 +2,12 @@ import { InfiniteData, useInfiniteQuery } from '@tanstack/react-query' import { executeSql } from 'data/sql/execute-sql-query' import type { ResponseError, UseCustomInfiniteQueryOptions } from 'types' -import { getCronJobsMinimalSql } from '../sql/queries/get-cron-jobs' import { CRON_JOBS_PAGE_LIMIT, CronJob, DatabaseCronJobRunsVariables, } from './database-cron-jobs-infinite-query' +import { getCronJobsMinimalSql } from './database-cron-jobs.sql' import { databaseCronJobsKeys } from './keys' export async function getDatabaseCronJobsMinimal({ diff --git a/apps/studio/data/sql/queries/delete-cron-job-run-details.ts b/apps/studio/data/database-cron-jobs/database-cron-jobs.sql.ts similarity index 70% rename from apps/studio/data/sql/queries/delete-cron-job-run-details.ts rename to apps/studio/data/database-cron-jobs/database-cron-jobs.sql.ts index 8fb28e27c59fd..5c88e0cef2e2b 100644 --- a/apps/studio/data/sql/queries/delete-cron-job-run-details.ts +++ b/apps/studio/data/database-cron-jobs/database-cron-jobs.sql.ts @@ -1,6 +1,6 @@ import { literal } from '@supabase/pg-meta/src/pg-format' -import { sqlKeys } from '../keys' +import { sqlKeys } from '../sql/keys' const CRON_CLEANUP_SCHEDULE_NAME = 'delete-job-run-details' const CRON_CLEANUP_SCHEDULE_EXPRESSION = '0 12 * * *' @@ -89,3 +89,74 @@ export const getScheduleDeleteCronJobRunDetailsKey = ( projectRef: string | undefined, interval: string ) => sqlKeys.query(projectRef, ['cron-job-run-details', 'schedule', interval]) + +// [Joshen] Just omits the LEFT JOIN as that's the heavy part +export const getCronJobsMinimalSql = ({ + searchTerm, + page, + limit, +}: { + searchTerm?: string + page: number + limit: number +}) => + ` +SELECT + job.jobid, + job.jobname, + job.schedule, + job.command, + job.active +FROM + cron.job job +${!!searchTerm ? 
`WHERE job.jobname ILIKE ${literal(`%${searchTerm}%`)}` : ''} +ORDER BY job.jobid +LIMIT ${limit} +OFFSET ${page * limit}; +`.trim() + +export const getCronJobsSql = ({ + searchTerm, + page, + limit, +}: { + searchTerm?: string + page: number + limit: number +}) => + ` +WITH latest_runs AS ( + SELECT + jobid, + status, + MAX(start_time) AS latest_run + FROM cron.job_run_details + GROUP BY jobid, status +), most_recent_runs AS ( + SELECT + jobid, + status, + latest_run + FROM latest_runs lr1 + WHERE latest_run = ( + SELECT MAX(latest_run) + FROM latest_runs lr2 + WHERE lr2.jobid = lr1.jobid + ) +) +SELECT + job.jobid, + job.jobname, + job.schedule, + job.command, + job.active, + mr.latest_run, + mr.status +FROM + cron.job job +LEFT JOIN most_recent_runs mr ON job.jobid = mr.jobid +${!!searchTerm ? `WHERE job.jobname ILIKE ${literal(`%${searchTerm}%`)}` : ''} +ORDER BY job.jobid +LIMIT ${limit} +OFFSET ${page * limit}; +`.trim() diff --git a/apps/studio/data/database-cron-jobs/schedule-clean-up-mutation.ts b/apps/studio/data/database-cron-jobs/schedule-clean-up-mutation.ts index 7923c247524cd..3ff43316da1d4 100644 --- a/apps/studio/data/database-cron-jobs/schedule-clean-up-mutation.ts +++ b/apps/studio/data/database-cron-jobs/schedule-clean-up-mutation.ts @@ -6,7 +6,7 @@ import type { ResponseError, UseCustomMutationOptions } from 'types' import { getScheduleDeleteCronJobRunDetailsKey, getScheduleDeleteCronJobRunDetailsSql, -} from '../sql/queries/delete-cron-job-run-details' +} from './database-cron-jobs.sql' export type ScheduleCronJobRunDetailsCleanupVariables = { projectRef: string diff --git a/apps/studio/data/database-extensions/database-extension-schema-query.ts b/apps/studio/data/database-extensions/database-extension-schema-query.ts index ef7b2d6ca020a..f7683d2d8827b 100644 --- a/apps/studio/data/database-extensions/database-extension-schema-query.ts +++ b/apps/studio/data/database-extensions/database-extension-schema-query.ts @@ -1,8 +1,8 @@ import { useQuery } from '@tanstack/react-query' import { UseCustomQueryOptions } from 'types' -import { ExecuteSqlError, executeSql } from '../sql/execute-sql-query' -import { getDatabaseExtensionDefaultSchemaSQL } from '../sql/queries/get-extension-default-schema' +import { executeSql, ExecuteSqlError } from '../sql/execute-sql-query' +import { getDatabaseExtensionDefaultSchemaSQL } from './database-extensions.sql' import { databaseExtensionsKeys } from './keys' type DatabaseExtensionDefaultSchemaVariables = { diff --git a/apps/studio/data/sql/queries/get-extension-default-schema.ts b/apps/studio/data/database-extensions/database-extensions.sql.ts similarity index 100% rename from apps/studio/data/sql/queries/get-extension-default-schema.ts rename to apps/studio/data/database-extensions/database-extensions.sql.ts diff --git a/apps/studio/data/sql/queries/get-indexes.ts b/apps/studio/data/database-indexes/database-indexes.sql.ts similarity index 100% rename from apps/studio/data/sql/queries/get-indexes.ts rename to apps/studio/data/database-indexes/database-indexes.sql.ts diff --git a/apps/studio/data/database-indexes/indexes-query.ts b/apps/studio/data/database-indexes/indexes-query.ts index d713d7b4f55f0..25f51d540faed 100644 --- a/apps/studio/data/database-indexes/indexes-query.ts +++ b/apps/studio/data/database-indexes/indexes-query.ts @@ -1,8 +1,8 @@ import { useQuery } from '@tanstack/react-query' - -import { getIndexesSQL } from 'data/sql/queries/get-indexes' import { UseCustomQueryOptions } from 'types' + import { executeSql, 
ExecuteSqlError } from '../sql/execute-sql-query' +import { getIndexesSQL } from './database-indexes.sql' import { databaseIndexesKeys } from './keys' type GetIndexesArgs = { diff --git a/apps/studio/data/sql/queries/get-live-tuple-stats.ts b/apps/studio/data/database/database.sql.ts similarity index 92% rename from apps/studio/data/sql/queries/get-live-tuple-stats.ts rename to apps/studio/data/database/database.sql.ts index d96ab8cff41a6..e1408aa15bbd7 100644 --- a/apps/studio/data/sql/queries/get-live-tuple-stats.ts +++ b/apps/studio/data/database/database.sql.ts @@ -1,5 +1,6 @@ import { literal } from '@supabase/pg-meta/src/pg-format' -import { sqlKeys } from '../keys' + +import { sqlKeys } from '../sql/keys' export const getLiveTupleEstimate = (table: string, schema: string = 'public') => { const sql = /* SQL */ ` diff --git a/apps/studio/data/sql/queries/get-cron-jobs.ts b/apps/studio/data/sql/queries/get-cron-jobs.ts deleted file mode 100644 index aa930819ca706..0000000000000 --- a/apps/studio/data/sql/queries/get-cron-jobs.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { literal } from '@supabase/pg-meta/src/pg-format' - -// [Joshen] Just omits the LEFT JOIN as that's the heavy part -export const getCronJobsMinimalSql = ({ - searchTerm, - page, - limit, -}: { - searchTerm?: string - page: number - limit: number -}) => - ` -SELECT - job.jobid, - job.jobname, - job.schedule, - job.command, - job.active -FROM - cron.job job -${!!searchTerm ? `WHERE job.jobname ILIKE ${literal(`%${searchTerm}%`)}` : ''} -ORDER BY job.jobid -LIMIT ${limit} -OFFSET ${page * limit}; -`.trim() - -export const getCronJobsSql = ({ - searchTerm, - page, - limit, -}: { - searchTerm?: string - page: number - limit: number -}) => - ` -WITH latest_runs AS ( - SELECT - jobid, - status, - MAX(start_time) AS latest_run - FROM cron.job_run_details - GROUP BY jobid, status -), most_recent_runs AS ( - SELECT - jobid, - status, - latest_run - FROM latest_runs lr1 - WHERE latest_run = ( - SELECT MAX(latest_run) - FROM latest_runs lr2 - WHERE lr2.jobid = lr1.jobid - ) -) -SELECT - job.jobid, - job.jobname, - job.schedule, - job.command, - job.active, - mr.latest_run, - mr.status -FROM - cron.job job -LEFT JOIN most_recent_runs mr ON job.jobid = mr.jobid -${!!searchTerm ? 
`WHERE job.jobname ILIKE ${literal(`%${searchTerm}%`)}` : ''} -ORDER BY job.jobid -LIMIT ${limit} -OFFSET ${page * limit}; -`.trim() diff --git a/apps/studio/data/storage/buckets-max-size-limit-query.ts b/apps/studio/data/storage/buckets-max-size-limit-query.ts index a047261afc59f..6a2c5d01fdff9 100644 --- a/apps/studio/data/storage/buckets-max-size-limit-query.ts +++ b/apps/studio/data/storage/buckets-max-size-limit-query.ts @@ -1,16 +1,13 @@ import { useQuery, useQueryClient } from '@tanstack/react-query' -import { useCallback } from 'react' - import type { ConnectionVars } from 'data/common.types' +import { getLiveTupleEstimate, getLiveTupleEstimateKey } from 'data/database/database.sql' import { executeSql } from 'data/sql/execute-sql-query' +import { useCallback } from 'react' + import { getLargestSizeLimitBucketsKey, getLargestSizeLimitBucketsSqlUnoptimized, -} from 'data/sql/queries/get-largest-size-limit-buckets' -import { - getLiveTupleEstimate, - getLiveTupleEstimateKey, -} from 'data/sql/queries/get-live-tuple-stats' +} from './storage.sql' export const THRESHOLD_FOR_AUTO_QUERYING_BUCKET_LIMITS = 10_000 diff --git a/apps/studio/data/sql/queries/get-largest-size-limit-buckets.ts b/apps/studio/data/storage/storage.sql.ts similarity index 94% rename from apps/studio/data/sql/queries/get-largest-size-limit-buckets.ts rename to apps/studio/data/storage/storage.sql.ts index 76248f2e7899c..f2dfedc0485f6 100644 --- a/apps/studio/data/sql/queries/get-largest-size-limit-buckets.ts +++ b/apps/studio/data/storage/storage.sql.ts @@ -1,4 +1,4 @@ -import { sqlKeys } from '../keys' +import { sqlKeys } from '../sql/keys' export const LARGEST_SIZE_LIMIT_BUCKETS_COUNT = 50 diff --git a/apps/studio/hooks/misc/useCheckEntitlements.ts b/apps/studio/hooks/misc/useCheckEntitlements.ts index 97f6105657897..430eac8542223 100644 --- a/apps/studio/hooks/misc/useCheckEntitlements.ts +++ b/apps/studio/hooks/misc/useCheckEntitlements.ts @@ -55,6 +55,12 @@ function getEntitlementSetValues(entitlement: Entitlement | null): string[] { : [] } +function getEntitlementMax(entitlement: Entitlement | null): number | undefined { + return isEntitlementUnlimited(entitlement) + ? 
Number.MAX_SAFE_INTEGER + : getEntitlementNumericValue(entitlement) +} + export function useCheckEntitlements( featureKey: FeatureKey, organizationSlug?: string, @@ -110,5 +116,6 @@ export function useCheckEntitlements( getEntitlementNumericValue: () => getEntitlementNumericValue(entitlement), isEntitlementUnlimited: () => isEntitlementUnlimited(entitlement), getEntitlementSetValues: () => getEntitlementSetValues(entitlement), + getEntitlementMax: () => getEntitlementMax(entitlement), } } diff --git a/apps/studio/instrumentation-client.test.ts b/apps/studio/instrumentation-client.test.ts new file mode 100644 index 0000000000000..c8f5d1aab13cc --- /dev/null +++ b/apps/studio/instrumentation-client.test.ts @@ -0,0 +1,419 @@ +import type { Event as SentryEvent, StackFrame } from '@sentry/nextjs' +import { describe, expect, it } from 'vitest' + +import { + isBrowserWalletExtensionError, + isCancellationRejection, + isChallengeExpiredError, + isUserAbortedOperation, +} from './instrumentation-client' + +describe('Sentry beforeSend filtering functions', () => { + describe('isBrowserWalletExtensionError', () => { + it('returns true for Gate.io wallet extension error (gt-window-provider.js)', () => { + const event: SentryEvent = { + exception: { + values: [ + { + type: 'TypeError', + value: 'en.shouldSetTallyForCurrentProvider is not a function', + stacktrace: { + frames: [ + { filename: 'app:///_next/static/chunks/main.js' } as StackFrame, + { filename: 'app:///gt-window-provider.js' } as StackFrame, + ], + }, + }, + ], + }, + } + + expect(isBrowserWalletExtensionError(event)).toBe(true) + }) + + it('returns true for Gate.io BTC wallet extension error (gt-window-provider-btc.js)', () => { + const event: SentryEvent = { + exception: { + values: [ + { + type: 'TypeError', + value: 'f.shouldSetTallyForCurrentProvider is not a function', + stacktrace: { + frames: [{ filename: 'app:///gt-window-provider-btc.js' } as StackFrame], + }, + }, + ], + }, + } + + expect(isBrowserWalletExtensionError(event)).toBe(true) + }) + + it('returns true for wallet-provider in abs_path', () => { + const event: SentryEvent = { + exception: { + values: [ + { + type: 'Error', + value: 'wallet error', + stacktrace: { + frames: [ + { abs_path: 'chrome-extension://abc123/wallet-provider.js' } as StackFrame, + ], + }, + }, + ], + }, + } + + expect(isBrowserWalletExtensionError(event)).toBe(true) + }) + + it('returns false for regular application errors', () => { + const event: SentryEvent = { + exception: { + values: [ + { + type: 'Error', + value: 'Regular error', + stacktrace: { + frames: [ + { filename: 'app:///_next/static/chunks/main.js' } as StackFrame, + { filename: 'app:///_next/static/chunks/pages/index.js' } as StackFrame, + ], + }, + }, + ], + }, + } + + expect(isBrowserWalletExtensionError(event)).toBe(false) + }) + + it('returns false for empty event', () => { + const event: SentryEvent = {} + expect(isBrowserWalletExtensionError(event)).toBe(false) + }) + + it('returns false when exception values are empty', () => { + const event: SentryEvent = { + exception: { + values: [], + }, + } + expect(isBrowserWalletExtensionError(event)).toBe(false) + }) + + it('returns false when stacktrace frames are undefined', () => { + const event: SentryEvent = { + exception: { + values: [ + { + type: 'Error', + value: 'Error without stacktrace', + }, + ], + }, + } + expect(isBrowserWalletExtensionError(event)).toBe(false) + }) + }) + + describe('isUserAbortedOperation', () => { + it('returns true for "operation was aborted" 
error', () => { + const error = new Error('The operation was aborted.') + const event: SentryEvent = {} + + expect(isUserAbortedOperation(error, event)).toBe(true) + }) + + it('returns true for "signal is aborted" error', () => { + const error = new Error('signal is aborted without reason') + const event: SentryEvent = {} + + expect(isUserAbortedOperation(error, event)).toBe(true) + }) + + it('returns true for "manually canceled" error', () => { + const error = new Error('operation is manually canceled') + const event: SentryEvent = {} + + expect(isUserAbortedOperation(error, event)).toBe(true) + }) + + it('returns true for "AbortError" message', () => { + const error = new Error('AbortError: The operation was aborted') + const event: SentryEvent = {} + + expect(isUserAbortedOperation(error, event)).toBe(true) + }) + + it('returns true when message is in event.message (no error object)', () => { + const error = null + const event: SentryEvent = { + message: '[CRITICAL][sign in via EP] Failed: The operation was aborted.', + } + + expect(isUserAbortedOperation(error, event)).toBe(true) + }) + + it('returns true for event message with "signal is aborted"', () => { + const event: SentryEvent = { + message: '[CRITICAL][sign in via EP] Failed: signal is aborted without reason', + } + + expect(isUserAbortedOperation(undefined, event)).toBe(true) + }) + + it('returns false for regular errors', () => { + const error = new Error('Something went wrong') + const event: SentryEvent = {} + + expect(isUserAbortedOperation(error, event)).toBe(false) + }) + + it('returns false for empty inputs', () => { + expect(isUserAbortedOperation(null, {})).toBe(false) + expect(isUserAbortedOperation(undefined, {})).toBe(false) + }) + + it('handles non-Error objects gracefully', () => { + const error = { message: 'The operation was aborted.' 
} + const event: SentryEvent = {} + + // Non-Error objects should not match since we check instanceof Error + expect(isUserAbortedOperation(error, event)).toBe(false) + }) + }) + + describe('isCancellationRejection', () => { + it('returns true for cancellation type in extra.__serialized__', () => { + const event: SentryEvent = { + extra: { + __serialized__: { + msg: 'operation is manually canceled', + type: 'cancelation', + }, + }, + } + + expect(isCancellationRejection(event)).toBe(true) + }) + + it('returns false when type is not cancelation', () => { + const event: SentryEvent = { + extra: { + __serialized__: { + msg: 'some error', + type: 'error', + }, + }, + } + + expect(isCancellationRejection(event)).toBe(false) + }) + + it('returns false when __serialized__ is undefined', () => { + const event: SentryEvent = { + extra: {}, + } + + expect(isCancellationRejection(event)).toBe(false) + }) + + it('returns false when extra is undefined', () => { + const event: SentryEvent = {} + + expect(isCancellationRejection(event)).toBe(false) + }) + + it('returns false when __serialized__ has no type property', () => { + const event: SentryEvent = { + extra: { + __serialized__: { + msg: 'some message', + }, + }, + } + + expect(isCancellationRejection(event)).toBe(false) + }) + }) + + describe('isChallengeExpiredError', () => { + it('returns true for challenge-expired error message', () => { + const error = new Error('Non-Error promise rejection captured with value: challenge-expired') + const event: SentryEvent = {} + + expect(isChallengeExpiredError(error, event)).toBe(true) + }) + + it('returns true when challenge-expired is in event.message', () => { + const event: SentryEvent = { + message: 'challenge-expired', + } + + expect(isChallengeExpiredError(null, event)).toBe(true) + }) + + it('returns false for regular errors', () => { + const error = new Error('Something went wrong') + const event: SentryEvent = {} + + expect(isChallengeExpiredError(error, event)).toBe(false) + }) + + it('returns false for empty inputs', () => { + expect(isChallengeExpiredError(null, {})).toBe(false) + expect(isChallengeExpiredError(undefined, {})).toBe(false) + }) + + it('returns false for similar but different messages', () => { + const error = new Error('challenge expired') // No hyphen + const event: SentryEvent = {} + + expect(isChallengeExpiredError(error, event)).toBe(false) + }) + }) + + describe('integration scenarios', () => { + it('correctly identifies SUPABASE-APP-353 pattern (cancellation rejection)', () => { + // Based on actual Sentry issue SUPABASE-APP-353 + const event: SentryEvent = { + exception: { + values: [ + { + type: 'UnhandledRejection', + value: 'Object captured as promise rejection with keys: msg, type', + }, + ], + }, + extra: { + __serialized__: { + msg: 'operation is manually canceled', + type: 'cancelation', + }, + }, + } + + expect(isCancellationRejection(event)).toBe(true) + }) + + it('correctly identifies SUPABASE-APP-AFC pattern (wallet extension)', () => { + // Based on actual Sentry issue SUPABASE-APP-AFC + const event: SentryEvent = { + exception: { + values: [ + { + type: 'TypeError', + value: 'f.shouldSetTallyForCurrentProvider is not a function', + stacktrace: { + frames: [ + { + filename: + 'node_modules/.pnpm/@sentry+browser@10.27.0/node_modules/@sentry/browser/src/helpers.ts', + function: 'n', + } as StackFrame, + { + filename: 'app:///gt-window-provider-btc.js', + function: 'GateWindowProvider.internalListener', + } as StackFrame, + ], + }, + }, + ], + }, + } + + 
expect(isBrowserWalletExtensionError(event)).toBe(true) + }) + + it('correctly identifies SUPABASE-APP-92A pattern (wallet extension)', () => { + // Based on actual Sentry issue SUPABASE-APP-92A + const event: SentryEvent = { + exception: { + values: [ + { + type: 'TypeError', + value: 'en.shouldSetTallyForCurrentProvider is not a function', + stacktrace: { + frames: [ + { + filename: + 'node_modules/.pnpm/@sentry+browser@10.27.0/node_modules/@sentry/browser/src/helpers.ts', + function: 'n', + } as StackFrame, + { + filename: 'app:///gt-window-provider.js', + function: 'GateWindowProvider.internalListener', + } as StackFrame, + ], + }, + }, + ], + }, + } + + expect(isBrowserWalletExtensionError(event)).toBe(true) + }) + + it('correctly identifies SUPABASE-APP-BG6 pattern (user aborted)', () => { + // Based on actual Sentry issue SUPABASE-APP-BG6 + const error = new Error('The operation was aborted.') + const event: SentryEvent = { + message: '[CRITICAL][sign in via EP] Failed: The operation was aborted.', + } + + expect(isUserAbortedOperation(error, event)).toBe(true) + }) + + it('correctly identifies SUPABASE-APP-BG7 pattern (signal aborted)', () => { + // Based on actual Sentry issue SUPABASE-APP-BG7 + const error = new Error('signal is aborted without reason') + const event: SentryEvent = { + message: '[CRITICAL][sign in via EP] Failed: signal is aborted without reason', + } + + expect(isUserAbortedOperation(error, event)).toBe(true) + }) + + it('correctly identifies SUPABASE-APP-ACC pattern (challenge expired)', () => { + // Based on actual Sentry issue SUPABASE-APP-ACC + const error = new Error('Non-Error promise rejection captured with value: challenge-expired') + const event: SentryEvent = { + exception: { + values: [ + { + type: 'UnhandledRejection', + value: 'Non-Error promise rejection captured with value: challenge-expired', + }, + ], + }, + } + + expect(isChallengeExpiredError(error, event)).toBe(true) + }) + + it('does not filter legitimate errors', () => { + const error = new Error('Cannot read property "foo" of undefined') + const event: SentryEvent = { + exception: { + values: [ + { + type: 'TypeError', + value: 'Cannot read property "foo" of undefined', + stacktrace: { + frames: [{ filename: 'app:///_next/static/chunks/pages/index.js' } as StackFrame], + }, + }, + ], + }, + } + + expect(isBrowserWalletExtensionError(event)).toBe(false) + expect(isUserAbortedOperation(error, event)).toBe(false) + expect(isCancellationRejection(event)).toBe(false) + expect(isChallengeExpiredError(error, event)).toBe(false) + }) + }) +}) diff --git a/apps/studio/instrumentation-client.ts b/apps/studio/instrumentation-client.ts index 5dd543ad6d3fd..6f24298c4d8c6 100644 --- a/apps/studio/instrumentation-client.ts +++ b/apps/studio/instrumentation-client.ts @@ -3,8 +3,6 @@ // https://docs.sentry.io/platforms/javascript/guides/nextjs/ import * as Sentry from '@sentry/nextjs' -import { match } from 'path-to-regexp' - import { hasConsented } from 'common' import { IS_PLATFORM } from 'common/constants/environment' import { MIRRORED_BREADCRUMBS } from 'lib/breadcrumbs' @@ -50,6 +48,52 @@ function isThirdPartyError(frames: Sentry.StackFrame[] | undefined) { return !hasAppFrame } +// Filter browser wallet extension errors (e.g., Gate.io wallet) +// These errors come from injected wallet scripts and are not actionable +// Examples: SUPABASE-APP-AFC, SUPABASE-APP-92A +export function isBrowserWalletExtensionError(event: Sentry.Event): boolean { + const frames = event.exception?.values?.flatMap((e) => 
e.stacktrace?.frames || []) || []
+  return frames.some((frame) => {
+    const filename = frame.filename || frame.abs_path || ''
+    return filename.includes('gt-window-provider') || filename.includes('wallet-provider')
+  })
+}
+
+// Filter user-aborted operations (intentional cancellations)
+// These are expected when users cancel requests or navigate away
+// Examples: SUPABASE-APP-BG6, SUPABASE-APP-BG7
+export function isUserAbortedOperation(error: unknown, event: Sentry.Event): boolean {
+  const errorMessage = error instanceof Error ? error.message : ''
+  const eventMessage = event.message || ''
+  const message = errorMessage || eventMessage
+
+  return (
+    message.includes('operation was aborted') ||
+    message.includes('signal is aborted') ||
+    message.includes('manually canceled') ||
+    message.includes('AbortError')
+  )
+}
+
+// Filter cancellation promise rejections (e.g., from query cancellation)
+// These occur when operations are intentionally cancelled by the user
+// Example: SUPABASE-APP-353 (~466k events)
+export function isCancellationRejection(event: Sentry.Event): boolean {
+  const serialized = event.extra?.__serialized__ as Record<string, unknown> | undefined
+  return serialized?.type === 'cancelation'
+}
+
+// Filter challenge/captcha expired errors (user timeout)
+// These happen when users don't complete captcha in time - expected behavior
+// Example: SUPABASE-APP-ACC
+export function isChallengeExpiredError(error: unknown, event: Sentry.Event): boolean {
+  const errorMessage = error instanceof Error ? error.message : ''
+  const eventMessage = event.message || ''
+  const message = errorMessage || eventMessage
+
+  return message.includes('challenge-expired')
+}
+
 Sentry.init({
   dsn: process.env.NEXT_PUBLIC_SENTRY_DSN,
   ...(process.env.NEXT_PUBLIC_SENTRY_ENVIRONMENT && {
@@ -120,6 +164,19 @@ Sentry.init({
       return null
     }
 
+    if (isBrowserWalletExtensionError(event)) {
+      return null
+    }
+    if (isUserAbortedOperation(hint.originalException, event)) {
+      return null
+    }
+    if (isCancellationRejection(event)) {
+      return null
+    }
+    if (isChallengeExpiredError(hint.originalException, event)) {
+      return null
+    }
+
     if (event.breadcrumbs) {
       event.breadcrumbs = sanitizeArrayOfObjects(event.breadcrumbs) as Sentry.Breadcrumb[]
     }
@@ -130,6 +187,8 @@ Sentry.init({
     'ResizeObserver',
     's.getModifierState is not a function',
     /^Uncaught NetworkError: Failed to execute 'importScripts' on 'WorkerGlobalScope'/,
+    // Browser wallet extension errors (e.g., Gate.io wallet)
+    'shouldSetTallyForCurrentProvider is not a function',
     // [Joshen] We currently use stripe-js for customers to save their credit card data
     // I'm unable to reproduce this error on local, staging nor prod across chrome, safari or firefox
     // Based on https://github.com/stripe/stripe-js/issues/26, it seems like this error is safe to ignore,
@@ -153,6 +212,8 @@ Sentry.init({
     /504 Gateway Time-out/,
     // [Joshen] This is the one caused by Google translate in the browser + 3rd party extensions
     'Node.insertBefore: Child to insert before is not a child of this node',
+    // [Ali] Google Translate / browser extension DOM manipulation errors
+    'NotFoundError: The object can not be found here.',
     // [Joshen] This one sprung up recently and I've no idea where this is coming from
     'r.default.setDefaultLevel is not a function',
     // [Joshen] Safe to ignore, it an error from the copyToClipboard
@@ -160,31 +221,5 @@
   ],
 })
 
-// Replace dynamic query param with a template text
-// Support grouping sentry transaction
-function standardiseRouterUrl(url: string) {
-  let 
finalUrl = url - - const orgMatch = match('/org/:slug{/*path}', { decode: decodeURIComponent }) - const orgMatchResult = orgMatch(finalUrl) - if (orgMatchResult) { - finalUrl = finalUrl.replace((orgMatchResult.params as any).slug, '[slug]') - } - - const newOrgMatch = match('/new/:slug', { decode: decodeURIComponent }) - const newOrgMatchResult = newOrgMatch(finalUrl) - if (newOrgMatchResult) { - finalUrl = finalUrl.replace((newOrgMatchResult.params as any).slug, '[slug]') - } - - const projectMatch = match('/project/:ref{/*path}', { decode: decodeURIComponent }) - const projectMatchResult = projectMatch(finalUrl) - if (projectMatchResult) { - finalUrl = finalUrl.replace((projectMatchResult.params as any).ref, '[ref]') - } - - return finalUrl -} - // This export will instrument router navigations, and is only relevant if you enable tracing. export const onRouterTransitionStart = Sentry.captureRouterTransitionStart diff --git a/apps/www/next.config.mjs b/apps/www/next.config.mjs index f28c6463698b4..c401aafdcc066 100644 --- a/apps/www/next.config.mjs +++ b/apps/www/next.config.mjs @@ -102,6 +102,24 @@ const nextConfig = { }, ], }, + { + source: '/(docs|blog)/:path*', + headers: [ + { + key: 'X-Robots-Tag', + value: 'all', + }, + ], + }, + { + source: '/dashboard/:path*', + headers: [ + { + key: 'X-Robots-Tag', + value: 'noindex', + }, + ], + }, ] }, async rewrites() { diff --git a/docker/CHANGELOG.md b/docker/CHANGELOG.md index 74a863cb19c42..7a648bb8cdf2b 100644 --- a/docker/CHANGELOG.md +++ b/docker/CHANGELOG.md @@ -13,12 +13,54 @@ Check updates for each service to learn more. ## Unreleased +--- + +## [2026-02-16] + +⚠️ **Note:** This update includes several breaking changes, including a security fix for Analytics. Please check the details below. The following configuration files have been updated: `docker-compose.yml`, `.env.example`, `docker-compose.s3.yml`, `volumes/api/kong.yml`, and `volumes/logs/vector.yml`. 
+ ### Studio -- Added functions management UI (requires `docker-compose.yml` update) - PR [#40690](https://github.com/supabase/supabase/pull/40690), PR [#42322](https://github.com/supabase/supabase/pull/42322), PR [#42349](https://github.com/supabase/supabase/pull/42349), PR [#42350](https://github.com/supabase/supabase/pull/42350) +- Updated to `2026.02.16-sha-26c615c` +- Added Edge Functions management UI (requires `docker-compose.yml` update) - PR [#40690](https://github.com/supabase/supabase/pull/40690), PR [#42322](https://github.com/supabase/supabase/pull/42322), PR [#42349](https://github.com/supabase/supabase/pull/42349), PR [#42350](https://github.com/supabase/supabase/pull/42350) ### MCP Server - Updated to `v0.6.3` - [Release](https://github.com/supabase-community/supabase-mcp/releases/tag/v0.6.3) +### Auth + +- Updated to `v2.186.0` - [Changelog](https://github.com/supabase/auth/blob/master/CHANGELOG.md) | [Release](https://github.com/supabase/auth/releases/tag/v2.186.0) + +### PostgREST + +- Updated to `v14.5` - [Changelog](https://github.com/PostgREST/postgrest/blob/main/CHANGELOG.md) | [Release](https://github.com/PostgREST/postgrest/releases/tag/v14.5) + +### Realtime + +- Updated to `v2.76.5` - [Release](https://github.com/supabase/realtime/releases/tag/v2.76.5) + +### Storage + +- Updated to `v1.37.8` - [Release](https://github.com/supabase/storage/releases/tag/v1.37.8) +- ⚠️ Added configuration to access buckets via `/storage/v1/s3` endpoint (requires `docker-compose.s3.yml` update) - PR [#37185](https://github.com/supabase/supabase/pull/37185) +- ⚠️ Changed environment variable configuration for Storage (requires `docker-compose.yml`, `.env.example` and `.env` update) - PR [#37185](https://github.com/supabase/supabase/pull/37185), PR [#42862](https://github.com/supabase/supabase/pull/42862) + +### Edge Runtime + +- Updated to `v1.70.3` - [Release](https://github.com/supabase/edge-runtime/releases/tag/v1.70.3) + +### Analytics (Logflare) + +- Updated to `v1.31.2` - [Release](https://github.com/Logflare/logflare/releases/tag/v1.31.2) +- ⚠️ Changed default configuration to disable Logflare on `0.0.0.0:4000` to prevent access to `/dashboard` (requires `docker-compose.yml` update). 
Read more in "Production Recommendations" section of Logflare [documentation](https://supabase.com/docs/reference/self-hosting-analytics/introduction) - PR [#42857](https://github.com/supabase/supabase/pull/42857) +- ⚠️ Changed Kong routes to not include `/analytics/v1` by default (requires `/volumes/api/kong.yml` update) - PR [#42857](https://github.com/supabase/supabase/pull/42857) + +### Vector + +- Updated to `0.53.0-alpine` - [Changelog](https://vector.dev/releases/0.53.0/) | [Release](https://github.com/vectordotdev/vector/releases/tag/v0.53.0) +- ⚠️ Major version jump from `0.28.1` (requires `volumes/logs/vector.yml` update) - PR [#42525](https://github.com/supabase/supabase/pull/42525) +- ⚠️ Changed Postgres sink configuration to bypass Kong (requires `volumes/logs/vector.yml` update) - PR [#42857](https://github.com/supabase/supabase/pull/42857) +- ⚠️ Changed retry settings for all sinks to increase timeouts (requires `volumes/logs/vector.yml` update) - PR [#42857](https://github.com/supabase/supabase/pull/42857) + --- ## [2026-02-05] diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 50db1b44ac93c..11f952c7d6439 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -11,7 +11,7 @@ services: studio: container_name: supabase-studio - image: supabase/studio:2026.01.27-sha-6aa59ff + image: supabase/studio:2026.02.16-sha-26c615c restart: unless-stopped healthcheck: test: @@ -95,7 +95,7 @@ services: auth: container_name: supabase-auth - image: supabase/gotrue:v2.185.0 + image: supabase/gotrue:v2.186.0 restart: unless-stopped healthcheck: test: @@ -178,7 +178,7 @@ services: rest: container_name: supabase-rest - image: postgrest/postgrest:v14.3 + image: postgrest/postgrest:v14.5 restart: unless-stopped depends_on: db: @@ -202,7 +202,7 @@ services: realtime: # This container name looks inconsistent but is correct because realtime constructs tenant id by parsing the subdomain container_name: realtime-dev.supabase-realtime - image: supabase/realtime:v2.72.0 + image: supabase/realtime:v2.76.5 restart: unless-stopped depends_on: db: @@ -242,7 +242,7 @@ services: # To use S3 backed storage: docker compose -f docker-compose.yml -f docker-compose.s3.yml up storage: container_name: supabase-storage - image: supabase/storage-api:v1.37.1 + image: supabase/storage-api:v1.37.8 restart: unless-stopped volumes: - ./volumes/storage:/var/lib/storage:z @@ -330,7 +330,7 @@ services: functions: container_name: supabase-edge-functions - image: supabase/edge-runtime:v1.70.0 + image: supabase/edge-runtime:v1.70.3 restart: unless-stopped volumes: - ./volumes/functions:/home/deno/functions:Z @@ -354,7 +354,7 @@ services: analytics: container_name: supabase-analytics - image: supabase/logflare:1.30.3 + image: supabase/logflare:1.31.2 restart: unless-stopped # ports: # - 4000:4000 diff --git a/docker/versions.md b/docker/versions.md index 3cf853d82bb85..af00d5872d6e7 100644 --- a/docker/versions.md +++ b/docker/versions.md @@ -1,5 +1,15 @@ # Docker Image Versions +## 2026-02-16 +- supabase/studio:2026.02.16-sha-26c615c (prev supabase/studio:2026.01.27-sha-6aa59ff) +- supabase/gotrue:v2.186.0 (prev supabase/gotrue:v2.185.0) +- postgrest/postgrest:v14.5 (prev postgrest/postgrest:v14.3) +- supabase/realtime:v2.76.5 (prev supabase/realtime:v2.72.0) +- supabase/storage-api:v1.37.8 (prev supabase/storage-api:v1.37.1) +- supabase/edge-runtime:v1.70.3 (prev supabase/edge-runtime:v1.70.0) +- supabase/logflare:1.31.2 (prev supabase/logflare:1.30.3) +- timberio/vector:0.53.0-alpine 
(prev timberio/vector:0.28.1-alpine) + ## 2026-02-05 - supabase/storage-api:v1.37.1 (prev supabase/storage-api:v1.33.5)