diff --git a/package.json b/package.json index 7a8798bc33..a2aaa3e62c 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "capgo-app", "type": "module", - "version": "12.116.1", + "version": "12.116.3", "private": true, "license": "GPL-3.0", "scripts": { diff --git a/src/pages/admin/dashboard/replication.vue b/src/pages/admin/dashboard/replication.vue index b38ae26fbe..93b69bd93b 100644 --- a/src/pages/admin/dashboard/replication.vue +++ b/src/pages/admin/dashboard/replication.vue @@ -10,7 +10,7 @@ import { useRouter } from 'vue-router' import AdminStatsCard from '~/components/admin/AdminStatsCard.vue' import Spinner from '~/components/Spinner.vue' import { formatLocalDateTime } from '~/services/date' -import { defaultApiHost } from '~/services/supabase' +import { defaultApiHost, useSupabase } from '~/services/supabase' import { useDisplayStore } from '~/stores/display' import { useMainStore } from '~/stores/main' @@ -97,13 +97,30 @@ const checkedAt = computed(() => { return formatLocalDateTime(data.value.checked_at) }) +const internalReplicationSecret = import.meta.env.VITE_REPLICATION_API_SECRET as string | undefined + async function loadReplicationStatus() { isLoading.value = true errorMessage.value = null try { + const headers: Record<string, string> = {} + if (internalReplicationSecret) { + headers.apisecret = internalReplicationSecret + } + else { + const supabase = useSupabase() + const { data: { session } } = await supabase.auth.getSession() + + if (!session?.access_token) + throw new Error('No session available and replication secret is not configured') + + headers.Authorization = `Bearer ${session.access_token}` + } + const response = await fetch(`${defaultApiHost}/replication`, { method: 'GET', + headers, }) const payload = await response.json().catch(() => null) as ReplicationStatusResponse | null diff --git a/src/services/photos.ts b/src/services/photos.ts index 1d16e4b7eb..c166cee7dd 100644 --- a/src/services/photos.ts +++ b/src/services/photos.ts 
@@ -13,6 +13,19 @@ const supabase = useSupabase() const main = useMainStore() const organizationStore = useOrganizationStore() +function normalizeImageStoragePath(path?: string | null) { + if (!path) + return '' + + const pathWithoutQuery = path.split('?')[0] + const signedUrlRegex = /\/storage\/v1\/object\/(?:public\/|sign\/)?images\/(.+)$/ + const signedUrlMatch = signedUrlRegex.exec(pathWithoutQuery) + if (signedUrlMatch?.[1]) + return signedUrlMatch[1].replace(/^\/+/, '') + + return pathWithoutQuery.replace(/^images\//, '').replace(/^\/+/, '') +} + async function uploadPhotoShared( data: string, storagePath: string, @@ -51,6 +64,17 @@ async function uploadPhotoUser(formId: string, data: string, fileName: string, c return } + let previousImagePath = '' + const { data: currentUser, error: currentUserError } = await supabase + .from('users') + .select('image_url') + .eq('id', safeUserId) + .maybeSingle() + if (currentUserError) + console.error('cannot fetch current user image before update', currentUserError) + else + previousImagePath = normalizeImageStoragePath(currentUser?.image_url) + const { data: usr, error: dbError } = await supabase .from('users') .update({ image_url: storagePath }) @@ -61,8 +85,24 @@ async function uploadPhotoUser(formId: string, data: string, fileName: string, c if (!usr || dbError) { setErrors(formId, [wentWrong], {}) console.error('upload error', dbError) + const { error: cleanupUploadError } = await supabase + .storage + .from('images') + .remove([storagePath]) + if (cleanupUploadError) + console.error('cannot cleanup newly uploaded user image after db error', cleanupUploadError) return } + + if (previousImagePath && previousImagePath !== storagePath) { + const { error: deletePreviousImageError } = await supabase + .storage + .from('images') + .remove([previousImagePath]) + if (deletePreviousImageError) + console.error('cannot delete previous user image', deletePreviousImageError) + } + usr.image_url = signedUrl main.user = usr } diff 
--git a/supabase/functions/_backend/files/preview.ts b/supabase/functions/_backend/files/preview.ts index 03ac593228..59f474b8e3 100644 --- a/supabase/functions/_backend/files/preview.ts +++ b/supabase/functions/_backend/files/preview.ts @@ -180,12 +180,35 @@ export async function handlePreviewRequest(c: Context): // Use admin client - preview is public when allow_preview is enabled const supabase = supabaseAdmin(c) - // Get app settings to check if preview is enabled (case-insensitive since frontend lowercases) - const { data: appData, error: appError } = await supabase + // Get app settings to check if preview is enabled. + // Try exact match first (prevents wildcard collisions), then fallback to + // case-insensitive match for preview URLs that were lowercased. + const exactLookup = await supabase .from('apps') .select('app_id, allow_preview') - .ilike('app_id', appId) - .single() + .eq('app_id', appId) + .maybeSingle() + + let appData = exactLookup.data + let appError = exactLookup.error + + if (!appData && !appError) { + const escapedAppId = appId + .toLowerCase() + .replace(/\\/g, '\\\\') + .replace(/%/g, '\\%') + .replace(/_/g, '\\_') + + const fallbackLookup = await supabase + .from('apps') + .select('app_id, allow_preview') + .ilike('app_id', escapedAppId) + .limit(1) + .maybeSingle() + + appData = fallbackLookup.data + appError = fallbackLookup.error + } if (appError || !appData) { throw simpleError('app_not_found', 'App not found', { appId }) diff --git a/supabase/functions/_backend/public/build/index.ts b/supabase/functions/_backend/public/build/index.ts index 25ef4dcabe..06faa0a34d 100644 --- a/supabase/functions/_backend/public/build/index.ts +++ b/supabase/functions/_backend/public/build/index.ts @@ -19,6 +19,7 @@ import { getBuildStatus } from './status.ts' import { tusProxy } from './upload.ts' export const app = honoFactory.createApp() +const uploadWriteMiddleware = middlewareKey(['all', 'write']) // POST /build/request - Request a new native 
build app.post('/request', middlewareKey(['all', 'write']), async (c) => { @@ -89,11 +90,22 @@ app.post('/upload/:jobId', middlewareKey(['all', 'write']), async (c) => { }) // HEAD /build/upload/:jobId/* - Check TUS upload progress (proxied to builder) -app.on('HEAD', '/upload/:jobId/*', middlewareKey(['all', 'write']), async (c) => { - const jobId = c.req.param('jobId') - const apikey = c.get('apikey') as Database['public']['Tables']['apikeys']['Row'] - return tusProxy(c, jobId, apikey) -}) +// Hono resolves HEAD via GET route matching, so we gate by request method here. +app.get( + '/upload/:jobId/*', + async (c, next) => { + if (c.req.method !== 'HEAD') { + return c.notFound() + } + return next() + }, + uploadWriteMiddleware, + async (c) => { + const jobId = c.req.param('jobId') + const apikey = c.get('apikey') as Database['public']['Tables']['apikeys']['Row'] + return tusProxy(c, jobId, apikey) + }, +) // PATCH /build/upload/:jobId/* - Upload TUS chunk (proxied to builder) app.patch('/upload/:jobId/*', middlewareKey(['all', 'write']), async (c) => { diff --git a/supabase/functions/_backend/public/replication.ts b/supabase/functions/_backend/public/replication.ts index 782de43775..1aa39218ca 100644 --- a/supabase/functions/_backend/public/replication.ts +++ b/supabase/functions/_backend/public/replication.ts @@ -1,7 +1,10 @@ +import type { Context } from 'hono' +import type { MiddlewareKeyVariables } from '../utils/hono.ts' import { sql } from 'drizzle-orm' -import { honoFactory, useCors } from '../utils/hono.ts' +import { getClaimsFromJWT, honoFactory, middlewareAPISecret, quickError, useCors } from '../utils/hono.ts' import { cloudlogErr } from '../utils/logging.ts' import { closeClient, getDrizzleClient, getPgClient, logPgError } from '../utils/pg.ts' +import { supabaseClient } from '../utils/supabase.ts' const DEFAULT_THRESHOLD_SECONDS = 180 const DEFAULT_THRESHOLD_BYTES = 16 * 1024 * 1024 @@ -24,13 +27,6 @@ interface ReplicationSlotLag { reasons: string[] } 
-interface ReplicationErrorInfo { - message: string - code?: string - detail?: string - hint?: string -} - function toNumber(value: unknown): number | null { if (value === null || value === undefined) return null @@ -40,19 +36,6 @@ function toNumber(value: unknown): number | null { return num } -function getErrorInfo(error: unknown): ReplicationErrorInfo { - if (error instanceof Error) { - const err = error as Error & { code?: string, detail?: string, hint?: string } - return { - message: err.message, - code: err.code, - detail: err.detail, - hint: err.hint, - } - } - return { message: String(error) } -} - function buildReplicationQuery(mode: ReplicationQueryMode) { const slotsCte = sql` WITH slots AS ( @@ -162,7 +145,51 @@ export const app = honoFactory.createApp() app.use('*', useCors) +type ReplicationContext = Context + +async function validateReplicationAccess(c: ReplicationContext) { + const apiSecret = c.req.header('apisecret') + + if (apiSecret) { + await middlewareAPISecret(c, async () => {}) + return + } + + const authorization = c.req.header('authorization') + if (!authorization) { + throw quickError(401, 'no_authorization', 'Authorization header or apisecret is required') + } + + const claims = getClaimsFromJWT(authorization) + if (!claims?.sub) { + cloudlogErr({ requestId: c.get('requestId'), message: 'replication_invalid_jwt' }) + throw quickError(401, 'invalid_jwt', 'Invalid JWT') + } + + c.set('authorization', authorization) + c.set('auth', { + userId: claims.sub, + authType: 'jwt', + apikey: null, + jwt: authorization, + }) + + const userClient = supabaseClient(c, authorization) + const { data: isAdmin, error: adminError } = await userClient.rpc('is_admin') + if (adminError) { + cloudlogErr({ requestId: c.get('requestId'), message: 'replication_is_admin_error', error: adminError }) + throw quickError(500, 'is_admin_error', 'Unable to verify admin rights') + } + + if (!isAdmin) { + cloudlogErr({ requestId: c.get('requestId'), message: 
'replication_not_admin', userId: claims.sub }) + throw quickError(403, 'not_admin', 'Not admin - only admin users can access replication status') + } +} + app.get('/', async (c) => { + await validateReplicationAccess(c) + const thresholdSeconds = DEFAULT_THRESHOLD_SECONDS const thresholdBytes = DEFAULT_THRESHOLD_BYTES @@ -248,16 +275,11 @@ app.get('/', async (c) => { } catch (error) { logPgError(c, 'replication_lag', error) - const errorInfo = getErrorInfo(error) cloudlogErr({ requestId: c.get('requestId'), message: 'replication_lag_error', error }) return c.json({ status: 'ko', error: 'replication_lag_error', - message: 'Failed to fetch replication slot lag', - error_message: errorInfo.message, - error_code: errorInfo.code, - error_detail: errorInfo.detail, - error_hint: errorInfo.hint, + message: 'Failed to fetch replication lag', threshold_seconds: thresholdSeconds, threshold_minutes: Number((thresholdSeconds / 60).toFixed(2)), threshold_bytes: thresholdBytes, diff --git a/supabase/functions/_backend/triggers/cron_clean_orphan_images.ts b/supabase/functions/_backend/triggers/cron_clean_orphan_images.ts index 5e73c1668e..2b8cb3dcc9 100644 --- a/supabase/functions/_backend/triggers/cron_clean_orphan_images.ts +++ b/supabase/functions/_backend/triggers/cron_clean_orphan_images.ts @@ -6,11 +6,25 @@ import { supabaseAdmin } from '../utils/supabase.ts' export const app = new Hono() +function normalizeImageStoragePath(path?: string | null) { + if (!path) + return '' + + const pathWithoutQuery = path.split('?')[0] + const signedUrlRegex = /\/storage\/v1\/object\/(?:public\/|sign\/)?images\/(.+)$/ + const signedUrlMatch = signedUrlRegex.exec(pathWithoutQuery) + if (signedUrlMatch?.[1]) + return signedUrlMatch[1].replace(/^\/+/, '') + + return pathWithoutQuery.replace(/^images\//, '').replace(/^\/+/, '') +} + // This CRON job cleans up orphaned images from storage // - User avatars stored at: images/{user_id}/* // - App icons stored at: images/org/{org_id}/{app_id}/icon 
// Images become orphaned when their associated user, org, or app is deleted // but the image cleanup failed or was not implemented at deletion time. +// It also removes stale user avatar files not linked by users.image_url. app.post('/', middlewareAPISecret, async (c) => { cloudlog({ requestId: c.get('requestId'), message: 'starting cron_clean_orphan_images' }) @@ -43,7 +57,7 @@ app.post('/', middlewareAPISecret, async (c) => { // Check if user exists const { data: user, error: userError } = await supabase .from('users') - .select('id') + .select('id, image_url') .eq('id', userId) .maybeSingle() @@ -53,7 +67,7 @@ app.post('/', middlewareAPISecret, async (c) => { continue } - // If user doesn't exist, delete their images + // If user doesn't exist, delete all images in their folder if (!user) { try { const { data: files } = await supabase @@ -75,6 +89,52 @@ app.post('/', middlewareAPISecret, async (c) => { cloudlogErr({ requestId: c.get('requestId'), message: 'error deleting orphaned user images', error, userId }) errors++ } + continue + } + + // User exists: delete stale avatar files not referenced by users.image_url + try { + const linkedImagePath = normalizeImageStoragePath(user.image_url) + const { data: files, error: filesListError } = await supabase + .storage + .from('images') + .list(userId) + + if (filesListError) { + cloudlogErr({ requestId: c.get('requestId'), message: 'error listing user images', error: filesListError, userId }) + errors++ + continue + } + + const filePaths = (files ?? 
[]) + .filter(file => file.id !== null) + .map(file => `${userId}/${file.name}`) + const staleFilePaths = filePaths.filter(path => path !== linkedImagePath) + + if (staleFilePaths.length > 0) { + const { error: removeError } = await supabase + .storage + .from('images') + .remove(staleFilePaths) + + if (removeError) { + cloudlogErr({ requestId: c.get('requestId'), message: 'error deleting stale user images', error: removeError, userId }) + errors++ + continue + } + + deletedUserImages += staleFilePaths.length + cloudlog({ + requestId: c.get('requestId'), + message: 'deleted stale user images not linked in profile', + count: staleFilePaths.length, + userId, + }) + } + } + catch (error) { + cloudlogErr({ requestId: c.get('requestId'), message: 'error deleting stale user images', error, userId }) + errors++ } } } diff --git a/supabase/functions/_backend/utils/version.ts b/supabase/functions/_backend/utils/version.ts index ede3bcd34a..162977c154 100644 --- a/supabase/functions/_backend/utils/version.ts +++ b/supabase/functions/_backend/utils/version.ts @@ -1,3 +1,3 @@ -export const version = '12.116.1' +export const version = '12.116.3' // This is automatically generated by the update-version.js script don't edit it manually diff --git a/supabase/migrations/20260224100000_fix_webhook_rls_org_scoping.sql b/supabase/migrations/20260224100000_fix_webhook_rls_org_scoping.sql new file mode 100644 index 0000000000..efbe7225df --- /dev/null +++ b/supabase/migrations/20260224100000_fix_webhook_rls_org_scoping.sql @@ -0,0 +1,184 @@ +-- ============================================================================= +-- Migration: Fix webhook RLS policies for org-scoped API key isolation +-- +-- The 20260107000000 migration introduced anon role support for webhook endpoints, +-- but still resolves identity through get_identity(...), which does not enforce +-- limited_to_orgs. 
This allows read-mode API keys scoped to a single org to read +-- webhook secrets and delivery logs from other orgs. +-- +-- This migration switches webhook and webhook_deliveries RLS checks to +-- get_identity_org_allowed(..., org_id), so org restrictions from API keys are +-- enforced per row. +-- ============================================================================= + +-- ===================================================== +-- Recreate webhooks policies with org-scoped API key identity +-- ===================================================== + +DROP POLICY IF EXISTS "Allow org members to select webhooks" ON public.webhooks; +DROP POLICY IF EXISTS "Allow admin to insert webhooks" ON public.webhooks; +DROP POLICY IF EXISTS "Allow admin to update webhooks" ON public.webhooks; +DROP POLICY IF EXISTS "Allow admin to delete webhooks" ON public.webhooks; + +CREATE POLICY "Allow org members to select webhooks" +ON public.webhooks +FOR SELECT +TO authenticated, anon +USING ( + public.check_min_rights( + 'read'::public.user_min_right, + ( + SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +); + +CREATE POLICY "Allow admin to insert webhooks" +ON public.webhooks +FOR INSERT +TO authenticated, anon +WITH CHECK ( + public.check_min_rights( + 'admin'::public.user_min_right, + ( + SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +); + +CREATE POLICY "Allow admin to update webhooks" +ON public.webhooks +FOR UPDATE +TO authenticated, anon +USING ( + public.check_min_rights( + 'admin'::public.user_min_right, + ( + SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +) +WITH CHECK ( + public.check_min_rights( + 
'admin'::public.user_min_right, + ( + SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +); + +CREATE POLICY "Allow admin to delete webhooks" +ON public.webhooks +FOR DELETE +TO authenticated, anon +USING ( + public.check_min_rights( + 'admin'::public.user_min_right, + ( + SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +); + +-- ===================================================== +-- Recreate webhook_deliveries policies with org-scoped API key identity +-- ===================================================== + +DROP POLICY IF EXISTS "Allow org members to select webhook_deliveries" ON public.webhook_deliveries; +DROP POLICY IF EXISTS "Allow admin to insert webhook_deliveries" ON public.webhook_deliveries; +DROP POLICY IF EXISTS "Allow admin to update webhook_deliveries" ON public.webhook_deliveries; + +CREATE POLICY "Allow org members to select webhook_deliveries" +ON public.webhook_deliveries +FOR SELECT +TO authenticated, anon +USING ( + public.check_min_rights( + 'read'::public.user_min_right, + ( + SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +); + +CREATE POLICY "Allow admin to insert webhook_deliveries" +ON public.webhook_deliveries +FOR INSERT +TO authenticated, anon +WITH CHECK ( + public.check_min_rights( + 'admin'::public.user_min_right, + ( + SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +); + +CREATE POLICY "Allow admin to update webhook_deliveries" +ON public.webhook_deliveries +FOR UPDATE +TO authenticated, anon +USING ( + public.check_min_rights( + 'admin'::public.user_min_right, + ( + 
SELECT + public.get_identity_org_allowed( + '{read,upload,write,all}'::public.key_mode [], + org_id + ) + ), + org_id, + null::character varying, + null::bigint + ) +); diff --git a/tests/build-upload-head-routing.test.ts b/tests/build-upload-head-routing.test.ts new file mode 100644 index 0000000000..6349886f72 --- /dev/null +++ b/tests/build-upload-head-routing.test.ts @@ -0,0 +1,26 @@ +import { describe, expect, it } from 'vitest' +import { app } from '../supabase/functions/_backend/public/build/index.ts' + +// Intentionally uses app.request() as a lightweight routing smoke test: +// this validates Hono HEAD-via-GET matching and guard ordering before worker/binding setup. +describe('build upload HEAD routing', () => { + it.concurrent('routes HEAD /upload/:jobId/* through auth middleware', async () => { + const response = await app.request(new Request('http://localhost/upload/test-job/file.zip', { + method: 'HEAD', + headers: { + 'Tus-Resumable': '1.0.0', + }, + })) + + expect(response.status).not.toBe(404) + expect([400, 401]).toContain(response.status) + }) + + it.concurrent('keeps GET /upload/:jobId/* as not found', async () => { + const response = await app.request(new Request('http://localhost/upload/test-job/file.zip', { + method: 'GET', + })) + + expect(response.status).toBe(404) + }) +})