diff --git a/.github/workflows/db-migration-backwards-compatibility.yaml b/.github/workflows/db-migration-backwards-compatibility.yaml index c09ce95553..e9700c67f1 100644 --- a/.github/workflows/db-migration-backwards-compatibility.yaml +++ b/.github/workflows/db-migration-backwards-compatibility.yaml @@ -150,6 +150,9 @@ jobs: - name: Wait on Svix run: pnpx wait-on tcp:localhost:8113 + - name: Wait on ClickHouse + run: pnpx wait-on http://localhost:8136/ping + - name: Initialize database run: pnpm run db:init diff --git a/.github/workflows/e2e-api-tests.yaml b/.github/workflows/e2e-api-tests.yaml index 3d33c11066..7f3cfd4c12 100644 --- a/.github/workflows/e2e-api-tests.yaml +++ b/.github/workflows/e2e-api-tests.yaml @@ -106,6 +106,9 @@ jobs: - name: Wait on QStash run: pnpx wait-on tcp:localhost:8125 + - name: Wait on ClickHouse + run: pnpx wait-on http://localhost:8136/ping + - name: Initialize database run: pnpm run db:init diff --git a/.github/workflows/e2e-custom-base-port-api-tests.yaml b/.github/workflows/e2e-custom-base-port-api-tests.yaml index d458b0653d..4e3a0acaa6 100644 --- a/.github/workflows/e2e-custom-base-port-api-tests.yaml +++ b/.github/workflows/e2e-custom-base-port-api-tests.yaml @@ -100,6 +100,9 @@ jobs: - name: Wait on QStash run: pnpx wait-on tcp:localhost:6725 + - name: Wait on ClickHouse + run: pnpx wait-on http://localhost:6736/ping + - name: Initialize database run: pnpm run db:init diff --git a/apps/backend/scripts/clickhouse-migrations.ts b/apps/backend/scripts/clickhouse-migrations.ts index b83185d68b..29f5918498 100644 --- a/apps/backend/scripts/clickhouse-migrations.ts +++ b/apps/backend/scripts/clickhouse-migrations.ts @@ -70,28 +70,28 @@ ALTER TABLE analytics_internal.events UPDATE data = CAST(concat( '{', - '\"refresh_token_id\":', toJSONString(JSONExtractString(toJSONString(data), 'refreshTokenId')), ',', - '\"is_anonymous\":', toJSONString(JSONExtract(toJSONString(data), 'isAnonymous', 'Bool')), ',', - '\"ip_info\":', if( - 
JSONExtractString(toJSONString(data), 'ipInfo.ip') = '', + '"refresh_token_id":', toJSONString(data.refreshTokenId::String), ',', + '"is_anonymous":', if(ifNull(data.isAnonymous::Nullable(Bool), false), 'true', 'false'), ',', + '"ip_info":', if( + isNull(data.ipInfo.ip::Nullable(String)), 'null', concat( '{', - '\"ip\":', toJSONString(JSONExtractString(toJSONString(data), 'ipInfo.ip')), ',', - '\"is_trusted\":', toJSONString(JSONExtract(toJSONString(data), 'ipInfo.isTrusted', 'Bool')), ',', - '\"country_code\":', toJSONString(JSONExtract(toJSONString(data), 'ipInfo.countryCode', 'Nullable(String)')), ',', - '\"region_code\":', toJSONString(JSONExtract(toJSONString(data), 'ipInfo.regionCode', 'Nullable(String)')), ',', - '\"city_name\":', toJSONString(JSONExtract(toJSONString(data), 'ipInfo.cityName', 'Nullable(String)')), ',', - '\"latitude\":', toJSONString(JSONExtract(toJSONString(data), 'ipInfo.latitude', 'Nullable(Float64)')), ',', - '\"longitude\":', toJSONString(JSONExtract(toJSONString(data), 'ipInfo.longitude', 'Nullable(Float64)')), ',', - '\"tz_identifier\":', toJSONString(JSONExtract(toJSONString(data), 'ipInfo.tzIdentifier', 'Nullable(String)')), + '"ip":', toJSONString(data.ipInfo.ip::String), ',', + '"is_trusted":', if(ifNull(data.ipInfo.isTrusted::Nullable(Bool), false), 'true', 'false'), ',', + '"country_code":', if(isNull(data.ipInfo.countryCode::Nullable(String)), 'null', toJSONString(data.ipInfo.countryCode::String)), ',', + '"region_code":', if(isNull(data.ipInfo.regionCode::Nullable(String)), 'null', toJSONString(data.ipInfo.regionCode::String)), ',', + '"city_name":', if(isNull(data.ipInfo.cityName::Nullable(String)), 'null', toJSONString(data.ipInfo.cityName::String)), ',', + '"latitude":', if(isNull(data.ipInfo.latitude::Nullable(Float64)), 'null', toString(data.ipInfo.latitude::Float64)), ',', + '"longitude":', if(isNull(data.ipInfo.longitude::Nullable(Float64)), 'null', toString(data.ipInfo.longitude::Float64)), ',', + '"tz_identifier":', 
if(isNull(data.ipInfo.tzIdentifier::Nullable(String)), 'null', toJSONString(data.ipInfo.tzIdentifier::String)), '}' ) ), '}' ) AS JSON) WHERE event_type = '$token-refresh' - AND JSONHas(toJSONString(data), 'refreshTokenId'); + AND data.refreshTokenId::Nullable(String) IS NOT NULL; `; // Normalizes legacy $sign-up-rule-trigger rows (camelCase JSON) to the new format: diff --git a/apps/backend/src/app/api/latest/internal/external-db-sync/poller/route.ts b/apps/backend/src/app/api/latest/internal/external-db-sync/poller/route.ts index 1b94af32b5..d37ef118d2 100644 --- a/apps/backend/src/app/api/latest/internal/external-db-sync/poller/route.ts +++ b/apps/backend/src/app/api/latest/internal/external-db-sync/poller/route.ts @@ -172,13 +172,11 @@ export const GET = createSmartRouteHandler({ } const flowControl = options.flowControl as UpstashRequest["flowControl"]; - const deduplicationId = options.deduplicationId as UpstashRequest["deduplicationId"]; return { url: fullUrl, body: options.body, ...(flowControl ? { flowControl } : {}), - ...(deduplicationId ? 
{ deduplicationId } : {}) }; } diff --git a/apps/backend/src/lib/external-db-sync-queue.ts b/apps/backend/src/lib/external-db-sync-queue.ts index 047eaa9d14..93593acedf 100644 --- a/apps/backend/src/lib/external-db-sync-queue.ts +++ b/apps/backend/src/lib/external-db-sync-queue.ts @@ -33,8 +33,7 @@ export async function enqueueExternalDbSyncBatch(tenancyIds: string[]): Promise< json_build_object( 'url', '/api/latest/internal/external-db-sync/sync-engine', 'body', json_build_object('tenancyId', t.tenancy_id), - 'flowControl', json_build_object('key', 'sentinel-sync-key', 'parallelism', 20), - 'deduplicationId', t.tenancy_id + 'flowControl', json_build_object('key', 'sentinel-sync-key', 'parallelism', 20) ), NULL, 'sentinel-sync-key-' || t.tenancy_id diff --git a/apps/backend/src/lib/external-db-sync.ts b/apps/backend/src/lib/external-db-sync.ts index 7a78dfbd0a..62ac6536bd 100644 --- a/apps/backend/src/lib/external-db-sync.ts +++ b/apps/backend/src/lib/external-db-sync.ts @@ -566,6 +566,9 @@ async function syncPostgresMapping( if (rows.length === 0) { break; } + if (rows.length > 1) { + console.log("db-sync-postgres: more than 1 row returned from source db fetch", { tenancyId, numRows: rows.length }); + } await pushRowsToExternalDb( externalClient, @@ -644,6 +647,9 @@ async function syncClickhouseMapping( if (rows.length === 0) { break; } + if (rows.length > 1) { + console.log("db-sync-clickhouse: more than 1 row returned from source db fetch", { tenancyId, numRows: rows.length }); + } await pushRowsToClickhouse( client, diff --git a/apps/e2e/tests/backend/endpoints/api/v1/auth/sign-up-rules.test.ts b/apps/e2e/tests/backend/endpoints/api/v1/auth/sign-up-rules.test.ts index 9d22c004d6..0c8c1cb5db 100644 --- a/apps/e2e/tests/backend/endpoints/api/v1/auth/sign-up-rules.test.ts +++ b/apps/e2e/tests/backend/endpoints/api/v1/auth/sign-up-rules.test.ts @@ -266,8 +266,8 @@ describe("sign-up rules", () => { "status": 403, "body": { "code": "SIGN_UP_REJECTED", - "details": { 
"message": "Your sign up was rejected. Please contact us for more information." }, - "error": "Your sign up was rejected. Please contact us for more information.", + "details": { "message": "Your sign up was rejected by an administrator's sign-up rule." }, + "error": "Your sign up was rejected by an administrator's sign-up rule.", }, "headers": Headers { "x-stack-known-error": "SIGN_UP_REJECTED", diff --git a/package.json b/package.json index 3c625515d5..6ed82b4f40 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,8 @@ "stop-deps": "POSTGRES_DELAY_MS=0 pnpm run deps-compose kill && POSTGRES_DELAY_MS=0 pnpm run deps-compose down -v", "wait-until-postgres-is-ready:pg_isready": "until pg_isready -h localhost -p ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}28 && pg_isready -h localhost -p ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}34; do sleep 1; done", "wait-until-postgres-is-ready": "command -v pg_isready >/dev/null 2>&1 && pnpm run wait-until-postgres-is-ready:pg_isready || sleep 10 # not everyone has pg_isready installed, so we fallback to sleeping", - "start-deps:no-delay": "pnpm pre && pnpm run deps-compose up --detach --build && pnpm run wait-until-postgres-is-ready && pnpm run db:init && echo \"\\nDependencies started in the background as Docker containers. 'pnpm run stop-deps' to stop them\"n", + "wait-until-clickhouse-is-ready": "pnpx wait-on http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}36/ping", + "start-deps:no-delay": "pnpm pre && pnpm run deps-compose up --detach --build && pnpm run wait-until-postgres-is-ready && pnpm run wait-until-clickhouse-is-ready && pnpm run db:init && echo \"\\nDependencies started in the background as Docker containers. 
'pnpm run stop-deps' to stop them\"", "start-deps": "POSTGRES_DELAY_MS=${POSTGRES_DELAY_MS:-0} pnpm run start-deps:no-delay", "restart-deps": "pnpm pre && pnpm run stop-deps && pnpm run start-deps", "restart-deps:no-delay": "pnpm pre && pnpm run stop-deps && pnpm run start-deps:no-delay", diff --git a/packages/stack-shared/src/config/db-sync-mappings.ts b/packages/stack-shared/src/config/db-sync-mappings.ts index 2a6d2e1abd..65e839446c 100644 --- a/packages/stack-shared/src/config/db-sync-mappings.ts +++ b/packages/stack-shared/src/config/db-sync-mappings.ts @@ -35,9 +35,9 @@ export const DEFAULT_DB_SYNC_MAPPINGS = { primary_email Nullable(String), primary_email_verified UInt8, signed_up_at DateTime64(3, 'UTC'), - client_metadata JSON, - client_read_only_metadata JSON, - server_metadata JSON, + client_metadata String, + client_read_only_metadata String, + server_metadata String, is_anonymous UInt8, restricted_by_admin UInt8, restricted_by_admin_reason Nullable(String),