diff --git a/.github/workflows/docker-server-build-run.yaml b/.github/workflows/docker-server-build-run.yaml index 648d95646a..a14aa2817e 100644 --- a/.github/workflows/docker-server-build-run.yaml +++ b/.github/workflows/docker-server-build-run.yaml @@ -22,6 +22,13 @@ jobs: docker run -d --name db -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=password -e POSTGRES_DB=stackframe -p 8128:5432 postgres:latest sleep 5 docker logs db + + - name: Setup clickhouse + run: | + docker run -d --name clickhouse -e CLICKHOUSE_DB=analytics -e CLICKHOUSE_USER=stackframe -e CLICKHOUSE_PASSWORD=password -e CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 -p 8133:8123 clickhouse/clickhouse-server:25.10 + sleep 5 + docker logs clickhouse + - name: Build Docker image run: docker build -f docker/server/Dockerfile -t server . diff --git a/apps/backend/.env b/apps/backend/.env index bc243d9344..00225682a9 100644 --- a/apps/backend/.env +++ b/apps/backend/.env @@ -81,6 +81,13 @@ STACK_QSTASH_TOKEN= STACK_QSTASH_CURRENT_SIGNING_KEY= STACK_QSTASH_NEXT_SIGNING_KEY= +# Clickhouse +STACK_CLICKHOUSE_URL=# URL of the Clickhouse instance +STACK_CLICKHOUSE_ADMIN_USER=# username of the admin account +STACK_CLICKHOUSE_ADMIN_PASSWORD=# password of the admin account +STACK_CLICKHOUSE_EXTERNAL_PASSWORD=# a randomly generated secure string. The user account will be created automatically + + # Misc STACK_ACCESS_TOKEN_EXPIRATION_TIME=# enter the expiration time for the access token here. Optional, don't specify it for default value STACK_SETUP_ADMIN_GITHUB_ID=# enter the account ID of the admin user here, and after running the seed script they will be able to access the internal project in the Stack dashboard. 
Optional, don't specify it for default value diff --git a/apps/backend/.env.development b/apps/backend/.env.development index 574ae52fd4..a885fe4a0e 100644 --- a/apps/backend/.env.development +++ b/apps/backend/.env.development @@ -73,3 +73,9 @@ STACK_QSTASH_URL=http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}25 STACK_QSTASH_TOKEN=eyJVc2VySUQiOiJkZWZhdWx0VXNlciIsIlBhc3N3b3JkIjoiZGVmYXVsdFBhc3N3b3JkIn0= STACK_QSTASH_CURRENT_SIGNING_KEY=sig_7kYjw48mhY7kAjqNGcy6cr29RJ6r STACK_QSTASH_NEXT_SIGNING_KEY=sig_5ZB6DVzB1wjE8S6rZ7eenA8Pdnhs + +# Clickhouse +STACK_CLICKHOUSE_URL=http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}36 +STACK_CLICKHOUSE_ADMIN_USER=stackframe +STACK_CLICKHOUSE_ADMIN_PASSWORD=PASSWORD-PLACEHOLDER--9gKyMxJeMx +STACK_CLICKHOUSE_EXTERNAL_PASSWORD=PASSWORD-PLACEHOLDER--EZeHscBMzE diff --git a/apps/backend/package.json b/apps/backend/package.json index 4bff40acff..a1bf6529d0 100644 --- a/apps/backend/package.json +++ b/apps/backend/package.json @@ -25,6 +25,7 @@ "codegen": "pnpm run with-env pnpm run generate-migration-imports && pnpm run with-env bash -c 'if [ \"$STACK_ACCELERATE_ENABLED\" = \"true\" ]; then pnpm run prisma generate --no-engine; else pnpm run codegen-prisma; fi' && pnpm run codegen-docs && pnpm run codegen-route-info", "codegen:watch": "concurrently -n \"prisma,docs,route-info,migration-imports\" -k \"pnpm run codegen-prisma:watch\" \"pnpm run codegen-docs:watch\" \"pnpm run codegen-route-info:watch\" \"pnpm run generate-migration-imports:watch\"", "psql-inner": "psql $(echo $STACK_DATABASE_CONNECTION_STRING | sed 's/\\?.*$//')", + "clickhouse": "pnpm run with-env clickhouse-client --host localhost --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}37 --user stackframe --password PASSWORD-PLACEHOLDER--9gKyMxJeMx", "psql": "pnpm run with-env:dev pnpm run psql-inner", "prisma-studio": "pnpm run with-env:dev prisma studio --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}06 --browser none", "prisma:dev": "pnpm run with-env:dev prisma", @@ -50,6 
+51,7 @@ "dependencies": { "@ai-sdk/openai": "^1.3.23", "@aws-sdk/client-s3": "^3.855.0", + "@clickhouse/client": "^1.14.0", "@node-oauth/oauth2-server": "^5.1.0", "@opentelemetry/api": "^1.9.0", "@opentelemetry/api-logs": "^0.53.0", diff --git a/apps/backend/scripts/clickhouse-migrations.ts b/apps/backend/scripts/clickhouse-migrations.ts new file mode 100644 index 0000000000..4d78fdef2d --- /dev/null +++ b/apps/backend/scripts/clickhouse-migrations.ts @@ -0,0 +1,61 @@ +import { getClickhouseAdminClient } from "@/lib/clickhouse"; +import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env"; + +export async function runClickhouseMigrations() { + console.log("[Clickhouse] Running Clickhouse migrations..."); + const client = getClickhouseAdminClient(); + const clickhouseExternalPassword = getEnvVariable("STACK_CLICKHOUSE_EXTERNAL_PASSWORD"); + await client.exec({ + query: "CREATE USER IF NOT EXISTS limited_user IDENTIFIED WITH sha256_password BY {clickhouseExternalPassword:String}", + query_params: { clickhouseExternalPassword }, + }); + // todo: create migration files + await client.exec({ query: EXTERNAL_ANALYTICS_DB_SQL }); + await client.exec({ query: EVENTS_TABLE_BASE_SQL }); + await client.exec({ query: EVENTS_VIEW_SQL }); + const queries = [ + "REVOKE ALL PRIVILEGES ON *.* FROM limited_user;", + "REVOKE ALL FROM limited_user;", + "GRANT SELECT ON default.events TO limited_user;", + ]; + await client.exec({ + query: "CREATE ROW POLICY IF NOT EXISTS events_project_isolation ON default.events FOR SELECT USING project_id = getSetting('SQL_project_id') AND branch_id = getSetting('SQL_branch_id') TO limited_user", + }); + for (const query of queries) { + await client.exec({ query }); + } + console.log("[Clickhouse] Clickhouse migrations complete"); + await client.close(); +} + +const EVENTS_TABLE_BASE_SQL = ` +CREATE TABLE IF NOT EXISTS analytics_internal.events ( + event_type LowCardinality(String), + event_at DateTime64(3, 'UTC'), + data JSON, + 
project_id String, + branch_id String, + user_id String, + team_id String, + refresh_token_id String, + is_anonymous Boolean, + session_id String, + ip_address String, + created_at DateTime64(3, 'UTC') DEFAULT now64(3) +) +ENGINE MergeTree +PARTITION BY toYYYYMM(event_at) +ORDER BY (project_id, branch_id, event_at); +`; + +const EVENTS_VIEW_SQL = ` +CREATE OR REPLACE VIEW default.events +SQL SECURITY DEFINER +AS +SELECT * +FROM analytics_internal.events; +`; + +const EXTERNAL_ANALYTICS_DB_SQL = ` +CREATE DATABASE IF NOT EXISTS analytics_internal; +`; diff --git a/apps/backend/scripts/db-migrations.ts b/apps/backend/scripts/db-migrations.ts index 07cd5a1050..14733a6873 100644 --- a/apps/backend/scripts/db-migrations.ts +++ b/apps/backend/scripts/db-migrations.ts @@ -2,18 +2,25 @@ import { applyMigrations } from "@/auto-migrations"; import { MIGRATION_FILES_DIR, getMigrationFiles } from "@/auto-migrations/utils"; import { Prisma } from "@/generated/prisma/client"; import { globalPrismaClient, globalPrismaSchema, sqlQuoteIdent } from "@/prisma-client"; -import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env"; import { spawnSync } from "child_process"; import fs from "fs"; import path from "path"; import * as readline from "readline"; import { seed } from "../prisma/seed"; +import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env"; +import { runClickhouseMigrations } from "./clickhouse-migrations"; +import { getClickhouseAdminClient } from "@/lib/clickhouse"; + +const getClickhouseClient = () => getClickhouseAdminClient(); const dropSchema = async () => { await globalPrismaClient.$executeRaw(Prisma.sql`DROP SCHEMA ${sqlQuoteIdent(globalPrismaSchema)} CASCADE`); await globalPrismaClient.$executeRaw(Prisma.sql`CREATE SCHEMA ${sqlQuoteIdent(globalPrismaSchema)}`); await globalPrismaClient.$executeRaw(Prisma.sql`GRANT ALL ON SCHEMA ${sqlQuoteIdent(globalPrismaSchema)} TO postgres`); await globalPrismaClient.$executeRaw(Prisma.sql`GRANT ALL 
ON SCHEMA ${sqlQuoteIdent(globalPrismaSchema)} TO public`); + const clickhouseClient = getClickhouseClient(); + await clickhouseClient.command({ query: "DROP DATABASE IF EXISTS analytics_internal" }); + await clickhouseClient.command({ query: "CREATE DATABASE IF NOT EXISTS analytics_internal" }); }; @@ -163,6 +170,8 @@ const migrate = async (selectedMigrationFiles?: { migrationName: string, sql: st console.log('='.repeat(60) + '\n'); + await runClickhouseMigrations(); + return result; }; diff --git a/apps/backend/src/app/api/latest/internal/analytics/query/route.ts b/apps/backend/src/app/api/latest/internal/analytics/query/route.ts new file mode 100644 index 0000000000..44ebb4b1c3 --- /dev/null +++ b/apps/backend/src/app/api/latest/internal/analytics/query/route.ts @@ -0,0 +1,120 @@ +import { getClickhouseExternalClient, getQueryTimingStats, isClickhouseConfigured } from "@/lib/clickhouse"; +import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler"; +import { KnownErrors } from "@stackframe/stack-shared"; +import { adaptSchema, adminAuthTypeSchema, jsonSchema, yupBoolean, yupMixed, yupNumber, yupObject, yupRecord, yupString } from "@stackframe/stack-shared/dist/schema-fields"; +import { StackAssertionError } from "@stackframe/stack-shared/dist/utils/errors"; +import { Result } from "@stackframe/stack-shared/dist/utils/results"; +import { randomUUID } from "crypto"; + +export const POST = createSmartRouteHandler({ + metadata: { hidden: true }, + request: yupObject({ + auth: yupObject({ + type: adminAuthTypeSchema, + tenancy: adaptSchema, + }).defined(), + body: yupObject({ + include_all_branches: yupBoolean().default(false), + query: yupString().defined().nonEmpty(), + params: yupRecord(yupString().defined(), yupMixed().defined()).default({}), + timeout_ms: yupNumber().integer().min(1_000).default(10_000), + }).defined(), + }), + response: yupObject({ + statusCode: yupNumber().oneOf([200]).defined(), + bodyType: 
yupString().oneOf(["json"]).defined(), + body: yupObject({ + result: jsonSchema.defined(), + stats: yupObject({ + cpu_time: yupNumber().defined(), + wall_clock_time: yupNumber().defined(), + }).defined(), + }).defined(), + }), + async handler({ body, auth }) { + if (body.include_all_branches) { + throw new StackAssertionError("include_all_branches is not supported yet"); + } + if (!isClickhouseConfigured()) { + throw new StackAssertionError("ClickHouse is not configured"); + } + const client = getClickhouseExternalClient(); + const queryId = randomUUID(); + const resultSet = await Result.fromPromise(client.query({ + query: body.query, + query_id: queryId, + query_params: body.params, + clickhouse_settings: { + SQL_project_id: auth.tenancy.project.id, + SQL_branch_id: auth.tenancy.branchId, + max_execution_time: body.timeout_ms / 1000, + readonly: "1", + allow_ddl: 0, + max_result_rows: MAX_RESULT_ROWS.toString(), + max_result_bytes: MAX_RESULT_BYTES.toString(), + result_overflow_mode: "throw", + }, + format: "JSONEachRow", + })); + + if (resultSet.status === "error") { + const message = getSafeClickhouseErrorMessage(resultSet.error); + if (message === null) { + throw new StackAssertionError("Unknown Clickhouse error", { cause: resultSet.error }); + } + throw new KnownErrors.AnalyticsQueryError(message); + } + + const rows = await resultSet.data.json<Record<string, unknown>[]>(); + const stats = await getQueryTimingStats(client, queryId); + + return { + statusCode: 200, + bodyType: "json", + body: { + result: rows, + stats: { + cpu_time: stats.cpu_time_ms, + wall_clock_time: stats.wall_clock_time_ms, + }, + }, + }; + }, +}); + +const SAFE_CLICKHOUSE_ERROR_CODES = [ + 62, // SYNTAX_ERROR + 159, // TIMEOUT_EXCEEDED + 164, // READONLY + 158, // TOO_MANY_ROWS + 396, // TOO_MANY_ROWS_OR_BYTES + 636, // CANNOT_EXTRACT_TABLE_STRUCTURE +]; + +const UNSAFE_CLICKHOUSE_ERROR_CODES = [ + 36, // BAD_ARGUMENTS + 60, // UNKNOWN_TABLE + 497, // ACCESS_DENIED +]; + +const DEFAULT_CLICKHOUSE_ERROR_MESSAGE = 
"Error during execution of this query."; +const MAX_RESULT_ROWS = 10_000; +const MAX_RESULT_BYTES = 10 * 1024 * 1024; + +function getSafeClickhouseErrorMessage(error: unknown): string | null { + if (typeof error !== "object" || error === null || !("code" in error) || typeof error.code !== "string") { + return null; + } + const errorCode = Number(error.code); + if (isNaN(errorCode)) { + return null; + } + const message = "message" in error && typeof error.message === "string" ? error.message : null; + if (SAFE_CLICKHOUSE_ERROR_CODES.includes(errorCode)) { + return message; + } + if (UNSAFE_CLICKHOUSE_ERROR_CODES.includes(errorCode)) { + return DEFAULT_CLICKHOUSE_ERROR_MESSAGE; + } + return null; +} diff --git a/apps/backend/src/app/api/latest/internal/clickhouse/migrate-events/route.tsx b/apps/backend/src/app/api/latest/internal/clickhouse/migrate-events/route.tsx new file mode 100644 index 0000000000..6f1e329ac7 --- /dev/null +++ b/apps/backend/src/app/api/latest/internal/clickhouse/migrate-events/route.tsx @@ -0,0 +1,211 @@ +import { getClickhouseAdminClient, isClickhouseConfigured } from "@/lib/clickhouse"; +import { DEFAULT_BRANCH_ID } from "@/lib/tenancies"; +import { globalPrismaClient } from "@/prisma-client"; +import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler"; +import { yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields"; +import { StatusError } from "@stackframe/stack-shared/dist/utils/errors"; +import type { Prisma } from "@/generated/prisma/client"; + +type Cursor = { + created_at_millis: number, + id: string, +}; + +const parseMillisOrThrow = (value: number | undefined, field: string) => { + if (typeof value !== "number" || !Number.isFinite(value)) { + throw new StatusError(400, `Invalid ${field}`); + } + const parsed = new Date(value); + if (Number.isNaN(parsed.getTime())) { + throw new StatusError(400, `Invalid ${field}`); + } + return parsed; +}; + +const createClickhouseRows = (event: { + 
id: string, + systemEventTypeIds: string[], + data: any, + eventEndedAt: Date, + eventStartedAt: Date, + isWide: boolean, +}) => { + const dataRecord = typeof event.data === "object" && event.data !== null ? event.data as Record<string, unknown> : {}; + const clickhouseEventData = { + ...dataRecord, + is_wide: event.isWide, + event_started_at: event.eventStartedAt, + event_ended_at: event.eventEndedAt, + }; + const projectId = typeof dataRecord.projectId === "string" ? dataRecord.projectId : ""; + const branchId = DEFAULT_BRANCH_ID; + const userId = typeof dataRecord.userId === "string" ? dataRecord.userId : ""; + const teamId = typeof dataRecord.teamId === "string" ? dataRecord.teamId : ""; + const sessionId = typeof dataRecord.sessionId === "string" ? dataRecord.sessionId : ""; + const isAnonymous = typeof dataRecord.isAnonymous === "boolean" ? dataRecord.isAnonymous : false; + + const eventTypes = [...new Set(event.systemEventTypeIds)]; + + return eventTypes.map(eventType => ({ + event_type: eventType, + event_at: event.eventEndedAt, + data: clickhouseEventData, + project_id: projectId, + branch_id: branchId, + user_id: userId, + team_id: teamId, + session_id: sessionId, + is_anonymous: isAnonymous, + })); +}; + +export const POST = createSmartRouteHandler({ + metadata: { + summary: "Migrate analytics events from Postgres to ClickHouse", + description: "Internal-only endpoint to backfill existing events into ClickHouse.", + hidden: true, + }, + request: yupObject({ + auth: yupObject({ + project: yupObject({ + id: yupString().oneOf(["internal"]).defined(), + }).defined(), + user: yupObject({ + id: yupString().defined(), + }).optional(), + }).defined(), + body: yupObject({ + min_created_at_millis: yupNumber().integer().defined(), + max_created_at_millis: yupNumber().integer().defined(), + cursor: yupObject({ + created_at_millis: yupNumber().integer().defined(), + id: yupString().uuid().defined(), + }).optional(), + limit: yupNumber().integer().min(1).default(1000), + }).defined(), 
+ }), + response: yupObject({ + statusCode: yupNumber().oneOf([200]).defined(), + bodyType: yupString().oneOf(["json"]).defined(), + body: yupObject({ + total_events: yupNumber().defined(), + processed_events: yupNumber().defined(), + remaining_events: yupNumber().defined(), + migrated_events: yupNumber().defined(), + skipped_existing_events: yupNumber().defined(), + inserted_rows: yupNumber().defined(), + progress: yupNumber().min(0).max(1).defined(), + next_cursor: yupObject({ + created_at_millis: yupNumber().integer().defined(), + id: yupString().defined(), + }).nullable().defined(), + }).defined(), + }), + async handler({ body }) { + const minCreatedAt = parseMillisOrThrow(body.min_created_at_millis, "min_created_at_millis"); + const maxCreatedAt = parseMillisOrThrow(body.max_created_at_millis, "max_created_at_millis"); + if (minCreatedAt >= maxCreatedAt) { + throw new StatusError(400, "min_created_at_millis must be before max_created_at_millis"); + } + const cursorCreatedAt = body.cursor ? parseMillisOrThrow(body.cursor.created_at_millis, "cursor.created_at_millis") : undefined; + const cursorId = body.cursor?.id; + const limit = body.limit; + + const baseWhere: Prisma.EventWhereInput = { + createdAt: { + gte: minCreatedAt, + lt: maxCreatedAt, + }, + }; + + const cursorFilter: Prisma.EventWhereInput | undefined = (cursorCreatedAt && cursorId) ? { + OR: [ + { createdAt: { gt: cursorCreatedAt } }, + { createdAt: cursorCreatedAt, id: { gt: cursorId } }, + ], + } : undefined; + + const where: Prisma.EventWhereInput = cursorFilter + ? 
{ AND: [baseWhere, cursorFilter] } + : baseWhere; + + const totalEvents = await globalPrismaClient.event.count({ where: baseWhere }); + + const events = await globalPrismaClient.event.findMany({ + where, + orderBy: [ + { createdAt: "asc" }, + { id: "asc" }, + ], + take: limit, + }); + + let insertedRows = 0; + let migratedEvents = 0; + + if (events.length) { + if (!isClickhouseConfigured()) { + throw new StatusError(StatusError.ServiceUnavailable, "ClickHouse is not configured"); + } + const clickhouseClient = getClickhouseAdminClient(); + const rowsByEvent = events.map(createClickhouseRows); + const rowsToInsert = rowsByEvent.flat(); + migratedEvents = rowsByEvent.reduce((acc, rows) => acc + (rows.length ? 1 : 0), 0); + + if (rowsToInsert.length) { + await clickhouseClient.insert({ + table: "analytics_internal.events", + values: rowsToInsert, + format: "JSONEachRow", + clickhouse_settings: { + date_time_input_format: "best_effort", + async_insert: 1, + }, + }); + insertedRows = rowsToInsert.length; + } + } + + const lastEvent = events.at(-1); + const nextCursor: Cursor | null = lastEvent ? { + created_at_millis: lastEvent.createdAt.getTime(), + id: lastEvent.id, + } : null; + const progressCursor: Cursor | null = nextCursor ?? (cursorCreatedAt && body.cursor ? { + created_at_millis: body.cursor.created_at_millis, + id: body.cursor.id, + } : null); + + const progressCursorCreatedAt = progressCursor ? new Date(progressCursor.created_at_millis) : null; + const remainingWhere = progressCursor ? { + AND: [ + baseWhere, + { + OR: [ + { createdAt: { gt: progressCursorCreatedAt! } }, + { createdAt: progressCursorCreatedAt!, id: { gt: progressCursor.id } }, + ], + }, + ], + } : baseWhere; + + const remainingEvents = await globalPrismaClient.event.count({ where: remainingWhere }); + const processedEvents = totalEvents - remainingEvents; + const progress = totalEvents === 0 ? 
1 : processedEvents / totalEvents; + + return { + statusCode: 200, + bodyType: "json", + body: { + total_events: totalEvents, + processed_events: processedEvents, + remaining_events: remainingEvents, + migrated_events: migratedEvents, + skipped_existing_events: 0, + inserted_rows: insertedRows, + progress, + next_cursor: nextCursor, + }, + }; + }, +}); diff --git a/apps/backend/src/lib/clickhouse.tsx b/apps/backend/src/lib/clickhouse.tsx new file mode 100644 index 0000000000..f4e62daf54 --- /dev/null +++ b/apps/backend/src/lib/clickhouse.tsx @@ -0,0 +1,77 @@ +import { createClient, type ClickHouseClient } from "@clickhouse/client"; +import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env"; +import { StackAssertionError } from "@stackframe/stack-shared/dist/utils/errors"; + +const clickhouseUrl = getEnvVariable("STACK_CLICKHOUSE_URL", ""); +const clickhouseAdminUser = getEnvVariable("STACK_CLICKHOUSE_ADMIN_USER", "stackframe"); +const clickhouseExternalUser = "limited_user"; +const clickhouseAdminPassword = getEnvVariable("STACK_CLICKHOUSE_ADMIN_PASSWORD", ""); +const clickhouseExternalPassword = getEnvVariable("STACK_CLICKHOUSE_EXTERNAL_PASSWORD", ""); +const clickhouseDefaultDatabase = getEnvVariable("STACK_CLICKHOUSE_DATABASE", "default"); +const HAS_CLICKHOUSE = !!clickhouseUrl && !!clickhouseAdminPassword && !!clickhouseExternalPassword; + +if (!HAS_CLICKHOUSE) { + console.warn("ClickHouse is not configured. Analytics features will not be available."); +} + +export function isClickhouseConfigured() { + return HAS_CLICKHOUSE; +} + +export function createClickhouseClient(authType: "admin" | "external", database?: string) { + if (!HAS_CLICKHOUSE) { + throw new StackAssertionError("ClickHouse is not configured"); + } + return createClient({ + url: clickhouseUrl, + username: authType === "admin" ? clickhouseAdminUser : clickhouseExternalUser, + password: authType === "admin" ? 
clickhouseAdminPassword : clickhouseExternalPassword, + database, + }); +} + +export function getClickhouseAdminClient() { + return createClickhouseClient("admin", clickhouseDefaultDatabase); +} + +export function getClickhouseExternalClient() { + return createClickhouseClient("external", clickhouseDefaultDatabase); +} + +export const getQueryTimingStats = async (client: ClickHouseClient, queryId: string) => { + // Flush logs to ensure system.query_log has latest query result. + // Todo: for performance we should instead poll for this row to become available asynchronously after returning result. Flushed every 7.5 seconds by default + await client.exec({ + query: "SYSTEM FLUSH LOGS", + auth: { + username: clickhouseAdminUser, + password: clickhouseAdminPassword, + }, + }); + const profile = await client.query({ + query: ` + SELECT + ProfileEvents['CPUTimeMicroseconds'] / 1000 AS cpu_time_ms, + ProfileEvents['RealTimeMicroseconds'] / 1000 AS wall_clock_time_ms + FROM system.query_log + WHERE query_id = {query_id:String} AND type = 'QueryFinish' + ORDER BY event_time DESC + LIMIT 1 + `, + query_params: { query_id: queryId }, + auth: { + username: clickhouseAdminUser, + password: clickhouseAdminPassword, + }, + format: "JSON", + }); + + const stats = await profile.json<{ + cpu_time_ms: number, + wall_clock_time_ms: number, + }>(); + if (stats.data.length !== 1) { + throw new StackAssertionError(`Unexpected number of query log results: ${stats.data.length}`, { data: stats.data }); + } + return stats.data[0]; +}; diff --git a/apps/backend/src/lib/events.tsx b/apps/backend/src/lib/events.tsx index 8c6e321e91..d79c1ca2fe 100644 --- a/apps/backend/src/lib/events.tsx +++ b/apps/backend/src/lib/events.tsx @@ -9,6 +9,7 @@ import { filterUndefined, typedKeys } from "@stackframe/stack-shared/dist/utils/ import { UnionToIntersection } from "@stackframe/stack-shared/dist/utils/types"; import { generateUuid } from "@stackframe/stack-shared/dist/utils/uuids"; import * as yup from 
"yup"; +import { getClickhouseAdminClient, isClickhouseConfigured } from "./clickhouse"; import { getEndUserInfo } from "./end-users"; import { DEFAULT_BRANCH_ID } from "./tenancies"; @@ -51,6 +52,7 @@ const UserActivityEventType = { userId: yupString().uuid().defined(), // old events of this type may not have an isAnonymous field, so we default to false isAnonymous: yupBoolean().defined().default(false), + teamId: yupString().optional().default(""), }), inherits: [ProjectActivityEventType], } as const satisfies SystemEventTypeBase; @@ -152,6 +154,20 @@ export async function logEvent( // get end user information const endUserInfo = await getEndUserInfo(); // this is a dynamic API, can't run it asynchronously const endUserInfoInner = endUserInfo?.maybeSpoofed ? endUserInfo.spoofedInfo : endUserInfo?.exactInfo; + const eventTypesArray = [...allEventTypes]; + const clickhouseEventData = { + ...data as Record<string, unknown>, + is_wide: isWide, + event_started_at: timeRange.start, + event_ended_at: timeRange.end, + }; + const dataRecord = data as Record<string, unknown> | null | undefined; + const projectId = typeof dataRecord === "object" && dataRecord && typeof dataRecord.projectId === "string" ? dataRecord.projectId : ""; + const branchId = typeof dataRecord === "object" && dataRecord && typeof dataRecord.branchId === "string" ? dataRecord.branchId : DEFAULT_BRANCH_ID; + const userId = typeof dataRecord === "object" && dataRecord && typeof dataRecord.userId === "string" ? dataRecord.userId : ""; + const teamId = typeof dataRecord === "object" && dataRecord && typeof dataRecord.teamId === "string" ? dataRecord.teamId : ""; + const sessionId = typeof dataRecord === "object" && dataRecord && typeof dataRecord.sessionId === "string" ? dataRecord.sessionId : ""; + const isAnonymous = typeof dataRecord === "object" && dataRecord && typeof dataRecord.isAnonymous === "boolean" ? 
dataRecord.isAnonymous : false; // rest is no more dynamic APIs so we can run it asynchronously @@ -159,7 +175,7 @@ export async function logEvent( // log event in DB await globalPrismaClient.event.create({ data: { - systemEventTypeIds: [...allEventTypes].map(eventType => eventType.id), + systemEventTypeIds: eventTypesArray.map(eventType => eventType.id), data: data as any, isEndUserIpInfoGuessTrusted: !endUserInfo?.maybeSpoofed, endUserIpInfoGuess: endUserInfoInner ? { @@ -179,6 +195,29 @@ export async function logEvent( }, }); + if (isClickhouseConfigured()) { + const clickhouseClient = getClickhouseAdminClient(); + await clickhouseClient.insert({ + table: "analytics_internal.events", + values: eventTypesArray.map(eventType => ({ + event_type: eventType.id, + event_at: timeRange.end, + data: clickhouseEventData, + project_id: projectId, + branch_id: branchId, + user_id: userId, + team_id: teamId, + is_anonymous: isAnonymous, + session_id: sessionId, + })), + format: "JSONEachRow", + clickhouse_settings: { + date_time_input_format: "best_effort", + async_insert: 1, + }, + }); + } + // log event in PostHog if (getNodeEnvironment().includes("production") && !getEnvVariable("CI", "")) { await withPostHog(async posthog => { diff --git a/apps/backend/src/lib/tokens.tsx b/apps/backend/src/lib/tokens.tsx index 365b28489f..7e550c9f8a 100644 --- a/apps/backend/src/lib/tokens.tsx +++ b/apps/backend/src/lib/tokens.tsx @@ -252,6 +252,7 @@ export async function generateAccessTokenFromRefreshTokenIfValid(options: { userId: options.refreshTokenObj.projectUserId, sessionId: options.refreshTokenObj.id, isAnonymous: user.is_anonymous, + teamId: "", } ); diff --git a/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/clickhouse-migration/page-client.tsx b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/clickhouse-migration/page-client.tsx new file mode 100644 index 0000000000..aacb85566b --- /dev/null +++ 
b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/clickhouse-migration/page-client.tsx @@ -0,0 +1,269 @@ +"use client"; + +import { ClickhouseMigrationRequest, ClickhouseMigrationResponse } from "@stackframe/stack-shared/dist/interface/admin-interface"; +import { Button, Card, CardContent, CardHeader, CardTitle, Input, Typography, Alert } from "@/components/ui"; +import React from "react"; +import { PageLayout } from "../page-layout"; +import { useAdminApp } from "../use-admin-app"; +import { notFound } from "next/navigation"; + +type MigrationCursor = { + createdAtMillis: number, + id: string, +}; + +type MigrationSnapshot = { + totalEvents: number, + processedEvents: number, + remainingEvents: number, + migratedEvents: number, + skippedExistingEvents: number, + insertedRows: number, + progress: number, + nextCursor: MigrationCursor | null, +}; + +const normalizeResponse = (response: ClickhouseMigrationResponse): MigrationSnapshot => ({ + totalEvents: response.total_events, + processedEvents: response.processed_events, + remainingEvents: response.remaining_events, + migratedEvents: response.migrated_events, + skippedExistingEvents: response.skipped_existing_events, + insertedRows: response.inserted_rows, + progress: response.progress, + nextCursor: response.next_cursor ? 
{ + createdAtMillis: response.next_cursor.created_at_millis, + id: response.next_cursor.id, + } : null, +}); + +export default function PageClient() { + const stackAdminApp = useAdminApp(); + const adminInterface = React.useMemo(() => (stackAdminApp as any)._interface as { + migrateEventsToClickhouse: (options: ClickhouseMigrationRequest) => Promise<ClickhouseMigrationResponse>, + }, [stackAdminApp]); + + const [minCreatedAt, setMinCreatedAt] = React.useState(""); + const [maxCreatedAt, setMaxCreatedAt] = React.useState(""); + const [limit, setLimit] = React.useState(1000); + const [stats, setStats] = React.useState<MigrationSnapshot | null>(null); + const [cursor, setCursor] = React.useState<MigrationCursor | null>(null); + const [running, setRunning] = React.useState(false); + const runningRef = React.useRef(false); + const cursorRef = React.useRef<MigrationCursor | null>(null); + const timeWindowRef = React.useRef<{ minCreatedAtMillis: number, maxCreatedAtMillis: number } | null>(null); + const [error, setError] = React.useState<string | null>(null); + + const parseCreatedAtMillis = React.useCallback((value: string | undefined) => { + if (!value) return null; + const trimmed = value.trim(); + if (!trimmed) return null; + if (/^-?\d+$/.test(trimmed)) { + const parsed = Number(trimmed); + return Number.isFinite(parsed) ? parsed : null; + } + const parsed = new Date(trimmed).getTime(); + return Number.isNaN(parsed) ? null : parsed; + }, []); + + const buildRequestBody = React.useCallback(() => { + const safeLimit = Number.isFinite(limit) && limit > 0 ? Math.min(1000, limit) : 1000; + const minCreatedAtMillis = timeWindowRef.current?.minCreatedAtMillis ?? parseCreatedAtMillis(minCreatedAt); + const maxCreatedAtMillis = timeWindowRef.current?.maxCreatedAtMillis ?? 
parseCreatedAtMillis(maxCreatedAt); + if (minCreatedAtMillis === null || maxCreatedAtMillis === null) { + throw new Error("Please provide valid unix millis (Date.now()) or ISO/datetime-local values for min/max created at."); + } + return { + min_created_at_millis: minCreatedAtMillis, + max_created_at_millis: maxCreatedAtMillis, + cursor: cursorRef.current ? { + created_at_millis: cursorRef.current.createdAtMillis, + id: cursorRef.current.id, + } : undefined, + limit: safeLimit, + }; + }, [limit, maxCreatedAt, minCreatedAt, parseCreatedAtMillis]); + + const runBatch = React.useCallback(async () => { + const response = await adminInterface.migrateEventsToClickhouse(buildRequestBody()); + const snapshot = normalizeResponse(response); + setStats(snapshot); + cursorRef.current = snapshot.nextCursor; + setCursor(snapshot.nextCursor); + return snapshot; + }, [adminInterface, buildRequestBody]); + + const stopMigration = React.useCallback(() => { + runningRef.current = false; + setRunning(false); + }, []); + + const resetMigration = React.useCallback(() => { + stopMigration(); + cursorRef.current = null; + timeWindowRef.current = null; + setCursor(null); + setStats(null); + setError(null); + }, [stopMigration]); + + const startMigration = React.useCallback(async () => { + if (runningRef.current) return; + const minCreatedAtMillis = parseCreatedAtMillis(minCreatedAt); + const maxCreatedAtMillis = parseCreatedAtMillis(maxCreatedAt); + if (minCreatedAtMillis === null || maxCreatedAtMillis === null) { + setError("Please provide valid unix millis (Date.now()) or ISO/datetime-local values for min/max created at."); + return; + } + if (minCreatedAtMillis >= maxCreatedAtMillis) { + setError("Min created at must be before max created at."); + return; + } + setError(null); + timeWindowRef.current = { minCreatedAtMillis, maxCreatedAtMillis }; + runningRef.current = true; + setRunning(true); + + try { + while (runningRef.current) { + const snapshot = await runBatch(); + if 
(!snapshot.nextCursor) { + stopMigration(); + break; + } + } + } catch (e: any) { + setError(e?.message ?? "Migration failed"); + stopMigration(); + } + }, [maxCreatedAt, minCreatedAt, parseCreatedAtMillis, runBatch, stopMigration]); + + const progressPercent = Math.min(100, Math.max(0, Math.round((stats?.progress ?? 0) * 100))); + + if (stackAdminApp.projectId !== "internal") { + return notFound(); + } + + return ( + +
+ + + Controls + + +
+
+ Min created at (unix millis or ISO/datetime-local) + { + setMinCreatedAt(e.target.value); + resetMigration(); + }} + placeholder="1735689600000 or 2024-08-01T00:00" + /> +
+
+ Max created at (use to exclude new dual-written events) + { + setMaxCreatedAt(e.target.value); + resetMigration(); + }} + placeholder="1767225600000 or 2024-12-01T00:00" + /> +
+
+ Batch size + { + setLimit(Number(e.target.value) || 0); + resetMigration(); + }} + /> +
+
+ Cursor + + {cursor ? `${cursor.createdAtMillis} ยท ${cursor.id}` : "Not started"} + +
+
+ + {error && ( + {error} + )} + +
+ + + +
+
+
+ + + + Status + + +
+
+
+
+ Progress + {progressPercent}% +
+
+
+ Processed + {stats?.processedEvents ?? 0} +
+
+ Remaining + {stats?.remainingEvents ?? 0} +
+
+ Migrated this run + {stats?.migratedEvents ?? 0} +
+
+ Inserted rows + {stats?.insertedRows ?? 0} +
+
+ Total in scope + {stats?.totalEvents ?? 0} +
+
+ State + {running ? "Running" : "Idle"} +
+
+ + +
+ + ); +} diff --git a/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/clickhouse-migration/page.tsx b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/clickhouse-migration/page.tsx new file mode 100644 index 0000000000..0c48ee3379 --- /dev/null +++ b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/clickhouse-migration/page.tsx @@ -0,0 +1,9 @@ +import PageClient from "./page-client"; + +export const metadata = { + title: "ClickHouse Event Migration", +}; + +export default function Page() { + return ; +} diff --git a/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/query-analytics/monaco-clickhouse.ts b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/query-analytics/monaco-clickhouse.ts new file mode 100644 index 0000000000..ff68f57e2f --- /dev/null +++ b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/query-analytics/monaco-clickhouse.ts @@ -0,0 +1,1376 @@ +import type { Monaco } from "@monaco-editor/react"; + +type LanguageConfiguration = Parameters[1]; +type MonarchLanguage = Parameters[1]; + +const aggregateFunctions = [ + 'count', + 'min', + 'max', + 'sum', + 'avg', + 'any', + 'stddevPop', + 'stddevSamp', + 'varPop', + 'varSamp', + 'covarPop', + 'covarSamp', + 'anyHeavy', + 'anyLast', + 'argMin', + 'argMax', + 'avgWeighted', + 'topK', + 'topKWeighted', + 'groupArray', + 'groupUniqArray', + 'groupArrayInsertAt', + 'groupArrayMovingAvg', + 'groupArrayMovingSum', + 'groupBitAnd', + 'groupBitOr', + 'groupBitXor', + 'groupBitmap', + 'groupBitmapAnd', + 'groupBitmapOr', + 'groupBitmapXor', + 'sumWithOverflow', + 'sumMap', + 'minMap', + 'maxMap', + 'skewSamp', + 'skewPop', + 'kurtSamp', + 'kurtPop', + 'uniq', + 'uniqExact', + 'uniqCombined', + 'uniqCombined64', + 'uniqHLL12', + 'quantile', + 'quantiles', + 'quantileExact', + 'quantileExactLow', + 'quantileExactHigh', + 'quantileExactWeighted', + 'quantileTiming', + 'quantileTimingWeighted', + 'quantileDeterministic', + 
'quantileTDigest', + 'quantileTDigestWeighted', + 'quantileBFloat16', + 'quantileBFloat16Weighted', + 'simpleLinearRegression', + 'stochasticLinearRegression', + 'stochasticLogisticRegression', + 'categoricalInformationValue', +].reduce((acc, cur) => { + acc.push(cur); + // Aggregate Function Combinators + [ + 'If', + 'Array', + 'Map', + 'SimpleState', + 'State', + 'Merge', + 'MergeState', + 'ForEach', + 'Distinct', + 'OrDefault', + 'OrNull', + 'Resample', + ].forEach((suffix) => { + acc.push(`${cur}${suffix}`); + }); + return acc; +}, []); + +export const clickhouseTables = { + users: [ + "id", + "display_name", + "profile_image_url", + "primary_email", + "primary_email_verified", + "signed_up_at", + "client_metadata", + "client_read_only_metadata", + "server_metadata", + "is_anonymous", + "project_id", + "branch_id", + "sequence_id", + "is_deleted", + ], + events: [ + "event_type", + "event_at", + "data", + "project_id", + "branch_id", + "user_id", + "team_id", + "created_at", + ], +} as const; + +export const conf: LanguageConfiguration = { + comments: { + lineComment: '--', + blockComment: ['/*', '*/'], + }, + brackets: [ + ['{', '}'], + ['[', ']'], + ['(', ')'], + ], + autoClosingPairs: [ + { open: '{', close: '}' }, + { open: '[', close: ']' }, + { open: '(', close: ')' }, + { open: '"', close: '"' }, + { open: "'", close: "'" }, + ], + surroundingPairs: [ + { open: '{', close: '}' }, + { open: '[', close: ']' }, + { open: '(', close: ')' }, + { open: '"', close: '"' }, + { open: "'", close: "'" }, + ], +}; + +export const language: MonarchLanguage = { + defaultToken: '', + tokenPostfix: '.sql', + ignoreCase: true, + + brackets: [ + { open: '[', close: ']', token: 'delimiter.square' }, + { open: '(', close: ')', token: 'delimiter.parenthesis' }, + ], + keywords: [ + 'AES128', + 'AES256', + 'ALL', + 'ALLOWOVERWRITE', + 'ANALYSE', + 'ANALYZE', + 'AND', + 'ANY', + 'ARRAY', + 'AS', + 'ASC', + 'AUTHORIZATION', + 'AZ64', + 'BACKUP', + 'BETWEEN', + 'BINARY', + 
'BLANKSASNULL', + 'BOTH', + 'BYTEDICT', + 'BZIP2', + 'CASE', + 'CAST', + 'CHECK', + 'CLUSTER', + 'COLLATE', + 'COLUMN', + 'CONSTRAINT', + 'CREATE', + 'CREDENTIALS', + 'CROSS', + 'CURRENT_DATE', + 'CURRENT_TIME', + 'CURRENT_TIMESTAMP', + 'CURRENT_USER', + 'CURRENT_USER_ID', + 'DEFAULT', + 'DEFERRABLE', + 'DEFLATE', + 'DEFRAG', + 'DELTA', + 'DELTA32K', + 'DESC', + 'DISABLE', + 'DISTINCT', + 'DO', + 'ELSE', + 'EMPTYASNULL', + 'ENABLE', + 'ENCODE', + 'ENCRYPT', + 'ENCRYPTION', + 'END', + 'ENGINE', + 'EXCEPT', + 'EXPLICIT', + 'FALSE', + 'FOR', + 'FOREIGN', + 'FREEZE', + 'FROM', + 'FULL', + 'GLOBALDICT256', + 'GLOBALDICT64K', + 'GRANT', + 'GROUP', + 'GZIP', + 'HAVING', + 'IDENTITY', + 'IGNORE', + 'ILIKE', + 'IN', + 'INITIALLY', + 'INNER', + 'INTERSECT', + 'INTO', + 'IS', + 'ISNULL', + 'JOIN', + 'LANGUAGE', + 'LEADING', + 'LEFT', + 'LIKE', + 'LIMIT', + 'LOCALTIME', + 'LOCALTIMESTAMP', + 'LUN', + 'LUNS', + 'LZO', + 'LZOP', + 'MINUS', + 'MOSTLY16', + 'MOSTLY32', + 'MOSTLY8', + 'NATURAL', + 'NEW', + 'NOT', + 'NOTNULL', + 'NULL', + 'NULLS', + 'OFF', + 'OFFLINE', + 'OFFSET', + 'OID', + 'OLD', + 'ON', + 'ONLY', + 'OPEN', + 'OR', + 'ORDER', + 'OUTER', + 'OVERLAPS', + 'PARALLEL', + 'PARTITION', + 'PERCENT', + 'PERMISSIONS', + 'PLACING', + 'PRIMARY', + 'RAW', + 'READRATIO', + 'RECOVER', + 'REFERENCES', + 'RESPECT', + 'REJECTLOG', + 'RESORT', + 'RESTORE', + 'RIGHT', + 'SELECT', + 'SESSION_USER', + 'SIMILAR', + 'SNAPSHOT', + 'SOME', + 'SYSDATE', + 'SYSTEM', + 'TABLE', + 'TAG', + 'TDES', + 'TEXT255', + 'TEXT32K', + 'THEN', + 'TIMESTAMP', + 'TO', + 'TOP', + 'TRAILING', + 'TRUE', + 'TRUNCATECOLUMNS', + 'UNION', + 'UNIQUE', + 'USER', + 'USING', + 'VERBOSE', + 'WALLET', + 'WHEN', + 'WHERE', + 'WITH', + 'WITHOUT', + ], + operators: [ + 'AND', + 'BETWEEN', + 'IN', + 'LIKE', + 'NOT', + 'OR', + 'IS', + 'NULL', + 'INTERSECT', + 'UNION', + 'INNER', + 'JOIN', + 'LEFT', + 'OUTER', + 'RIGHT', + 'GLOBAL', + ], + builtinFunctions: [ + // arithmetic + 'plus', + 'minus', + 'multiply', + 'divide', + 
'intDiv', + 'intDivOrZero', + 'modulo', + 'moduloOrZero', + 'negate', + 'abs', + 'gcd', + 'lcm', + 'max2', + 'min2', + // arrays + 'empty', // also in strings and uuids + 'notEmpty', // also in strings and uuids + 'length', + 'emptyArrayUInt8', + 'emptyArrayUInt16', + 'emptyArrayUInt32', + 'emptyArrayUInt64', + 'emptyArrayInt8', + 'emptyArrayInt16', + 'emptyArrayInt32', + 'emptyArrayInt64', + 'emptyArrayFloat32', + 'emptyArrayFloat64', + 'emptyArrayDate', + 'emptyArrayDateTime', + 'emptyArrayString', + 'emptyArrayToSingle', + 'range', + 'array', + 'arrayConcat', + 'arrayElement', + 'has', + 'hasAll', + 'hasAny', + 'hasSubstr', + 'indexOf', + 'arrayCount', + 'countEqual', + 'arrayEnumerate', + 'arrayEnumerateUniq', + 'arrayPopBack', + 'arrayPopFront', + 'arrayPushBack', + 'arrayPushFront', + 'arrayResize', + 'arraySlice', + 'arraySort', + 'arrayReverseSort', + 'arrayUniq', + 'arrayJoin', + 'arrayDifference', + 'arrayDistinct', + 'arrayEnumerateDense', + 'arrayIntersect', + 'arrayReduce', + 'arrayReduceInRanges', + 'arrayReverse', + 'reverse', + 'arrayFlatten', + 'arrayCompact', + 'arrayZip', + 'arrayAUC', + 'arrayMap', + 'arrayFilter', + 'arrayFill', + 'arrayReverseFill', + 'arraySplit', + 'arrayReverseSplit', + 'arrayExists', + 'arrayAll', + 'arrayFirst', + 'arrayFirstOrNull', + 'arrayLast', + 'arrayLastOrNull', + 'arrayFirstIndex', + 'arrayLastIndex', + 'arrayMin', + 'arrayMax', + 'arraySum', + 'arrayAvg', + 'arrayCumSum', + 'arrayCumSumNonNegative', + 'arrayProduct', + // comparison + 'equals', + 'notEquals', + 'less', + 'greater', + 'lessOrEquals', + 'greaterOrEquals', + // logical + 'and', + 'or', + 'not', + 'xor', + // type conversion + 'toInt8', + 'toInt16', + 'toInt32', + 'toInt64', + 'toInt128', + 'toInt256', + 'toInt8OrZero', + 'toInt16OrZero', + 'toInt32OrZero', + 'toInt64OrZero', + 'toInt128OrZero', + 'toInt256OrZero', + 'toInt8OrNull', + 'toInt16OrNull', + 'toInt32OrNull', + 'toInt64OrNull', + 'toInt128OrNull', + 'toInt256OrNull', + 'toInt8OrDefault', + 
'toInt16OrDefault', + 'toInt32OrDefault', + 'toInt64OrDefault', + 'toInt128OrDefault', + 'toInt256OrDefault', + 'toUInt8', + 'toUInt16', + 'toUInt32', + 'toUInt64', + 'toUInt128', + 'toUInt256', + 'toUInt8OrZero', + 'toUInt16OrZero', + 'toUInt32OrZero', + 'toUInt64OrZero', + 'toUInt128OrZero', + 'toUInt256OrZero', + 'toUInt8OrNull', + 'toUInt16OrNull', + 'toUInt32OrNull', + 'toUInt64OrNull', + 'toUInt128OrNull', + 'toUInt256OrNull', + 'toUInt8OrDefault', + 'toUInt16OrDefault', + 'toUInt32OrDefault', + 'toUInt64OrDefault', + 'toUInt128OrDefault', + 'toUInt256OrDefault', + 'toFloat32', + 'toFloat64', + 'toFloat32OrZero', + 'toFloat64OrZero', + 'toFloat32OrNull', + 'toFloat64OrNull', + 'toFloat32OrDefault', + 'toFloat64OrDefault', + 'toDate', + 'toDateOrZero', + 'toDateOrNull', + 'toDateOrDefault', + 'toDateTime', + 'toDateTimeOrZero', + 'toDateTimeOrNull', + 'toDateTimeOrDefault', + 'toDate32', + 'toDate32OrZero', + 'toDate32OrNull', + 'toDate32OrDefault', + 'toDecimal32', + 'toDecimal64', + 'toDecimal128', + 'toDecimal256', + 'toDecimal32OrNull', + 'toDecimal64OrNull', + 'toDecimal128OrNull', + 'toDecimal256OrNull', + 'toDecimal32OrDefault', + 'toDecimal64OrDefault', + 'toDecimal128OrDefault', + 'toDecimal256OrDefault', + 'toDecimal32OrZero', + 'toDecimal64OrZero', + 'toDecimal128OrZero', + 'toDecimal256OrZero', + 'toString', + 'toFixedString', + 'toStringCutToZero', + 'reinterpretAsUInt8', + 'reinterpretAsUInt16', + 'reinterpretAsUInt32', + 'reinterpretAsUInt64', + 'reinterpretAsInt8', + 'reinterpretAsInt16', + 'reinterpretAsInt32', + 'reinterpretAsInt64', + 'reinterpretAsFloat32', + 'reinterpretAsFloat64', + 'reinterpretAsDate', + 'reinterpretAsDateTime', + 'reinterpretAsString', + 'reinterpretAsFixedString', + 'reinterpretAsUUID', + 'reinterpret', + 'CAST', + 'accurateCast', + 'accurateCastOrNull', + 'accurateCastOrDefault', + 'toInterval', + 'parseDateTimeBestEffort', + 'parseDateTime32BestEffort', + 'parseDateTimeBestEffortUS', + 
'parseDateTimeBestEffortOrNull', + 'parseDateTime32BestEffortOrNull', + 'parseDateTimeBestEffortOrZero', + 'parseDateTime32BestEffortOrZero', + 'parseDateTimeBestEffortUSOrNull', + 'parseDateTimeBestEffortUSOrZero', + 'parseDateTime64BestEffort', + 'parseDateTime64BestEffortOrNull', + 'parseDateTime64BestEffortOrZero', + 'toLowCardinality', + 'toUnixTimestamp64Milli', + 'toUnixTimestamp64Micro', + 'toUnixTimestamp64Nano', + 'fromUnixTimestamp64Milli', + 'fromUnixTimestamp64Micro', + 'fromUnixTimestamp64Nano', + 'formatRow', + 'formatRowNoNewline', + 'snowflakeToDateTime', + 'snowflakeToDateTime64', + 'dateTimeToSnowflake', + 'dateTime64ToSnowflake', + // dates and times + 'timeZone', + 'toTimeZone', + 'timeZoneOf', + 'timeZoneOffset', + 'toYear', + 'toQuarter', + 'toMonth', + 'toDayOfYear', + 'toDayOfMonth', + 'toDayOfWeek', + 'toHour', + 'toMinute', + 'toSecond', + 'toUnixTimestamp', + 'toStartOfYear', + 'toStartOfISOYear', + 'toStartOfQuarter', + 'toStartOfMonth', + 'toMonday', + 'toStartOfWeek', + 'toStartOfDay', + 'toStartOfHour', + 'toStartOfMinute', + 'toStartOfSecond', + 'toStartOfFiveMinute', + 'toStartOfTenMinutes', + 'toStartOfFifteenMinutes', + 'toStartOfInterval', + 'toTime', + 'toRelativeYearNum', + 'toRelativeQuarterNum', + 'toRelativeMonthNum', + 'toRelativeWeekNum', + 'toRelativeDayNum', + 'toRelativeHourNum', + 'toRelativeMinuteNum', + 'toRelativeSecondNum', + 'toISOYear', + 'toISOWeek', + 'toWeek', + 'toYearWeek', + 'date_trunc', + 'date_add', + 'date_diff', + 'date_sub', + 'timestamp_add', + 'timestamp_sub', + 'now', + 'today', + 'yesterday', + 'timeSlot', + 'toYYYYMM', + 'toYYYYMMDD', + 'toYYYYMMDDhhmmss', + 'addYears', + 'addMonths', + 'addWeeks', + 'addDays', + 'addHours', + 'addMinutes', + 'addSeconds', + 'addQuarters', + 'subtractYears', + 'subtractMonths', + 'subtractWeeks', + 'subtractDays', + 'subtractHours', + 'subtractMinutes', + 'subtractSeconds', + 'subtractQuarters', + 'timeSlots', + 'formatDateTime', + 'dateName', + 'FROM_UNIXTIME', 
+ 'toModifiedJulianDay', + 'toModifiedJulianDayOrNull', + 'fromModifiedJulianDay', + 'fromModifiedJulianDayOrNull', + // strings + // 'empty', // also in arrays and uuids + // 'notEmpty', // also in arrays and uuids + 'length', + 'lengthUTF8', + 'char_length', + 'CHAR_LENGTH', + 'character_length', + 'CHARACTER_LENGTH', + 'leftPad', + 'leftPadUTF8', + 'rightPad', + 'rightPadUTF8', + 'lower', + 'lcase', + 'upper', + 'ucase', + 'lowerUTF8', + 'upperUTF8', + 'isValidUTF8', + 'toValidUTF8', + 'repeat', + 'reverse', + 'reverseUTF8', + 'format', + 'concat', + 'concatAssumeInjective', + 'substring', + 'mid', + 'substr', + 'substringUTF8', + 'appendTrailingCharIfAbsent', + 'convertCharset', + 'base64Encode', + 'base64Decode', + 'tryBase64Decode', + 'endsWith', + 'startsWith', + 'trim', + 'trimLeft', + 'trimRight', + 'trimBoth', + 'CRC32', + 'CRC32IEEE', + 'CRC64', + 'normalizeQuery', + 'normalizedQueryHash', + 'normalizeUTF8NFC', + 'normalizeUTF8NFD', + 'normalizeUTF8NFKC', + 'normalizeUTF8NFKD', + 'encodeXMLComponent', + 'decodeXMLComponent', + 'extractTextFromHTML', + // for searching in strings + 'position', + 'locate', + 'positionCaseInsensitive', + 'positionUTF8', + 'positionCaseInsensitiveUTF8', + 'multiSearchAllPositions', + 'multiSearchAllPositionsUTF8', + 'multiSearchFirstPosition', + 'multiSearchFirstIndex', + 'multiSearchAny', + 'match', + 'multiMatchAny', + 'multiMatchAnyIndex', + 'multiMatchAllIndices', + 'multiFuzzyMatchAny', + 'multiFuzzyMatchAnyIndex', + 'multiFuzzyMatchAllIndices', + 'extract', + 'extractAll', + 'extractAllGroupsHorizontal', + 'extractAllGroupsVertical', + 'like', + 'notLike', + 'ilike', + 'ngramDistance', + 'ngramSearch', + 'countSubstrings', + 'countSubstringsCaseInsensitive', + 'countSubstringsCaseInsensitiveUTF8', + 'countMatches', + // for replacing in strings + 'replaceOne', + 'replaceAll', + 'replace', + 'replaceRegexpOne', + 'replaceRegexpAll', + 'regexpQuoteMeta', + // conditional + 'if', + 'multiIf', + // functions for working 
with files + 'file', + // mathematical + 'e', + 'pi', + 'exp', + 'log', + 'ln', + 'exp2', + 'log2', + 'exp10', + 'log10', + 'sqrt', + 'cbrt', + 'erf', + 'erfc', + 'lgamma', + 'tgamma', + 'sin', + 'cos', + 'tan', + 'asin', + 'acos', + 'atan', + 'pow', + 'power', + 'intExp2', + 'intExp10', + 'cosh', + 'acosh', + 'sinh', + 'asinh', + 'atanh', + 'atan2', + 'hypot', + 'log1p', + 'sign', + 'degrees', + 'radians', + // rounding + 'floor', + 'ceil', + 'ceiling', + 'trunc', + 'truncate', + 'round', + 'roundBankers', + 'roundToExp2', + 'roundDuration', + 'roundAge', + 'roundDown', + // working with maps + 'map', + 'mapAdd', + 'mapSubstract', + 'mapPopulateSeries', + 'mapContains', + 'mapKeys', + 'mapValues', + 'mapContainsKeyLike', + 'mapExtractKeyLike', + // splitting and merging strings and arrays + 'splitByChar', + 'splitByString', + 'splitByRegexp', + 'splitByWhitespace', + 'splitByNonAlpha', + 'arrayStringConcat', + 'alphaTokens', + 'extractAllGroups', + 'ngrams', + 'tokens', + // bit + 'bitAnd', + 'bitOr', + 'bitXor', + 'bitNot', + 'bitShiftLeft', + 'bitShiftRight', + 'bitRotateLeft', + 'bitRotateRight', + 'bitSlice', + 'bitTest', + 'bitTestAll', + 'bitTestAny', + 'bitCount', + 'bitHammingDistance', + // bitmap + 'bitmapBuild', + 'bitmapToArray', + 'bitmapSubsetInRange', + 'bitmapSubsetLimit', + 'subBitmap', + 'bitmapContains', + 'bitmapHasAny', + 'bitmapHasAll', + 'bitmapCardinality', + 'bitmapMin', + 'bitmapMax', + 'bitmapTransform', + 'bitmapAnd', + 'bitmapOr', + 'bitmapXor', + 'bitmapAndnot', + 'bitmapAndCardinality', + 'bitmapOrCardinality', + 'bitmapXorCardinality', + 'bitmapAndnotCardinality', + // hash + 'halfMD5', + 'MD4', + 'MD5', + 'sipHash64', + 'sipHash128', + 'cityHash64', + 'intHash32', + 'intHash64', + 'SHA1', + 'SHA224', + 'SHA256', + 'SHA512', + 'BLAKE3', + 'URLHash', + 'farmFingerprint64', + 'farmHash64', + 'javaHash', + 'javaHashUTF16LE', + 'hiveHash', + 'metroHash64', + 'jumpConsistentHash', + 'murmurHash2_32', + 'murmurHash2_64', + 
'gccMurmurHash', + 'murmurHash3_32', + 'murmurHash3_64', + 'murmurHash3_128', + 'xxHash32', + 'xxHash64', + 'ngramSimHash', + 'ngramSimHashCaseInsensitive', + 'ngramSimHashUTF8', + 'ngramSimHashCaseInsensitiveUTF8', + 'wordShingleSimHash', + 'wordShingleSimHashCaseInsensitive', + 'wordShingleSimHashUTF8', + 'wordShingleSimHashCaseInsensitiveUTF8', + 'ngramMinHash', + 'ngramMinHashCaseInsensitive', + 'ngramMinHashUTF8', + 'ngramMinHashCaseInsensitiveUTF8', + 'ngramMinHashArg', + 'ngramMinHashArgCaseInsensitive', + 'ngramMinHashArgUTF8', + 'ngramMinHashArgCaseInsensitiveUTF8', + 'wordShingleMinHash', + 'wordShingleMinHashCaseInsensitive', + 'wordShingleMinHashUTF8', + 'wordShingleMinHashCaseInsensitiveUTF8', + 'wordShingleMinHashArg', + 'wordShingleMinHashArgCaseInsensitive', + 'wordShingleMinHashArgUTF8', + 'wordShingleMinHashArgCaseInsensitiveUTF8', + // pseudo-random numbers + 'rand', + 'rand32', + 'rand64', + 'randConstant', + 'randomString', + 'randomFixedString', + 'randomPrintableASCII', + 'randomStringUTF8', + 'fuzzBits', + // encoding + 'char', + 'hex', + 'unhex', + 'bin', + 'unbin', + 'UUIDStringToNum', + 'UUIDNumToString', + 'bitmaskToList', + 'bitmaskToArray', + 'bitPositionsToArray', + // uuid + 'generateUUIDv4', + // 'empty', // also in arrays and strings + // 'notEmpty', // also in arrays and strings + 'toUUID', + 'toUUIDOrNull', + 'toUUIDOrZero', + 'UUIDStringToNum', + 'UUIDNumToString', + 'serverUUID', + // urls + 'protocol', + 'domain', + 'domainWithoutWWW', + 'topLevelDomain', + 'firstSignificantSubdomain', + 'cutToFirstSignificantSubdomain', + 'cutToFirstSignificantSubdomainWithWWW', + 'cutToFirstSignificantSubdomainCustom', + 'cutToFirstSignificantSubdomainCustomWithWWW', + 'firstSignificantSubdomainCustom', + 'port', + 'path', + 'pathFull', + 'queryString', + 'fragment', + 'queryStringAndFragment', + 'extractURLParameter', + 'extractURLParameters', + 'extractURLParameterNames', + 'URLHierarchy', + 'URLPathHierarchy', + 'encodeURLComponent', + 
'decodeURLComponent', + 'encodeURLFormComponent', + 'decodeURLFormComponent', + 'netloc', + 'cutWWW', + 'cutQueryString', + 'cutFragment', + 'cutQueryStringAndFragment', + 'cutURLParameter', + // ip addresses + 'IPv4NumToString', + 'IPv4StringToNum', + 'IPv4StringToNumOrDefault', + 'IPv4StringToNumOrNull', + 'IPv4NumToStringClassC', + 'IPv6NumToString', + 'IPv6StringToNum', + 'IPv6StringToNumOrDefault', + 'IPv6StringToNumOrNull', + 'IPv4ToIPv6', + 'cutIPv6', + 'IPv4CIDRToRange', + 'IPv6CIDRToRange', + 'toIPv4', + 'toIPv4OrDefault', + 'toIPv4OrNull', + 'toIPv6', + 'IPv6StringToNumOrDefault', + 'IPv6StringToNumOrNull', + 'isIPv4String', + 'isIPv6String', + 'isIPAddressInRange', + // json + 'visitParamHas', + 'visitParamExtractUInt', + 'visitParamExtractInt', + 'visitParamExtractFloat', + 'visitParamExtractBool', + 'visitParamExtractRaw', + 'visitParamExtractString', + 'isValidJSON', + 'JSONHas', + 'JSONLength', + 'JSONType', + 'JSONExtractUInt', + 'JSONExtractInt', + 'JSONExtractFloat', + 'JSONExtractBool', + 'JSONExtractString', + 'JSONExtract', + 'JSONExtractKeysAndValues', + 'JSONExtractKeys', + 'JSONExtractRaw', + 'JSONExtractArrayRaw', + 'JSONExtractKeysAndValuesRaw', + 'JSON_EXISTS', + 'JSON_QUERY', + 'JSON_VALUE', + 'toJSONString', + // external dictonaries + 'dictGet', + 'dictGetOrDefault', + 'dictGetOrNull', + 'dictHas', + 'dictGetHierarchy', + 'dictIsIn', + 'dictGetChildren', + 'dictGetDescendant', + 'dictGetInt8', + 'dictGetInt8OrDefault', + 'dictGetInt16', + 'dictGetInt16OrDefault', + 'dictGetInt32', + 'dictGetInt32OrDefault', + 'dictGetInt64', + 'dictGetInt64OrDefault', + 'dictGetUInt8', + 'dictGetUInt8OrDefault', + 'dictGetUInt16', + 'dictGetUInt16OrDefault', + 'dictGetUInt32', + 'dictGetUInt32OrDefault', + 'dictGetUInt64', + 'dictGetUInt64OrDefault', + 'dictGetFloat32', + 'dictGetFloat32OrDefault', + 'dictGetFloat64', + 'dictGetFloat64OrDefault', + 'dictGetDate', + 'dictGetDateOrDefault', + 'dictGetDateTime', + 'dictGetDateTimeOrDefault', + 
'dictGetUUID', + 'dictGetUUIDOrDefault', + 'dictGetString', + 'dictGetStringOrDefault', + // embedded dictionaries + 'regionToCity', + 'regionToArea', + 'regionToDistrict', + 'regionToCountry', + 'regionToContinent', + 'regionToTopContinent', + 'regionToPopulation', + 'regionIn', + 'regionHierarchy', + 'regionToName', + // geo + // geographical coordinates + 'greatCircleDistance', + 'geoDistance', + 'greatCircleAngle', + 'pointInEllipses', + 'pointInPolygon', + // geohash + 'geohashEncode', + 'geohashDecode', + 'geohashesInBox', + // h3 indexes + 'h3IsValid', + 'h3GetResolution', + 'h3EdgeAngle', + 'h3EdgeLengthM', + 'h3EdgeLengthKm', + 'geoToH3', + 'h3ToGeo', + 'h3ToGeoBoundary', + 'h3kRing', + 'h3GetBaseCell', + 'h3HexAreaM2', + 'h3HexAreaKm2', + 'h3IndexesAreNeighbors', + 'h3ToChildren', + 'h3ToParent', + 'h3ToString', + 'stringToH3', + 'h3GetResolution', + 'h3EdgeAngle', + 'h3EdgeLengthM', + 'h3EdgeLengthKm', + 'geoToH3', + 'h3ToGeo', + 'h3ToGeoBoundary', + 'h3kRing', + 'h3GetBaseCell', + 'h3HexAreaM2', + 'h3HexAreaKm2', + 'h3IndexesAreNeighbors', + 'h3ToChildren', + 'h3ToParent', + 'h3ToString', + 'stringToH3', + 'h3GetResolution', + 'h3IsResClassIII', + 'h3IsPentagon', + 'h3GetFaces', + 'h3CellAreaM2', + 'h3CellAreaRads2', + 'h3ToCenterChild', + 'h3ExactEdgeLengthM', + 'h3ExactEdgeLengthKm', + 'h3ExactEdgeLengthRads', + 'h3NumHexagons', + 'h3Line', + 'h3Distance', + 'h3HexRing', + // s2 geometry + 'geoToS2', + 's2ToGeo', + 's2GetNeighbors', + 's2CellsIntersect', + 's2CapContains', + 's2CapUnion', + 's2RectAdd', + 's2RectContains', + 's2RectUinion', + 's2RectIntersection', + // nullable + 'isNull', + 'isNotNull', + 'coalesce', + 'ifNull', + 'nullIf', + 'assumeNotNull', + 'toNullable', + // machine learning + 'evalMLMethod', + 'stochasticLinearRegression', + 'stochasticLogisticRegression', + // introspection + // skipping this as not enabled by default, and is not suggested to + // use as they are slow and impose security concerns + // tuples + 'tuple', + 
'tupleElement', + 'untuple', + 'tupleHammingDistance', + 'tupleToNameValuePairs', + 'tuplePlus', + 'tupleMinus', + 'tupleMultiply', + 'tupleDivide', + 'tupleNegate', + 'tupleMultiplyByNumber', + 'tupleDivideByNumber', + 'dotProduct', + 'L1Norm', + 'L2Norm', + 'LinfNorm', + 'LpNorm', + 'L1Distance', + 'L2Distance', + 'LinfDistance', + 'LpDistance', + 'L1Normalize', + 'L2Normalize', + 'LinfNormalize', + 'LpNormalize', + 'cosineDistance', + // encryption + 'encrypt', + 'aes_encrypt_mysql', + 'decrypt', + 'aes_decrypt_mysql', + // other + 'hostName', + 'getMacro', + 'FQDN', + 'basename', + 'visibleWidth', + 'toTypeName', + 'blockSize', + 'byteSize', + 'materialize', + 'ignore', + 'sleep', + 'sleepEachRow', + 'currentDatabase', + 'currentUser', + 'isConstant', + 'isFinite', + 'isInfinite', + 'ifNotFinite', + 'isNaN', + 'hasColumnInTable', + 'bar', + 'transform', + 'formatReadableSize', + 'formatReadableQuantity', + 'formatReadableTimeDelta', + 'least', + 'greatest', + 'uptime', + 'version', + 'buildId', + 'blockNumber', + 'rowNumberInBlock', + 'rowNumberInAllBlocks', + 'neighbor', + 'runningDifference', + 'runningDifferenceStartingWithFirstValue', + 'runningConcurrency', + 'MACNumToString', + 'MACStringToNum', + 'MACStringToOUI', + 'getSizeOfEnumType', + 'blockSerializedSize', + 'toColumnTypeName', + 'dumpColumnStructure', + 'defaultValueOfArgumentType', + 'defaultValueOfTypeName', + 'indexHint', + 'replicate', + 'filesystemAvailable', + 'filesystemFree', + 'filesystemCapacity', + 'initializeAggregation', + 'finalizeAggregation', + 'runningAccumulate', + 'joinGet', + 'modelEvaluate', + 'throwIf', + 'identity', + 'getSetting', + 'isDecimalOverflow', + 'countDigits', + 'errorCodeToName', + 'tcpPort', + 'currentProfiles', + 'enabledProfiles', + 'defaultProfiles', + 'currentRoles', + 'enabledRoles', + 'defaultRoles', + 'getServerPort', + 'queryID', + 'initialQueryID', + 'shardNum', + 'shardCount', + 'getOSKernelVersion', + 'zookeeperSessionUptime', + // time window + 
'tumble', + 'hop', + 'tumbleStart', + 'tumbleEnd', + 'hopStart', + 'hopEnd', + // aggregate functions + ...aggregateFunctions, + // aggregate functions - parametric + 'histogram', + 'sequenceMatch', + 'sequenceCount', + 'windowFunnel', + 'retention', + 'uniqUpTo', + 'sumMapFiltered', + 'sequenceNextNode', + // table functions + 'merge', + 'numbers', + 'remote', + 'url', + 'mysql', + 'postgresql', + 'jdbc', + 'odbc', + 'hdfs', + 's3', + 'input', + 'generateRandom', + 'cluster', + 'clusterAllReplicas', + 'view', + 'null', + 'dictionary', + 'hdfsCluster', + 's3Cluster', + 'sqlite', + ], + builtinVariables: [ + // NOT SUPPORTED + ], + pseudoColumns: [ + // NOT SUPPORTED + ], + tokenizer: { + root: [ + { include: '@comments' }, + { include: '@whitespace' }, + { include: '@pseudoColumns' }, + { include: '@numbers' }, + { include: '@strings' }, + { include: '@complexIdentifiers' }, + { include: '@scopes' }, + [/[;,.]/, 'delimiter'], + [/[()]/, '@brackets'], + [ + /[\w@#$]+/, + { + cases: { + '@operators': 'operator', + '@builtinVariables': 'predefined', + '@builtinFunctions': 'predefined', + '@keywords': 'keyword', + '@default': 'identifier', + }, + }, + ], + [/[<>=!%&+\-*/|~^]/, 'operator'], + ], + whitespace: [[/\s+/, 'white']], + comments: [ + [/--+.*/, 'comment'], + [/\/\*/, { token: 'comment.quote', next: '@comment' }], + ], + comment: [ + [/[^*/]+/, 'comment'], + // Not supporting nested comments, as nested comments seem to not be standard? + // i.e. 
http://stackoverflow.com/questions/728172/are-there-multiline-comment-delimiters-in-sql-that-are-vendor-agnostic + // [/\/\*/, { token: 'comment.quote', next: '@push' }], // nested comment not allowed :-( + [/\*\//, { token: 'comment.quote', next: '@pop' }], + [/./, 'comment'], + ], + pseudoColumns: [ + [ + /[$][A-Za-z_][\w@#$]*/, + { + cases: { + '@pseudoColumns': 'predefined', + '@default': 'identifier', + }, + }, + ], + ], + numbers: [ + [/0[xX][0-9a-fA-F]*/, 'number'], + [/[$][+-]*\d*(\.\d*)?/, 'number'], + [/((\d+(\.\d*)?)|(\.\d+))([eE][-+]?\d+)?/, 'number'], + ], + strings: [[/'/, { token: 'string', next: '@string' }]], + string: [ + [/[^']+/, 'string'], + [/''/, 'string'], + [/'/, { token: 'string', next: '@pop' }], + ], + complexIdentifiers: [ + [/"/, { token: 'identifier.quote', next: '@quotedIdentifier' }], + ], + quotedIdentifier: [ + [/[^"]+/, 'identifier'], + [/""/, 'identifier'], + [/"/, { token: 'identifier.quote', next: '@pop' }], + ], + scopes: [ + // NOT SUPPORTED + ], + }, +}; + +export const clickhouseKeywords = language.keywords as string[]; diff --git a/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/query-analytics/page-client.tsx b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/query-analytics/page-client.tsx new file mode 100644 index 0000000000..3465a91676 --- /dev/null +++ b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/query-analytics/page-client.tsx @@ -0,0 +1,215 @@ +"use client"; + +import Editor from "@monaco-editor/react"; +import type { Monaco } from "@monaco-editor/react"; +import React, { useEffect, useMemo, useRef } from "react"; +import { Alert, Button, Textarea, Typography } from "@/components/ui"; +import { PageLayout } from "../page-layout"; +import { useAdminApp } from "../use-admin-app"; +import { clickhouseKeywords, clickhouseTables, conf, language } from "./monaco-clickhouse"; + +const CLICKHOUSE_LANGUAGE_ID = "clickhouse-sql"; + +type Disposable = { dispose: () => void }; 
+type CompletionItem = Parameters[1]["provideCompletionItems"] extends ( + ...args: any +) => infer R + ? R extends { suggestions: Array } + ? U + : never + : never; + +export default function PageClient() { + const adminApp = useAdminApp(); + const [query, setQuery] = React.useState("SELECT 1 AS value;"); + const [resultText, setResultText] = React.useState(""); + const [error, setError] = React.useState(null); + const [loading, setLoading] = React.useState(false); + const disposables = useRef([]); + const queryRef = useRef(query); + + const tableColumnSuggestions = useMemo(() => { + return Object.entries(clickhouseTables).flatMap(([table, columns]) => + columns.map((column) => ({ table, column })), + ); + }, []); + + useEffect(() => { + const disposablesToDispose = disposables.current; + return () => { + disposablesToDispose.forEach((d) => d.dispose()); + }; + }, []); + + const runQuery = () => { + const currentQuery = queryRef.current.trim(); + if (!currentQuery) { + return; + } + + const execute = async () => { + setLoading(true); + setError(null); + try { + const response = await adminApp.queryAnalytics({ + query: currentQuery, + include_all_branches: false, + }); + setResultText(JSON.stringify(response.result, null, 2)); + } catch (e: any) { + setError(e?.message ?? 
"Failed to run analytics query."); + setResultText(""); + } finally { + setLoading(false); + } + }; + + // eslint-disable-next-line @typescript-eslint/no-floating-promises + void execute(); + }; + + const handleEditorMount: Parameters[0]["onMount"] = (instance, monaco: Monaco) => { + if (!monaco.languages.getLanguages().some((lang) => lang.id === CLICKHOUSE_LANGUAGE_ID)) { + monaco.languages.register({ id: CLICKHOUSE_LANGUAGE_ID }); + monaco.languages.setLanguageConfiguration(CLICKHOUSE_LANGUAGE_ID, conf); + monaco.languages.setMonarchTokensProvider(CLICKHOUSE_LANGUAGE_ID, language); + } + + disposables.current.push( + monaco.languages.registerCompletionItemProvider(CLICKHOUSE_LANGUAGE_ID, { + triggerCharacters: [".", " "], + provideCompletionItems: (model, position) => { + const word = model.getWordUntilPosition(position); + const range = { + startLineNumber: position.lineNumber, + endLineNumber: position.lineNumber, + startColumn: word.startColumn, + endColumn: word.endColumn, + }; + + const linePrefix = model.getValueInRange({ + startLineNumber: position.lineNumber, + startColumn: 1, + endLineNumber: position.lineNumber, + endColumn: position.column, + }); + + const tableMatch = /([a-zA-Z_][\w]*)\.\s*$/.exec(linePrefix); + const suggestions: CompletionItem[] = []; + + if (tableMatch) { + const tableName = tableMatch[1].toLowerCase(); + const columns = (clickhouseTables as Record)[tableName]; + if (columns) { + columns.forEach((column) => { + suggestions.push({ + label: column, + kind: monaco.languages.CompletionItemKind.Field, + insertText: column, + range, + detail: `${tableName}.${column}`, + }); + }); + } + } else { + Object.keys(clickhouseTables).forEach((table) => { + suggestions.push({ + label: table, + kind: monaco.languages.CompletionItemKind.Class, + insertText: table, + range, + detail: "Table", + }); + }); + + tableColumnSuggestions.forEach(({ table, column }) => { + suggestions.push({ + label: `${table}.${column}`, + kind: 
monaco.languages.CompletionItemKind.Field, + insertText: `${table}.${column}`, + range, + }); + }); + + clickhouseKeywords.forEach((keyword) => { + suggestions.push({ + label: keyword, + kind: monaco.languages.CompletionItemKind.Keyword, + insertText: keyword, + range, + }); + }); + } + + return { suggestions }; + }, + }), + ); + + const model = instance.getModel(); + if (model) { + monaco.editor.setModelLanguage(model, CLICKHOUSE_LANGUAGE_ID); + } + + instance.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.Enter, () => { + void runQuery(); + }); + }; + + return ( + +
+ +
+ + {error && {error}} + +
+
+
+ { + const next = value ?? ""; + setQuery(next); + queryRef.current = next; + }} + onMount={handleEditorMount} + options={{ + minimap: { enabled: false }, + fontSize: 14, + scrollBeyondLastLine: false, + wordWrap: "on", + fixedOverflowWidgets: true, + padding: { top: 10, bottom: 10 }, + }} + theme="vs-dark" + /> +
+
+
+