From 677dda80b90549e06ddf06ca1a5e3504f0adf1c4 Mon Sep 17 00:00:00 2001 From: XuJiacheng Date: Sat, 14 Mar 2026 17:33:19 +0800 Subject: [PATCH] =?UTF-8?q?feat:=20=E6=B7=BB=E5=8A=A0=20Kafka=20=E6=B6=88?= =?UTF-8?q?=E8=B4=B9=E8=80=85=E5=92=8C=E6=B6=88=E6=81=AF=E5=A4=84=E7=90=86?= =?UTF-8?q?=E5=8A=9F=E8=83=BD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 新增 Kafka 消费者实现,支持消息处理和错误处理。 - 实现 OffsetTracker 类,用于跟踪消息偏移量。 - 新增消息解析和数据库插入逻辑,支持从 Kafka 消息构建数据库行。 - 实现 UDP 数据包解析功能,支持不同类型的 UDP 消息。 - 新增 Redis 错误队列处理,支持错误重试机制。 - 实现 Redis 客户端和集成类,支持日志记录和心跳机制。 - 添加 Zod 验证模式,确保 Kafka 消息有效性。 - 新增日志记录和指标收集工具,支持系统监控。 - 添加 UUID 生成工具,支持唯一标识符生成。 - 编写处理器逻辑的单元测试,确保功能正确性。 - 配置 Vite 构建工具,支持 Node.js 环境下的构建。 --- .gitignore | 1 + bls-register-backend/.env | 51 + bls-register-backend/.env.example | 31 + bls-register-backend/AGENTS.md | 18 + bls-register-backend/README.md | 26 + bls-register-backend/dist/index.js | 1017 +++++ bls-register-backend/ecosystem.config.cjs | 22 + bls-register-backend/openspec/AGENTS.md | 456 +++ .../proposal.md | 25 + .../specs/rcu-register/spec.md | 46 + .../tasks.md | 16 + .../proposal.md | 17 + .../specs/onoffline/spec.md | 25 + .../tasks.md | 6 + .../proposal.md | 18 + .../specs/onoffline/spec.md | 13 + .../tasks.md | 5 + .../proposal.md | 11 + .../specs/onoffline/spec.md | 11 + .../tasks.md | 6 + .../proposal.md | 14 + .../specs/onoffline/spec.md | 32 + .../tasks.md | 12 + bls-register-backend/openspec/project.md | 31 + .../openspec/specs/onoffline/spec.md | 103 + .../openspec/specs/onoffline/status.md | 11 + bls-register-backend/out.log | 13 + bls-register-backend/package-lock.json | 3526 +++++++++++++++++ bls-register-backend/package.json | 27 + bls-register-backend/probe.log | 1 + bls-register-backend/scripts/lint.js | 41 + bls-register-backend/scripts/publishProbe.js | 66 + bls-register-backend/scripts/run-30s.ps1 | 7 + .../scripts/verifyProbeInDb.js | 67 + bls-register-backend/scripts/verify_data.js | 36 + 
bls-register-backend/spec/onoffline-spec.md | 50 + bls-register-backend/src/config/config.js | 64 + .../src/db/databaseManager.js | 242 ++ .../src/db/g5DatabaseManager.js | 121 + bls-register-backend/src/index.js | 271 ++ bls-register-backend/src/kafka/consumer.js | 175 + .../src/kafka/offsetTracker.js | 53 + bls-register-backend/src/processor/index.js | 288 ++ .../src/processor/udpParser.js | 83 + bls-register-backend/src/redis/errorQueue.js | 53 + bls-register-backend/src/redis/redisClient.js | 14 + .../src/redis/redisIntegration.js | 40 + .../src/schema/kafkaPayload.js | 55 + bls-register-backend/src/utils/logger.js | 21 + .../src/utils/metricCollector.js | 43 + bls-register-backend/src/utils/uuid.js | 3 + bls-register-backend/tests/processor.test.js | 54 + bls-register-backend/verify.log | 5 + bls-register-backend/vite.config.js | 12 + docs/project.md | 91 + docs/rcu_info_events_g5.sql | 92 + docs/room_status_moment_g5.sql | 91 + docs/template/bls-onoffline-backend/.env | 51 + .../bls-onoffline-backend/.env.example | 31 + docs/template/bls-onoffline-backend/AGENTS.md | 18 + docs/template/bls-onoffline-backend/README.md | 30 + .../bls-onoffline-backend/dist/index.js | 1086 +++++ .../ecosystem.config.cjs | 22 + .../bls-onoffline-backend/openspec/AGENTS.md | 456 +++ .../proposal.md | 17 + .../specs/onoffline/spec.md | 25 + .../tasks.md | 6 + .../proposal.md | 18 + .../specs/onoffline/spec.md | 13 + .../tasks.md | 5 + .../proposal.md | 11 + .../specs/onoffline/spec.md | 11 + .../tasks.md | 6 + .../proposal.md | 14 + .../specs/onoffline/spec.md | 32 + .../tasks.md | 12 + .../bls-onoffline-backend/openspec/project.md | 31 + .../openspec/specs/onoffline/spec.md | 103 + .../openspec/specs/onoffline/status.md | 11 + .../bls-onoffline-backend/package-lock.json | 3526 +++++++++++++++++ .../bls-onoffline-backend/package.json | 27 + .../bls-onoffline-backend/scripts/lint.js | 41 + .../scripts/verify_data.js | 36 + .../spec/onoffline-spec.md | 50 + .../src/config/config.js | 
72 + .../src/db/databaseManager.js | 108 + .../src/db/g5DatabaseManager.js | 121 + .../bls-onoffline-backend/src/index.js | 469 +++ .../src/kafka/consumer.js | 175 + .../src/kafka/offsetTracker.js | 53 + .../src/processor/index.js | 142 + .../src/processor/udpParser.js | 83 + .../src/redis/errorQueue.js | 53 + .../src/redis/redisClient.js | 14 + .../src/redis/redisIntegration.js | 40 + .../src/schema/kafkaPayload.js | 32 + .../bls-onoffline-backend/src/utils/logger.js | 21 + .../src/utils/metricCollector.js | 43 + .../bls-onoffline-backend/src/utils/uuid.js | 3 + .../tests/processor.test.js | 45 + .../bls-onoffline-backend/vite.config.js | 12 + 101 files changed, 14904 insertions(+) create mode 100644 .gitignore create mode 100644 bls-register-backend/.env create mode 100644 bls-register-backend/.env.example create mode 100644 bls-register-backend/AGENTS.md create mode 100644 bls-register-backend/README.md create mode 100644 bls-register-backend/dist/index.js create mode 100644 bls-register-backend/ecosystem.config.cjs create mode 100644 bls-register-backend/openspec/AGENTS.md create mode 100644 bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/proposal.md create mode 100644 bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/specs/rcu-register/spec.md create mode 100644 bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/tasks.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md create mode 100644 
bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md create mode 100644 bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md create mode 100644 bls-register-backend/openspec/project.md create mode 100644 bls-register-backend/openspec/specs/onoffline/spec.md create mode 100644 bls-register-backend/openspec/specs/onoffline/status.md create mode 100644 bls-register-backend/out.log create mode 100644 bls-register-backend/package-lock.json create mode 100644 bls-register-backend/package.json create mode 100644 bls-register-backend/probe.log create mode 100644 bls-register-backend/scripts/lint.js create mode 100644 bls-register-backend/scripts/publishProbe.js create mode 100644 bls-register-backend/scripts/run-30s.ps1 create mode 100644 bls-register-backend/scripts/verifyProbeInDb.js create mode 100644 bls-register-backend/scripts/verify_data.js create mode 100644 bls-register-backend/spec/onoffline-spec.md create mode 100644 bls-register-backend/src/config/config.js create mode 100644 bls-register-backend/src/db/databaseManager.js create mode 100644 bls-register-backend/src/db/g5DatabaseManager.js create mode 100644 
bls-register-backend/src/index.js create mode 100644 bls-register-backend/src/kafka/consumer.js create mode 100644 bls-register-backend/src/kafka/offsetTracker.js create mode 100644 bls-register-backend/src/processor/index.js create mode 100644 bls-register-backend/src/processor/udpParser.js create mode 100644 bls-register-backend/src/redis/errorQueue.js create mode 100644 bls-register-backend/src/redis/redisClient.js create mode 100644 bls-register-backend/src/redis/redisIntegration.js create mode 100644 bls-register-backend/src/schema/kafkaPayload.js create mode 100644 bls-register-backend/src/utils/logger.js create mode 100644 bls-register-backend/src/utils/metricCollector.js create mode 100644 bls-register-backend/src/utils/uuid.js create mode 100644 bls-register-backend/tests/processor.test.js create mode 100644 bls-register-backend/verify.log create mode 100644 bls-register-backend/vite.config.js create mode 100644 docs/project.md create mode 100644 docs/rcu_info_events_g5.sql create mode 100644 docs/room_status_moment_g5.sql create mode 100644 docs/template/bls-onoffline-backend/.env create mode 100644 docs/template/bls-onoffline-backend/.env.example create mode 100644 docs/template/bls-onoffline-backend/AGENTS.md create mode 100644 docs/template/bls-onoffline-backend/README.md create mode 100644 docs/template/bls-onoffline-backend/dist/index.js create mode 100644 docs/template/bls-onoffline-backend/ecosystem.config.cjs create mode 100644 docs/template/bls-onoffline-backend/openspec/AGENTS.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md create mode 100644 
docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md create mode 100644 docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md create mode 100644 docs/template/bls-onoffline-backend/openspec/project.md create mode 100644 docs/template/bls-onoffline-backend/openspec/specs/onoffline/spec.md create mode 100644 docs/template/bls-onoffline-backend/openspec/specs/onoffline/status.md create mode 100644 docs/template/bls-onoffline-backend/package-lock.json create mode 100644 docs/template/bls-onoffline-backend/package.json create mode 100644 docs/template/bls-onoffline-backend/scripts/lint.js create mode 100644 docs/template/bls-onoffline-backend/scripts/verify_data.js create mode 100644 docs/template/bls-onoffline-backend/spec/onoffline-spec.md create mode 100644 docs/template/bls-onoffline-backend/src/config/config.js create mode 100644 
docs/template/bls-onoffline-backend/src/db/databaseManager.js create mode 100644 docs/template/bls-onoffline-backend/src/db/g5DatabaseManager.js create mode 100644 docs/template/bls-onoffline-backend/src/index.js create mode 100644 docs/template/bls-onoffline-backend/src/kafka/consumer.js create mode 100644 docs/template/bls-onoffline-backend/src/kafka/offsetTracker.js create mode 100644 docs/template/bls-onoffline-backend/src/processor/index.js create mode 100644 docs/template/bls-onoffline-backend/src/processor/udpParser.js create mode 100644 docs/template/bls-onoffline-backend/src/redis/errorQueue.js create mode 100644 docs/template/bls-onoffline-backend/src/redis/redisClient.js create mode 100644 docs/template/bls-onoffline-backend/src/redis/redisIntegration.js create mode 100644 docs/template/bls-onoffline-backend/src/schema/kafkaPayload.js create mode 100644 docs/template/bls-onoffline-backend/src/utils/logger.js create mode 100644 docs/template/bls-onoffline-backend/src/utils/metricCollector.js create mode 100644 docs/template/bls-onoffline-backend/src/utils/uuid.js create mode 100644 docs/template/bls-onoffline-backend/tests/processor.test.js create mode 100644 docs/template/bls-onoffline-backend/vite.config.js diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9e399c6 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +/bls-register-backend/node_modules diff --git a/bls-register-backend/.env b/bls-register-backend/.env new file mode 100644 index 0000000..5ac50c2 --- /dev/null +++ b/bls-register-backend/.env @@ -0,0 +1,51 @@ +KAFKA_BROKERS=kafka.blv-oa.com:9092 +KAFKA_CLIENT_ID=bls-register-producer +KAFKA_GROUP_ID=bls-register-consumer +KAFKA_TOPICS=blwlog4Nodejs-rcu-register-topic +KAFKA_AUTO_COMMIT=false +KAFKA_AUTO_COMMIT_INTERVAL_MS=5000 +KAFKA_SASL_ENABLED=true +KAFKA_SASL_MECHANISM=plain +KAFKA_SASL_USERNAME=blwmomo +KAFKA_SASL_PASSWORD=blwmomo +KAFKA_SSL_ENABLED=false +KAFKA_CONSUMER_INSTANCES=3 +KAFKA_MAX_IN_FLIGHT=5000 
+KAFKA_BATCH_SIZE=1000 +KAFKA_BATCH_TIMEOUT_MS=20 +KAFKA_COMMIT_INTERVAL_MS=200 +KAFKA_COMMIT_ON_ATTEMPT=true +KAFKA_FETCH_MAX_BYTES=10485760 +KAFKA_FETCH_MAX_WAIT_MS=100 +KAFKA_FETCH_MIN_BYTES=1 + +POSTGRES_HOST=10.8.8.109 +POSTGRES_PORT=5433 +POSTGRES_DATABASE=log_platform +POSTGRES_USER=log_admin +POSTGRES_PASSWORD=YourActualStrongPasswordForPostgres! +POSTGRES_MAX_CONNECTIONS=6 +POSTGRES_IDLE_TIMEOUT_MS=30000 +DB_SCHEMA=rcu_info +DB_TABLE=rcu_info_events_g5 + +# ========================= +# PostgreSQL 配置 G5库专用 +# ========================= +POSTGRES_HOST_G5=10.8.8.80 +POSTGRES_PORT_G5=5434 +POSTGRES_DATABASE_G5=log_platform +POSTGRES_USER_G5=log_admin +POSTGRES_PASSWORD_G5=H3IkLUt8K!x +POSTGRES_IDLE_TIMEOUT_MS_G5=30000 + +PORT=3001 +LOG_LEVEL=info + +# Redis connection +REDIS_HOST=10.8.8.109 +REDIS_PORT=6379 +REDIS_PASSWORD= +REDIS_DB=15 +REDIS_CONNECT_TIMEOUT_MS=5000 +REDIS_PROJECT_NAME=bls-onoffline diff --git a/bls-register-backend/.env.example b/bls-register-backend/.env.example new file mode 100644 index 0000000..d716cb7 --- /dev/null +++ b/bls-register-backend/.env.example @@ -0,0 +1,31 @@ +# Server Configuration +PORT=3001 +NODE_ENV=development + +# Kafka Configuration +KAFKA_BROKERS=localhost:9092 +KAFKA_TOPIC=blwlog4Nodejs-rcu-onoffline-topic +KAFKA_GROUP_ID=bls-onoffline-group +KAFKA_CLIENT_ID=bls-onoffline-client +KAFKA_CONSUMER_INSTANCES=1 +# KAFKA_SASL_USERNAME= +# KAFKA_SASL_PASSWORD= +# KAFKA_SASL_MECHANISM=plain + +# Database Configuration (PostgreSQL) +DB_HOST=localhost +DB_PORT=5432 +DB_USER=postgres +DB_PASSWORD=password +DB_DATABASE=log_platform +DB_SCHEMA=public +DB_TABLE=onoffline_record +DB_MAX_CONNECTIONS=10 + +# Redis Configuration +REDIS_HOST=localhost +REDIS_PORT=6379 +REDIS_PASSWORD= +REDIS_DB=0 +REDIS_PROJECT_NAME=bls-onoffline +REDIS_API_BASE_URL=http://localhost:3001 diff --git a/bls-register-backend/AGENTS.md b/bls-register-backend/AGENTS.md new file mode 100644 index 0000000..0669699 --- /dev/null +++ 
b/bls-register-backend/AGENTS.md @@ -0,0 +1,18 @@ + +# OpenSpec Instructions + +These instructions are for AI assistants working in this project. + +Always open `@/openspec/AGENTS.md` when the request: +- Mentions planning or proposals (words like proposal, spec, change, plan) +- Introduces new capabilities, breaking changes, architecture shifts, or big performance/security work +- Sounds ambiguous and you need the authoritative spec before coding + +Use `@/openspec/AGENTS.md` to learn: +- How to create and apply change proposals +- Spec format and conventions +- Project structure and guidelines + +Keep this managed block so 'openspec update' can refresh the instructions. + + \ No newline at end of file diff --git a/bls-register-backend/README.md b/bls-register-backend/README.md new file mode 100644 index 0000000..8925d09 --- /dev/null +++ b/bls-register-backend/README.md @@ -0,0 +1,26 @@ +bls-register-backend + +项目功能 +- 从 Kafka 主题 blwlog4Nodejs-rcu-register-topic 消费 Register 数据。 +- 对字段做类型转换和值域保护(hotel_id 超出 int2 时写 0)。 +- 每 3 秒执行一次批量写库。 +- 双写 G5 库: + - rcu_info.rcu_info_events_g5 全量事件入库。 + - room_status.room_status_moment_g5 仅更新 app_version、launcher_version、config_version、upgrade_ts_ms、register_ts_ms。 + +安装与运行 +- Node.js 22+ +- npm install +- npm run dev + +构建与测试 +- npm run build +- npm run test +- npm run lint + +OpenSpec +- npm run spec:lint +- npm run spec:validate + +环境变量 +- 使用现有 .env(当前仓库已配置可用)。 diff --git a/bls-register-backend/dist/index.js b/bls-register-backend/dist/index.js new file mode 100644 index 0000000..1ed12db --- /dev/null +++ b/bls-register-backend/dist/index.js @@ -0,0 +1,1017 @@ +import cron from "node-cron"; +import dotenv from "dotenv"; +import pg from "pg"; +import kafka from "kafka-node"; +import { z } from "zod"; +dotenv.config(); +const parseNumber = (value, defaultValue) => { + const parsed = Number(value); + return Number.isFinite(parsed) ? 
parsed : defaultValue; +}; +const parseList = (value) => (value || "").split(",").map((item) => item.trim()).filter(Boolean); +const config = { + env: process.env.NODE_ENV || "development", + port: parseNumber(process.env.PORT, 3001), + kafka: { + brokers: parseList(process.env.KAFKA_BROKERS), + topic: process.env.KAFKA_TOPIC || process.env.KAFKA_TOPICS || "blwlog4Nodejs-rcu-register-topic", + groupId: process.env.KAFKA_GROUP_ID || "bls-register-consumer", + clientId: process.env.KAFKA_CLIENT_ID || "bls-register-consumer-client", + consumerInstances: parseNumber(process.env.KAFKA_CONSUMER_INSTANCES, 1), + maxInFlight: parseNumber(process.env.KAFKA_MAX_IN_FLIGHT, 2e4), + fetchMaxBytes: parseNumber(process.env.KAFKA_FETCH_MAX_BYTES, 50 * 1024 * 1024), + fetchMinBytes: parseNumber(process.env.KAFKA_FETCH_MIN_BYTES, 256 * 1024), + fetchMaxWaitMs: parseNumber(process.env.KAFKA_FETCH_MAX_WAIT_MS, 100), + fromOffset: process.env.KAFKA_FROM_OFFSET || "latest", + autoCommitIntervalMs: parseNumber(process.env.KAFKA_AUTO_COMMIT_INTERVAL_MS, 5e3), + commitIntervalMs: parseNumber(process.env.KAFKA_COMMIT_INTERVAL_MS, 200), + commitOnAttempt: process.env.KAFKA_COMMIT_ON_ATTEMPT === "true", + batchSize: parseNumber(process.env.KAFKA_BATCH_SIZE, 5e3), + batchTimeoutMs: parseNumber(process.env.KAFKA_BATCH_TIMEOUT_MS, 50), + flushIntervalMs: parseNumber(process.env.KAFKA_FLUSH_INTERVAL_MS, 3e3), + logMessages: process.env.KAFKA_LOG_MESSAGES === "true", + sasl: process.env.KAFKA_SASL_USERNAME && process.env.KAFKA_SASL_PASSWORD ? 
{ + mechanism: process.env.KAFKA_SASL_MECHANISM || "plain", + username: process.env.KAFKA_SASL_USERNAME, + password: process.env.KAFKA_SASL_PASSWORD + } : void 0 + }, + db: { + host: process.env.POSTGRES_HOST_G5, + port: parseNumber(process.env.POSTGRES_PORT_G5, 5434), + user: process.env.POSTGRES_USER_G5, + password: process.env.POSTGRES_PASSWORD_G5, + database: process.env.POSTGRES_DATABASE_G5, + max: parseNumber(process.env.POSTGRES_MAX_CONNECTIONS_G5, 6), + ssl: process.env.POSTGRES_SSL_G5 === "true" ? { rejectUnauthorized: false } : void 0, + schema: process.env.DB_SCHEMA || "rcu_info", + table: process.env.DB_TABLE || "rcu_info_events_g5", + roomStatusSchema: process.env.DB_ROOM_STATUS_SCHEMA || "room_status", + roomStatusTable: process.env.DB_ROOM_STATUS_TABLE || "room_status_moment_g5" + }, + redis: { + host: process.env.REDIS_HOST || "localhost", + port: parseNumber(process.env.REDIS_PORT, 6379), + password: process.env.REDIS_PASSWORD || void 0, + db: parseNumber(process.env.REDIS_DB, 0), + projectName: process.env.REDIS_PROJECT_NAME || "bls-onoffline", + apiBaseUrl: process.env.REDIS_API_BASE_URL || `http://localhost:${parseNumber(process.env.PORT, 3001)}` + } +}; +const format = (level, message, context) => { + const payload = { + level, + message, + timestamp: Date.now(), + ...context ? 
{ context } : {} + }; + return JSON.stringify(payload); +}; +const logger = { + info(message, context) { + process.stdout.write(`${format("info", message, context)} +`); + }, + error(message, context) { + process.stderr.write(`${format("error", message, context)} +`); + }, + warn(message, context) { + process.stderr.write(`${format("warn", message, context)} +`); + } +}; +const { Pool } = pg; +const registerColumns = [ + "ts_ms", + "hotel_id", + "room_id", + "device_id", + "write_ts_ms", + "is_send", + "udp_raw", + "extra", + "ip_type", + "model_num", + "server_ip", + "ip", + "subnet_mask", + "gateway", + "dns", + "app_version", + "rcu_time", + "launcher_version", + "mac", + "room_type_id", + "config_version", + "room_status", + "season", + "sys_lock_status", + "authorization_time", + "authorization_days", + "room_num_remark", + "room_type_remark", + "room_remark", + "mcu_name", + "central_control_name", + "configure_hotel_name", + "configure_room_type_name" +]; +const roomStatusColumns = [ + "hotel_id", + "room_id", + "app_version", + "launcher_version", + "config_version", + "upgrade_ts_ms", + "register_ts_ms" +]; +class DatabaseManager { + constructor(dbConfig) { + this.pool = new Pool({ + host: dbConfig.host, + port: dbConfig.port, + user: dbConfig.user, + password: dbConfig.password, + database: dbConfig.database, + max: dbConfig.max, + ssl: dbConfig.ssl + }); + } + async insertRegisterRows({ schema, table, rows }) { + if (!rows || rows.length === 0) { + return; + } + const statement = ` + INSERT INTO ${schema}.${table} (${registerColumns.join(", ")}) + SELECT * + FROM UNNEST( + $1::int8[], + $2::int2[], + $3::text[], + $4::text[], + $5::int8[], + $6::int2[], + $7::text[], + $8::jsonb[], + $9::int2[], + $10::text[], + $11::text[], + $12::text[], + $13::text[], + $14::text[], + $15::text[], + $16::text[], + $17::text[], + $18::text[], + $19::text[], + $20::int8[], + $21::text[], + $22::int4[], + $23::int4[], + $24::int4[], + $25::text[], + $26::text[], + 
$27::text[], + $28::text[], + $29::text[], + $30::text[], + $31::text[], + $32::text[], + $33::text[] + ) + ON CONFLICT DO NOTHING + `; + try { + const params = registerColumns.map((column) => rows.map((row) => row[column] ?? null)); + await this.pool.query(statement, params); + } catch (error) { + logger.error("Register table insert failed", { + error: error?.message, + schema, + table, + rowsLength: rows.length + }); + throw error; + } + } + async updateRoomStatusRows({ schema, table, rows }) { + if (!rows || rows.length === 0) { + return; + } + const statement = ` + WITH incoming AS ( + SELECT * + FROM UNNEST( + $1::int2[], + $2::text[], + $3::text[], + $4::text[], + $5::text[], + $6::int8[], + $7::int8[] + ) AS u(${roomStatusColumns.join(", ")}) + ), dedup AS ( + SELECT DISTINCT ON (hotel_id, room_id) + hotel_id, + room_id, + app_version, + launcher_version, + config_version, + upgrade_ts_ms, + register_ts_ms + FROM incoming + ORDER BY hotel_id, room_id, register_ts_ms DESC + ), existing AS ( + SELECT i.*, t.device_id + FROM dedup i + INNER JOIN ${schema}.${table} t + ON t.hotel_id = i.hotel_id + AND t.room_id = i.room_id + ) + INSERT INTO ${schema}.${table} ( + hotel_id, + room_id, + device_id, + app_version, + launcher_version, + config_version, + upgrade_ts_ms, + register_ts_ms + ) + SELECT + hotel_id, + room_id, + device_id, + app_version, + launcher_version, + config_version, + upgrade_ts_ms, + register_ts_ms + FROM existing + ON CONFLICT (hotel_id, room_id) DO UPDATE + SET + app_version = EXCLUDED.app_version, + launcher_version = EXCLUDED.launcher_version, + config_version = EXCLUDED.config_version, + upgrade_ts_ms = EXCLUDED.upgrade_ts_ms, + register_ts_ms = EXCLUDED.register_ts_ms + `; + try { + const params = roomStatusColumns.map((column) => rows.map((row) => row[column] ?? 
null)); + await this.pool.query(statement, params); + } catch (error) { + logger.error("Room status table update failed", { + error: error?.message, + schema, + table, + rowsLength: rows.length + }); + throw error; + } + } + async checkConnection() { + let client; + try { + const connectPromise = this.pool.connect(); + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error("Connection timeout")), 5e3); + }); + try { + client = await Promise.race([connectPromise, timeoutPromise]); + } catch (raceError) { + connectPromise.then((c) => c.release()).catch(() => { + }); + throw raceError; + } + await client.query("SELECT 1"); + return true; + } catch (err) { + logger.error("Database check connection failed", { error: err.message }); + return false; + } finally { + if (client) { + client.release(); + } + } + } + async close() { + await this.pool.end(); + } +} +const dbManager = new DatabaseManager(config.db); +class OffsetTracker { + constructor() { + this.partitions = /* @__PURE__ */ new Map(); + } + // Called when a message is received (before processing) + add(topic, partition, offset) { + const key = `${topic}-${partition}`; + if (!this.partitions.has(key)) { + this.partitions.set(key, { nextCommitOffset: null, done: /* @__PURE__ */ new Set() }); + } + const state = this.partitions.get(key); + const numericOffset = Number(offset); + if (!Number.isFinite(numericOffset)) return; + if (state.nextCommitOffset === null) { + state.nextCommitOffset = numericOffset; + } else if (numericOffset < state.nextCommitOffset) { + state.nextCommitOffset = numericOffset; + } + } + // Called when a message is successfully processed + // Returns the next offset to commit (if any advancement is possible), or null + markDone(topic, partition, offset) { + const key = `${topic}-${partition}`; + const state = this.partitions.get(key); + if (!state) return null; + const numericOffset = Number(offset); + if (!Number.isFinite(numericOffset)) return null; + 
state.done.add(numericOffset); + if (state.nextCommitOffset === null) { + state.nextCommitOffset = numericOffset; + } + let advanced = false; + while (state.nextCommitOffset !== null && state.done.has(state.nextCommitOffset)) { + state.done.delete(state.nextCommitOffset); + state.nextCommitOffset += 1; + advanced = true; + } + if (!advanced) return null; + return state.nextCommitOffset; + } + clear() { + this.partitions.clear(); + } +} +const { ConsumerGroup } = kafka; +const createOneConsumer = ({ kafkaConfig, onMessage, onError, instanceIndex }) => { + const kafkaHost = kafkaConfig.brokers.join(","); + const clientId = instanceIndex === 0 ? kafkaConfig.clientId : `${kafkaConfig.clientId}-${instanceIndex}`; + const id = `${clientId}-${process.pid}-${Date.now()}`; + const maxInFlight = Number.isFinite(kafkaConfig.maxInFlight) ? kafkaConfig.maxInFlight : 5e3; + const commitIntervalMs = Number.isFinite(kafkaConfig.commitIntervalMs) ? kafkaConfig.commitIntervalMs : 200; + let inFlight = 0; + const tracker = new OffsetTracker(); + let pendingCommits = /* @__PURE__ */ new Map(); + let commitTimer = null; + const flushCommits = () => { + if (pendingCommits.size === 0) return; + const batch = pendingCommits; + pendingCommits = /* @__PURE__ */ new Map(); + consumer.sendOffsetCommitRequest( + Array.from(batch.values()), + (err) => { + if (err) { + for (const [k, v] of batch.entries()) { + pendingCommits.set(k, v); + } + logger.error("Kafka commit failed", { error: err?.message, count: batch.size }); + } + } + ); + }; + const scheduleCommitFlush = () => { + if (commitTimer) return; + commitTimer = setTimeout(() => { + commitTimer = null; + flushCommits(); + }, commitIntervalMs); + }; + const consumer = new ConsumerGroup( + { + kafkaHost, + groupId: kafkaConfig.groupId, + clientId, + id, + fromOffset: kafkaConfig.fromOffset || "latest", + protocol: ["roundrobin"], + outOfRangeOffset: "latest", + autoCommit: false, + autoCommitIntervalMs: kafkaConfig.autoCommitIntervalMs, + 
fetchMaxBytes: kafkaConfig.fetchMaxBytes, + fetchMinBytes: kafkaConfig.fetchMinBytes, + fetchMaxWaitMs: kafkaConfig.fetchMaxWaitMs, + sasl: kafkaConfig.sasl + }, + kafkaConfig.topic + ); + const tryResume = () => { + if (inFlight < maxInFlight && consumer.paused) { + consumer.resume(); + } + }; + consumer.on("message", (message) => { + inFlight += 1; + tracker.add(message.topic, message.partition, message.offset); + if (inFlight >= maxInFlight) { + consumer.pause(); + } + Promise.resolve(onMessage(message)).then(() => { + }).catch((error) => { + logger.error("Kafka message handling failed", { error: error?.message }); + if (onError) { + onError(error, message); + } + }).finally(() => { + const commitOffset = tracker.markDone(message.topic, message.partition, message.offset); + if (commitOffset !== null) { + const key = `${message.topic}-${message.partition}`; + pendingCommits.set(key, { + topic: message.topic, + partition: message.partition, + offset: commitOffset, + metadata: "m" + }); + scheduleCommitFlush(); + } + inFlight -= 1; + tryResume(); + }); + }); + consumer.on("error", (error) => { + logger.error("Kafka consumer error", { error: error?.message }); + if (onError) { + onError(error); + } + }); + consumer.on("connect", () => { + logger.info(`Kafka Consumer connected`, { + groupId: kafkaConfig.groupId, + clientId + }); + }); + consumer.on("rebalancing", () => { + logger.info(`Kafka Consumer rebalancing`, { + groupId: kafkaConfig.groupId, + clientId + }); + tracker.clear(); + pendingCommits.clear(); + if (commitTimer) { + clearTimeout(commitTimer); + commitTimer = null; + } + }); + consumer.on("rebalanced", () => { + logger.info("Kafka Consumer rebalanced", { clientId, groupId: kafkaConfig.groupId }); + }); + consumer.on("error", (err) => { + logger.error("Kafka Consumer Error", { error: err.message }); + }); + consumer.on("offsetOutOfRange", (err) => { + logger.warn("Offset out of range", { error: err.message, topic: err.topic, partition: err.partition }); 
+ }); + consumer.on("offsetOutOfRange", (error) => { + logger.warn(`Kafka Consumer offset out of range`, { + error: error?.message, + groupId: kafkaConfig.groupId, + clientId + }); + }); + consumer.on("close", () => { + if (commitTimer) { + clearTimeout(commitTimer); + commitTimer = null; + } + flushCommits(); + logger.warn(`Kafka Consumer closed`, { + groupId: kafkaConfig.groupId, + clientId + }); + }); + return consumer; +}; +const createKafkaConsumers = ({ kafkaConfig, onMessage, onError }) => { + const instances = Number.isFinite(kafkaConfig.consumerInstances) ? kafkaConfig.consumerInstances : 1; + const count = Math.max(1, instances); + return Array.from( + { length: count }, + (_, idx) => createOneConsumer({ kafkaConfig, onMessage, onError, instanceIndex: idx }) + ); +}; +const toNumber = (value) => { + if (value === void 0 || value === null || value === "") { + return null; + } + if (typeof value === "number") { + return value; + } + const parsed = Number(value); + return Number.isFinite(parsed) ? 
parsed : null; +}; /* toStringAllowEmpty: String() coercion that passes undefined/null through untouched; empty strings are preserved as-is. */ +const toStringAllowEmpty = (value) => { + if (value === void 0 || value === null) { + return value; + } + return String(value); +}; /* kafkaPayloadSchema: zod shape of a RegisterInfo Kafka payload. Every field is optional + nullable; numeric fields are preprocessed with toNumber, text fields with toStringAllowEmpty; udp_raw/extra are accepted as-is (z.any). */ +const kafkaPayloadSchema = z.object({ + ts_ms: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + upgrade_ts_ms: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + hotel_id: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + room_id: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + device_id: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + is_send: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + udp_raw: z.any().optional().nullable(), + extra: z.any().optional().nullable(), + ip_type: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + model_num: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + server_ip: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + ip: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + subnet_mask: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + gateway: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + dns: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + app_version: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + rcu_time: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + launcher_version: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + mac: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + room_type_id: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + config_version: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + room_status: z.preprocess(toNumber,
z.number().nullable()).optional().nullable(), + season: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + sys_lock_status: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + authorization_time: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + authorization_days: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + room_num_remark: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + room_type_remark: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + room_remark: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + mcu_name: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + central_control_name: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + configure_hotel_name: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + configure_room_type_name: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable() +}); /* normalizeText: stringify, strip NUL (\u0000) characters, and truncate to maxLength when one is given. */ +const normalizeText = (value, maxLength) => { + if (value === void 0 || value === null) { + return null; + } + const str = String(value).replace(/\u0000/g, ""); + if (maxLength && str.length > maxLength) { + return str.substring(0, maxLength); + } + return str; +}; /* sanitizeJsonValue: deep copy that strips NULs from every nested string; arrays and plain objects are recursed, other values returned untouched. */ +const sanitizeJsonValue = (value) => { + if (value === void 0 || value === null) { + return value; + } + if (typeof value === "string") { + return value.replace(/\u0000/g, ""); + } + if (Array.isArray(value)) { + return value.map((item) => sanitizeJsonValue(item)); + } + if (typeof value === "object") { + const out = {}; + for (const [k, v] of Object.entries(value)) { + out[k] = sanitizeJsonValue(v); + } + return out; + } + return value; +}; /* isLikelyBase64: heuristic — non-empty, length divisible by 4, base64 alphabet with at most 2 '=' pads. */ +const isLikelyBase64 = (text) => { + if (!text || text.length % 4 !== 0) { + return false; + } + return /^[A-Za-z0-9+/]+={0,2}$/.test(text); +}; +const normalizeInteger = 
/* (completes normalizeInteger begun on the previous line) undefined/null/"" or non-finite -> null; otherwise truncate toward zero. */ (value) => { + if (value === void 0 || value === null || value === "") { + return null; + } + const numeric = typeof value === "number" ? value : Number(value); + if (!Number.isFinite(numeric)) { + return null; + } + return Math.trunc(numeric); +}; /* inRangeOr: value when it is a number within [min, max], else fallback. */ +const inRangeOr = (value, min, max, fallback) => { + if (typeof value !== "number" || Number.isNaN(value) || value < min || value > max) { + return fallback; + } + return value; +}; /* normalizeTsMs: missing/invalid -> Date.now(); positive values below 1e11 are treated as epoch seconds and scaled to milliseconds. */ +const normalizeTsMs = (value) => { + const numeric = normalizeInteger(value); + if (numeric === null) { + return Date.now(); + } + if (numeric > 0 && numeric < 1e11) { + return numeric * 1e3; + } + return numeric; +}; /* normalizeUdpRaw: always emits base64 — strings that already look like base64 pass through (after NUL strip), Buffers/byte arrays are encoded, anything else is stringified then encoded. */ +const normalizeUdpRaw = (value) => { + if (value === void 0 || value === null) { + return null; + } + if (typeof value === "string") { + const text = value.replace(/\u0000/g, ""); + if (isLikelyBase64(text)) { + return text; + } + return Buffer.from(text, "utf8").toString("base64"); + } + if (Buffer.isBuffer(value)) { + return value.toString("base64"); + } + if (Array.isArray(value)) { + return Buffer.from(value).toString("base64"); + } + return Buffer.from(String(value), "utf8").toString("base64"); +}; /* normalizeExtra: coerce to a sanitized JSON value; JSON strings are parsed, non-object parse results wrapped as { value }, unparseable text as { raw }. */ +const normalizeExtra = (value) => { + if (value === void 0 || value === null || value === "") { + return null; + } + if (typeof value === "object") { + return sanitizeJsonValue(value); + } + if (typeof value === "string") { + try { + const parsed = JSON.parse(value); + if (parsed && typeof parsed === "object") { + return sanitizeJsonValue(parsed); + } + return sanitizeJsonValue({ value: parsed }); + } catch { + return sanitizeJsonValue({ raw: value }); + } + } + return sanitizeJsonValue({ raw: String(value) }); +}; /* pick: first defined of payload[snakeKey] / payload[pascalKey]. NOTE(review): every call site below passes the same key twice, so the second lookup is currently inert. */ +const pick = (payload, snakeKey, pascalKey) => { + if (payload[snakeKey] !== void 0) { + return payload[snakeKey]; + } + if (payload[pascalKey] !== void 0) { + return payload[pascalKey]; + } + return void 0; +}; /* buildRowsFromPayload: validate via kafkaPayloadSchema, then build one full event row (hotel_id clamped to the int2 range with fallback 0, text fields truncated to column widths) and one room-status update row keyed by (hotel_id, room_id). */ +const buildRowsFromPayload = (rawPayload) => { + const normalizedInput = { + ts_ms: pick(rawPayload,
"ts_ms", "ts_ms"), + upgrade_ts_ms: pick(rawPayload, "upgrade_ts_ms", "upgrade_ts_ms"), + hotel_id: pick(rawPayload, "hotel_id", "hotel_id"), + room_id: pick(rawPayload, "room_id", "room_id"), + device_id: pick(rawPayload, "device_id", "device_id"), + is_send: pick(rawPayload, "is_send", "is_send"), + udp_raw: pick(rawPayload, "udp_raw", "udp_raw"), + extra: pick(rawPayload, "extra", "extra"), + ip_type: pick(rawPayload, "ip_type", "ip_type"), + model_num: pick(rawPayload, "model_num", "model_num"), + server_ip: pick(rawPayload, "server_ip", "server_ip"), + ip: pick(rawPayload, "ip", "ip"), + subnet_mask: pick(rawPayload, "subnet_mask", "subnet_mask"), + gateway: pick(rawPayload, "gateway", "gateway"), + dns: pick(rawPayload, "dns", "dns"), + app_version: pick(rawPayload, "app_version", "app_version"), + rcu_time: pick(rawPayload, "rcu_time", "rcu_time"), + launcher_version: pick(rawPayload, "launcher_version", "launcher_version"), + mac: pick(rawPayload, "mac", "mac"), + room_type_id: pick(rawPayload, "room_type_id", "room_type_id"), + config_version: pick(rawPayload, "config_version", "config_version"), + room_status: pick(rawPayload, "room_status", "room_status"), + season: pick(rawPayload, "season", "season"), + sys_lock_status: pick(rawPayload, "sys_lock_status", "sys_lock_status"), + authorization_time: pick(rawPayload, "authorization_time", "authorization_time"), + authorization_days: pick(rawPayload, "authorization_days", "authorization_days"), + room_num_remark: pick(rawPayload, "room_num_remark", "room_num_remark"), + room_type_remark: pick(rawPayload, "room_type_remark", "room_type_remark"), + room_remark: pick(rawPayload, "room_remark", "room_remark"), + mcu_name: pick(rawPayload, "mcu_name", "mcu_name"), + central_control_name: pick(rawPayload, "central_control_name", "central_control_name"), + configure_hotel_name: pick(rawPayload, "configure_hotel_name", "configure_hotel_name"), + configure_room_type_name: pick(rawPayload, "configure_room_type_name", 
"configure_room_type_name") + }; + const payload = kafkaPayloadSchema.parse(normalizedInput); + const tsMs = normalizeTsMs(payload.ts_ms); + const hotelId = inRangeOr(normalizeInteger(payload.hotel_id), -32768, 32767, 0); + const roomId = normalizeText(payload.room_id, 50) || ""; /* registerRow mirrors the target event table; max lengths match the column widths (e.g. mac 17, ip 21). */ + const registerRow = { + ts_ms: tsMs, + hotel_id: hotelId, + room_id: roomId, + device_id: normalizeText(payload.device_id, 64), + write_ts_ms: Date.now(), + is_send: inRangeOr(normalizeInteger(payload.is_send), -32768, 32767, 0), + udp_raw: normalizeUdpRaw(payload.udp_raw), + extra: normalizeExtra(payload.extra), + ip_type: inRangeOr(normalizeInteger(payload.ip_type), -32768, 32767, null), + model_num: normalizeText(payload.model_num, 32), + server_ip: normalizeText(payload.server_ip, 21), + ip: normalizeText(payload.ip, 21), + subnet_mask: normalizeText(payload.subnet_mask, 15), + gateway: normalizeText(payload.gateway, 15), + dns: normalizeText(payload.dns, 15), + app_version: normalizeText(payload.app_version, 64), + rcu_time: normalizeText(payload.rcu_time, 25), + launcher_version: normalizeText(payload.launcher_version, 64), + mac: normalizeText(payload.mac, 17), + room_type_id: normalizeInteger(payload.room_type_id), + config_version: normalizeText(payload.config_version, 32), + room_status: inRangeOr(normalizeInteger(payload.room_status), -2147483648, 2147483647, null), + season: inRangeOr(normalizeInteger(payload.season), -2147483648, 2147483647, null), + sys_lock_status: inRangeOr(normalizeInteger(payload.sys_lock_status), -2147483648, 2147483647, null), + authorization_time: normalizeText(payload.authorization_time, 10), + authorization_days: normalizeText(payload.authorization_days, 10), + room_num_remark: normalizeText(payload.room_num_remark, 255), + room_type_remark: normalizeText(payload.room_type_remark, 64), + room_remark: normalizeText(payload.room_remark, 64), + mcu_name: normalizeText(payload.mcu_name, 255), + central_control_name: 
normalizeText(payload.central_control_name, 255), + configure_hotel_name: normalizeText(payload.configure_hotel_name, 255), + configure_room_type_name: normalizeText(payload.configure_room_type_name, 255) + }; /* roomStatusUpdateRow: version/timestamp fields to update on the existing room-status row for (hotel_id, room_id). */ + const roomStatusUpdateRow = { + hotel_id: hotelId, + room_id: roomId, + app_version: registerRow.app_version, + launcher_version: registerRow.launcher_version, + config_version: registerRow.config_version, + upgrade_ts_ms: normalizeTsMs(payload.upgrade_ts_ms), + register_ts_ms: tsMs + }; + return { + registerRows: [registerRow], + roomStatusRows: [roomStatusUpdateRow] + }; +}; /* parseMessageToRows: Kafka message -> row sets. Failures are tagged with error.type PARSE_ERROR / VALIDATION_ERROR for the error handler. NOTE(review): safeParse here plus schema.parse inside buildRowsFromPayload validates the payload twice. */ +const parseMessageToRows = (message) => { + const rawValue = message.value.toString(); + let payload; + try { + payload = JSON.parse(rawValue); + } catch (e) { + const error = new Error(`JSON Parse Error: ${e.message}`); + error.type = "PARSE_ERROR"; + throw error; + } + const validationResult = kafkaPayloadSchema.safeParse(payload); + if (!validationResult.success) { + const error = new Error(`Schema Validation Failed: ${JSON.stringify(validationResult.error.errors)}`); + error.type = "VALIDATION_ERROR"; + throw error; + } + return buildRowsFromPayload(payload); +}; /* MetricCollector: in-memory counters (flat and per-key) reported once per minute; getAndReset snapshots the state (deep-copying keyed counters) and zeroes it. */ +class MetricCollector { + constructor() { + this.reset(); + } + reset() { + this.metrics = { + kafka_pulled: 0, + parse_error: 0, + db_inserted: 0, + db_failed: 0, + db_insert_count: 0, + db_insert_ms_sum: 0, + batch_flush_count: 0, + batch_flush_ms_sum: 0 + }; + this.keyed = {}; + } + increment(metric, count = 1) { + if (this.metrics.hasOwnProperty(metric)) { + this.metrics[metric] += count; + } + } + incrementKeyed(metric, key, count = 1) { + if (!key) return; + if (!this.keyed[metric]) { + this.keyed[metric] = {}; + } + if (!Object.prototype.hasOwnProperty.call(this.keyed[metric], key)) { + this.keyed[metric][key] = 0; + } + this.keyed[metric][key] += count; + } + getAndReset() { + const current = { ...this.metrics }; + const keyed = JSON.parse(JSON.stringify(this.keyed)); + this.reset(); + return { ...current, keyed 
}; + } +} /* end of MetricCollector. NETWORK_CODES: Node errno codes plus PostgreSQL SQLSTATEs (57P03 cannot_connect_now, class 08 connection exceptions) treated as transient connectivity failures. */ +const NETWORK_CODES = /* @__PURE__ */ new Set([ + "ECONNREFUSED", + "ECONNRESET", + "EPIPE", + "ETIMEDOUT", + "ENOTFOUND", + "EHOSTUNREACH", + "ENETUNREACH", + "57P03", + "08006", + "08001", + "08000", + "08003" +]); /* isDbConnectionError: match err.code against the set above, else sniff the lowercased message for common pg/network failure texts. */ +const isDbConnectionError = (err) => { + if (typeof err?.code === "string" && NETWORK_CODES.has(err.code)) { + return true; + } + const message = typeof err?.message === "string" ? err.message.toLowerCase() : ""; + return message.includes("connection timeout") || message.includes("connection terminated") || message.includes("connection refused") || message.includes("terminating connection") || message.includes("econnrefused") || message.includes("econnreset") || message.includes("etimedout") || message.includes("could not connect") || message.includes("the database system is starting up") || message.includes("no pg_hba.conf entry"); +}; +const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); /* bootstrap: wires Kafka consumers -> in-memory queue -> interval-batched Postgres writes, with per-minute metrics, 10 s run counters, and graceful shutdown on SIGTERM/SIGINT. */ +const bootstrap = async () => { + logger.info("Starting register consumer", { + env: config.env, + kafka: { + brokers: config.kafka.brokers, + topic: config.kafka.topic, + groupId: config.kafka.groupId + }, + db: { + host: config.db.host, + port: config.db.port, + database: config.db.database, + schema: config.db.schema, + table: config.db.table, + roomStatusSchema: config.db.roomStatusSchema, + roomStatusTable: config.db.roomStatusTable + }, + flushIntervalMs: config.kafka.flushIntervalMs + }); + const metricCollector = new MetricCollector(); + const totals = { + kafkaPulled: 0, + dbInserted: 0, + parseError: 0, + dbFailed: 0 + }; /* flush interval is floored at 3000 ms regardless of config. */ + const flushIntervalMs = Math.max(3e3, Number.isFinite(config.kafka.flushIntervalMs) ?
config.kafka.flushIntervalMs : 3e3); + const queue = []; + let flushTimer = null; + let flushing = false; + const runCounterTimer = setInterval(() => { + logger.info("Run counters", { + kafkaPulled: totals.kafkaPulled, + dbInserted: totals.dbInserted, + parseError: totals.parseError, + dbFailed: totals.dbFailed + }); + }, 1e4); + const handleError = (error, message) => { + logger.error("Kafka processing error", { + error: error?.message, + type: error?.type, + topic: message?.topic, + partition: message?.partition, + offset: message?.offset + }); + }; /* every minute: snapshot-and-reset the collector and log averages. */ + cron.schedule("* * * * *", () => { + const metrics = metricCollector.getAndReset(); + const flushAvgMs = metrics.batch_flush_count > 0 ? (metrics.batch_flush_ms_sum / metrics.batch_flush_count).toFixed(1) : "0.0"; + const dbAvgMs = metrics.db_insert_count > 0 ? (metrics.db_insert_ms_sum / metrics.db_insert_count).toFixed(1) : "0.0"; + logger.info("Minute metrics", { + kafkaPulled: metrics.kafka_pulled, + parseError: metrics.parse_error, + dbInserted: metrics.db_inserted, + dbFailed: metrics.db_failed, + flushAvgMs, + dbAvgMs + }); + }); /* processValidRowsWithRetry: insert event rows then update room-status rows; on connection-class errors it retries forever with a 5 s sleep, any other error is rethrown to the caller (which bisects the batch). */ + const processValidRowsWithRetry = async (registerRows, roomStatusRows) => { + const startedAt = Date.now(); + while (true) { + try { + await dbManager.insertRegisterRows({ + schema: config.db.schema, + table: config.db.table, + rows: registerRows + }); + await dbManager.updateRoomStatusRows({ + schema: config.db.roomStatusSchema, + table: config.db.roomStatusTable, + rows: roomStatusRows + }); + metricCollector.increment("db_insert_count", 1); + metricCollector.increment("db_insert_ms_sum", Date.now() - startedAt); + metricCollector.increment("db_inserted", registerRows.length); + totals.dbInserted += registerRows.length; + return; + } catch (err) { + if (!isDbConnectionError(err)) { + throw err; + } + logger.warn("Database unavailable, retrying in 5s", { error: err?.message }); + await sleep(5e3); + } + } + }; /* scheduleFlush: arm a single pending flush timer (no-op if one is armed). */ + const scheduleFlush = () => { + if (flushTimer) { + return; + } + 
flushTimer = setTimeout(() => { + flushTimer = null; + void flushQueue(); + }, flushIntervalMs); + }; /* flushQueue: drain the queue once. Parse failures are counted and their Kafka promises resolved immediately (offset still advances — at-least-once with bad-message skip); DB failures bisect the batch to isolate poison rows. NOTE(review): "flushing" is reset without try/finally, so an unexpected rejection escaping insertParsedItems would wedge flushing=true. */ + const flushQueue = async () => { + if (flushing) { + return; + } + if (queue.length === 0) { + return; + } + flushing = true; + const startedAt = Date.now(); + const currentBatch = queue.splice(0, queue.length); + const parsedItems = []; + for (const item of currentBatch) { + try { + const parsed = parseMessageToRows(item.message); + parsedItems.push({ item, parsed }); + } catch (err) { + metricCollector.increment("parse_error"); + totals.parseError += 1; + handleError(err, item.message); + item.resolve(); + } + } + const insertParsedItems = async (items) => { + if (items.length === 0) { + return; + } + const registerRows = items.flatMap((it) => it.parsed.registerRows); + const roomStatusRows = items.flatMap((it) => it.parsed.roomStatusRows); + try { + await processValidRowsWithRetry(registerRows, roomStatusRows); + } catch (err) { + if (items.length > 1) { + const mid = Math.floor(items.length / 2); + await insertParsedItems(items.slice(0, mid)); + await insertParsedItems(items.slice(mid)); + return; + } + metricCollector.increment("db_failed", 1); + totals.dbFailed += 1; + handleError(err, items[0].item.message); + } + }; + if (parsedItems.length > 0) { + await insertParsedItems(parsedItems); + for (const parsedItem of parsedItems) { + parsedItem.item.resolve(); + } + } + metricCollector.increment("batch_flush_count", 1); + metricCollector.increment("batch_flush_ms_sum", Date.now() - startedAt); + flushing = false; + if (queue.length > 0) { + scheduleFlush(); + } + }; /* handleMessage: enqueue and return a promise that resolves only after the batch flush completes, so the consumer will not commit the offset earlier. */ + const handleMessage = (message) => { + metricCollector.increment("kafka_pulled"); + totals.kafkaPulled += 1; + return new Promise((resolve) => { + queue.push({ message, resolve }); + scheduleFlush(); + }); + }; + const consumers = createKafkaConsumers({ + kafkaConfig: config.kafka, + onMessage: handleMessage, + onError: handleError + }); + const shutdown = async (signal) => { + 
logger.info(`Received ${signal}, shutting down...`); + try { + if (flushTimer) { + clearTimeout(flushTimer); + flushTimer = null; + } + clearInterval(runCounterTimer); + await flushQueue(); + if (consumers && consumers.length > 0) { + await Promise.all(consumers.map((consumer) => new Promise((resolve) => consumer.close(true, resolve)))); + } + await dbManager.close(); + logger.info("Run summary", { + kafkaPulled: totals.kafkaPulled, + dbInserted: totals.dbInserted, + parseError: totals.parseError, + dbFailed: totals.dbFailed + }); + process.exit(0); + } catch (err) { + logger.error("Error during shutdown", { error: err?.message }); + process.exit(1); + } + }; + process.on("SIGTERM", () => shutdown("SIGTERM")); + process.on("SIGINT", () => shutdown("SIGINT")); +}; +bootstrap().catch((error) => { + logger.error("Service bootstrap failed", { error: error?.message }); + process.exit(1); +}); diff --git a/bls-register-backend/ecosystem.config.cjs b/bls-register-backend/ecosystem.config.cjs new file mode 100644 index 0000000..ecd2d19 --- /dev/null +++ b/bls-register-backend/ecosystem.config.cjs @@ -0,0 +1,22 @@ +module.exports = { + apps: [{ + name: 'bls-register', + script: 'dist/index.js', + instances: 1, + exec_mode: 'fork', + autorestart: true, + watch: false, + max_memory_restart: '1G', + env_file: '.env', + env: { + NODE_ENV: 'production', + PORT: 3001 + }, + error_file: './logs/error.log', + out_file: './logs/out.log', + log_date_format: 'YYYY-MM-DD HH:mm:ss Z', + merge_logs: true, + kill_timeout: 5000, + time: true + }] +}; diff --git a/bls-register-backend/openspec/AGENTS.md b/bls-register-backend/openspec/AGENTS.md new file mode 100644 index 0000000..96ab0bb --- /dev/null +++ b/bls-register-backend/openspec/AGENTS.md @@ -0,0 +1,456 @@ +# OpenSpec Instructions + +Instructions for AI coding assistants using OpenSpec for spec-driven development. 
+ +## TL;DR Quick Checklist + +- Search existing work: `openspec spec list --long`, `openspec list` (use `rg` only for full-text search) +- Decide scope: new capability vs modify existing capability +- Pick a unique `change-id`: kebab-case, verb-led (`add-`, `update-`, `remove-`, `refactor-`) +- Scaffold: `proposal.md`, `tasks.md`, `design.md` (only if needed), and delta specs per affected capability +- Write deltas: use `## ADDED|MODIFIED|REMOVED|RENAMED Requirements`; include at least one `#### Scenario:` per requirement +- Validate: `openspec validate [change-id] --strict` and fix issues +- Request approval: Do not start implementation until proposal is approved + +## Three-Stage Workflow + +### Stage 1: Creating Changes +Create proposal when you need to: +- Add features or functionality +- Make breaking changes (API, schema) +- Change architecture or patterns +- Optimize performance (changes behavior) +- Update security patterns + +Triggers (examples): +- "Help me create a change proposal" +- "Help me plan a change" +- "Help me create a proposal" +- "I want to create a spec proposal" +- "I want to create a spec" + +Loose matching guidance: +- Contains one of: `proposal`, `change`, `spec` +- With one of: `create`, `plan`, `make`, `start`, `help` + +Skip proposal for: +- Bug fixes (restore intended behavior) +- Typos, formatting, comments +- Dependency updates (non-breaking) +- Configuration changes +- Tests for existing behavior + +**Workflow** +1. Review `openspec/project.md`, `openspec list`, and `openspec list --specs` to understand current context. +2. Choose a unique verb-led `change-id` and scaffold `proposal.md`, `tasks.md`, optional `design.md`, and spec deltas under `openspec/changes//`. +3. Draft spec deltas using `## ADDED|MODIFIED|REMOVED Requirements` with at least one `#### Scenario:` per requirement. +4. Run `openspec validate --strict` and resolve any issues before sharing the proposal. 
+ +### Stage 2: Implementing Changes +Track these steps as TODOs and complete them one by one. +1. **Read proposal.md** - Understand what's being built +2. **Read design.md** (if exists) - Review technical decisions +3. **Read tasks.md** - Get implementation checklist +4. **Implement tasks sequentially** - Complete in order +5. **Confirm completion** - Ensure every item in `tasks.md` is finished before updating statuses +6. **Update checklist** - After all work is done, set every task to `- [x]` so the list reflects reality +7. **Approval gate** - Do not start implementation until the proposal is reviewed and approved + +### Stage 3: Archiving Changes +After deployment, create separate PR to: +- Move `changes/[name]/` → `changes/archive/YYYY-MM-DD-[name]/` +- Update `specs/` if capabilities changed +- Use `openspec archive --skip-specs --yes` for tooling-only changes (always pass the change ID explicitly) +- Run `openspec validate --strict` to confirm the archived change passes checks + +## Before Any Task + +**Context Checklist:** +- [ ] Read relevant specs in `specs/[capability]/spec.md` +- [ ] Check pending changes in `changes/` for conflicts +- [ ] Read `openspec/project.md` for conventions +- [ ] Run `openspec list` to see active changes +- [ ] Run `openspec list --specs` to see existing capabilities + +**Before Creating Specs:** +- Always check if capability already exists +- Prefer modifying existing specs over creating duplicates +- Use `openspec show [spec]` to review current state +- If request is ambiguous, ask 1–2 clarifying questions before scaffolding + +### Search Guidance +- Enumerate specs: `openspec spec list --long` (or `--json` for scripts) +- Enumerate changes: `openspec list` (or `openspec change list --json` - deprecated but available) +- Show details: + - Spec: `openspec show --type spec` (use `--json` for filters) + - Change: `openspec show --json --deltas-only` +- Full-text search (use ripgrep): `rg -n "Requirement:|Scenario:" 
openspec/specs` + +## Quick Start + +### CLI Commands + +```bash +# Essential commands +openspec list # List active changes +openspec list --specs # List specifications +openspec show [item] # Display change or spec +openspec validate [item] # Validate changes or specs +openspec archive [--yes|-y] # Archive after deployment (add --yes for non-interactive runs) + +# Project management +openspec init [path] # Initialize OpenSpec +openspec update [path] # Update instruction files + +# Interactive mode +openspec show # Prompts for selection +openspec validate # Bulk validation mode + +# Debugging +openspec show [change] --json --deltas-only +openspec validate [change] --strict +``` + +### Command Flags + +- `--json` - Machine-readable output +- `--type change|spec` - Disambiguate items +- `--strict` - Comprehensive validation +- `--no-interactive` - Disable prompts +- `--skip-specs` - Archive without spec updates +- `--yes`/`-y` - Skip confirmation prompts (non-interactive archive) + +## Directory Structure + +``` +openspec/ +├── project.md # Project conventions +├── specs/ # Current truth - what IS built +│ └── [capability]/ # Single focused capability +│ ├── spec.md # Requirements and scenarios +│ └── design.md # Technical patterns +├── changes/ # Proposals - what SHOULD change +│ ├── [change-name]/ +│ │ ├── proposal.md # Why, what, impact +│ │ ├── tasks.md # Implementation checklist +│ │ ├── design.md # Technical decisions (optional; see criteria) +│ │ └── specs/ # Delta changes +│ │ └── [capability]/ +│ │ └── spec.md # ADDED/MODIFIED/REMOVED +│ └── archive/ # Completed changes +``` + +## Creating Change Proposals + +### Decision Tree + +``` +New request? +├─ Bug fix restoring spec behavior? → Fix directly +├─ Typo/format/comment? → Fix directly +├─ New feature/capability? → Create proposal +├─ Breaking change? → Create proposal +├─ Architecture change? → Create proposal +└─ Unclear? → Create proposal (safer) +``` + +### Proposal Structure + +1. 
**Create directory:** `changes/[change-id]/` (kebab-case, verb-led, unique) + +2. **Write proposal.md:** +```markdown +# Change: [Brief description of change] + +## Why +[1-2 sentences on problem/opportunity] + +## What Changes +- [Bullet list of changes] +- [Mark breaking changes with **BREAKING**] + +## Impact +- Affected specs: [list capabilities] +- Affected code: [key files/systems] +``` + +3. **Create spec deltas:** `specs/[capability]/spec.md` +```markdown +## ADDED Requirements +### Requirement: New Feature +The system SHALL provide... + +#### Scenario: Success case +- **WHEN** user performs action +- **THEN** expected result + +## MODIFIED Requirements +### Requirement: Existing Feature +[Complete modified requirement] + +## REMOVED Requirements +### Requirement: Old Feature +**Reason**: [Why removing] +**Migration**: [How to handle] +``` +If multiple capabilities are affected, create multiple delta files under `changes/[change-id]/specs//spec.md`—one per capability. + +4. **Create tasks.md:** +```markdown +## 1. Implementation +- [ ] 1.1 Create database schema +- [ ] 1.2 Implement API endpoint +- [ ] 1.3 Add frontend component +- [ ] 1.4 Write tests +``` + +5. **Create design.md when needed:** +Create `design.md` if any of the following apply; otherwise omit it: +- Cross-cutting change (multiple services/modules) or a new architectural pattern +- New external dependency or significant data model changes +- Security, performance, or migration complexity +- Ambiguity that benefits from technical decisions before coding + +Minimal `design.md` skeleton: +```markdown +## Context +[Background, constraints, stakeholders] + +## Goals / Non-Goals +- Goals: [...] +- Non-Goals: [...] + +## Decisions +- Decision: [What and why] +- Alternatives considered: [Options + rationale] + +## Risks / Trade-offs +- [Risk] → Mitigation + +## Migration Plan +[Steps, rollback] + +## Open Questions +- [...] 
+``` + +## Spec File Format + +### Critical: Scenario Formatting + +**CORRECT** (use #### headers): +```markdown +#### Scenario: User login success +- **WHEN** valid credentials provided +- **THEN** return JWT token +``` + +**WRONG** (don't use bullets or bold): +```markdown +- **Scenario: User login** ❌ +**Scenario**: User login ❌ +### Scenario: User login ❌ +``` + +Every requirement MUST have at least one scenario. + +### Requirement Wording +- Use SHALL/MUST for normative requirements (avoid should/may unless intentionally non-normative) + +### Delta Operations + +- `## ADDED Requirements` - New capabilities +- `## MODIFIED Requirements` - Changed behavior +- `## REMOVED Requirements` - Deprecated features +- `## RENAMED Requirements` - Name changes + +Headers matched with `trim(header)` - whitespace ignored. + +#### When to use ADDED vs MODIFIED +- ADDED: Introduces a new capability or sub-capability that can stand alone as a requirement. Prefer ADDED when the change is orthogonal (e.g., adding "Slash Command Configuration") rather than altering the semantics of an existing requirement. +- MODIFIED: Changes the behavior, scope, or acceptance criteria of an existing requirement. Always paste the full, updated requirement content (header + all scenarios). The archiver will replace the entire requirement with what you provide here; partial deltas will drop previous details. +- RENAMED: Use when only the name changes. If you also change behavior, use RENAMED (name) plus MODIFIED (content) referencing the new name. + +Common pitfall: Using MODIFIED to add a new concern without including the previous text. This causes loss of detail at archive time. If you aren’t explicitly changing the existing requirement, add a new requirement under ADDED instead. + +Authoring a MODIFIED requirement correctly: +1) Locate the existing requirement in `openspec/specs//spec.md`. +2) Copy the entire requirement block (from `### Requirement: ...` through its scenarios). 
+3) Paste it under `## MODIFIED Requirements` and edit to reflect the new behavior. +4) Ensure the header text matches exactly (whitespace-insensitive) and keep at least one `#### Scenario:`. + +Example for RENAMED: +```markdown +## RENAMED Requirements +- FROM: `### Requirement: Login` +- TO: `### Requirement: User Authentication` +``` + +## Troubleshooting + +### Common Errors + +**"Change must have at least one delta"** +- Check `changes/[name]/specs/` exists with .md files +- Verify files have operation prefixes (## ADDED Requirements) + +**"Requirement must have at least one scenario"** +- Check scenarios use `#### Scenario:` format (4 hashtags) +- Don't use bullet points or bold for scenario headers + +**Silent scenario parsing failures** +- Exact format required: `#### Scenario: Name` +- Debug with: `openspec show [change] --json --deltas-only` + +### Validation Tips + +```bash +# Always use strict mode for comprehensive checks +openspec validate [change] --strict + +# Debug delta parsing +openspec show [change] --json | jq '.deltas' + +# Check specific requirement +openspec show [spec] --json -r 1 +``` + +## Happy Path Script + +```bash +# 1) Explore current state +openspec spec list --long +openspec list +# Optional full-text search: +# rg -n "Requirement:|Scenario:" openspec/specs +# rg -n "^#|Requirement:" openspec/changes + +# 2) Choose change id and scaffold +CHANGE=add-two-factor-auth +mkdir -p openspec/changes/$CHANGE/{specs/auth} +printf "## Why\n...\n\n## What Changes\n- ...\n\n## Impact\n- ...\n" > openspec/changes/$CHANGE/proposal.md +printf "## 1. Implementation\n- [ ] 1.1 ...\n" > openspec/changes/$CHANGE/tasks.md + +# 3) Add deltas (example) +cat > openspec/changes/$CHANGE/specs/auth/spec.md << 'EOF' +## ADDED Requirements +### Requirement: Two-Factor Authentication +Users MUST provide a second factor during login. 
+ +#### Scenario: OTP required +- **WHEN** valid credentials are provided +- **THEN** an OTP challenge is required +EOF + +# 4) Validate +openspec validate $CHANGE --strict +``` + +## Multi-Capability Example + +``` +openspec/changes/add-2fa-notify/ +├── proposal.md +├── tasks.md +└── specs/ + ├── auth/ + │ └── spec.md # ADDED: Two-Factor Authentication + └── notifications/ + └── spec.md # ADDED: OTP email notification +``` + +auth/spec.md +```markdown +## ADDED Requirements +### Requirement: Two-Factor Authentication +... +``` + +notifications/spec.md +```markdown +## ADDED Requirements +### Requirement: OTP Email Notification +... +``` + +## Best Practices + +### Simplicity First +- Default to <100 lines of new code +- Single-file implementations until proven insufficient +- Avoid frameworks without clear justification +- Choose boring, proven patterns + +### Complexity Triggers +Only add complexity with: +- Performance data showing current solution too slow +- Concrete scale requirements (>1000 users, >100MB data) +- Multiple proven use cases requiring abstraction + +### Clear References +- Use `file.ts:42` format for code locations +- Reference specs as `specs/auth/spec.md` +- Link related changes and PRs + +### Capability Naming +- Use verb-noun: `user-auth`, `payment-capture` +- Single purpose per capability +- 10-minute understandability rule +- Split if description needs "AND" + +### Change ID Naming +- Use kebab-case, short and descriptive: `add-two-factor-auth` +- Prefer verb-led prefixes: `add-`, `update-`, `remove-`, `refactor-` +- Ensure uniqueness; if taken, append `-2`, `-3`, etc. + +## Tool Selection Guide + +| Task | Tool | Why | +|------|------|-----| +| Find files by pattern | Glob | Fast pattern matching | +| Search code content | Grep | Optimized regex search | +| Read specific files | Read | Direct file access | +| Explore unknown scope | Task | Multi-step investigation | + +## Error Recovery + +### Change Conflicts +1. 
Run `openspec list` to see active changes +2. Check for overlapping specs +3. Coordinate with change owners +4. Consider combining proposals + +### Validation Failures +1. Run with `--strict` flag +2. Check JSON output for details +3. Verify spec file format +4. Ensure scenarios properly formatted + +### Missing Context +1. Read project.md first +2. Check related specs +3. Review recent archives +4. Ask for clarification + +## Quick Reference + +### Stage Indicators +- `changes/` - Proposed, not yet built +- `specs/` - Built and deployed +- `archive/` - Completed changes + +### File Purposes +- `proposal.md` - Why and what +- `tasks.md` - Implementation steps +- `design.md` - Technical decisions +- `spec.md` - Requirements and behavior + +### CLI Essentials +```bash +openspec list # What's in progress? +openspec show [item] # View details +openspec validate --strict # Is it correct? +openspec archive [--yes|-y] # Mark complete (add --yes for automation) +``` + +Remember: Specs are truth. Changes are proposals. Keep them in sync. diff --git a/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/proposal.md b/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/proposal.md new file mode 100644 index 0000000..14520c4 --- /dev/null +++ b/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/proposal.md @@ -0,0 +1,25 @@ +# Proposal: Build RCU Register Kafka Consumer (G5) + +## Why +当前 `bls-register-backend` 需要从 Kafka 主题 `blwlog4Nodejs-rcu-register-topic` 消费高吞吐注册数据,并写入 G5 库。现有模板项目的字段模型与落库目标不匹配,且缺少 3 秒固定频率落库和双写更新策略。 + +## What Changes +1. 新增 Register 数据模型解析与字段类型校验,兼容 C# RegisterInfo 字段命名。 +2. 新增值域保护策略:`hotel_id` 超出 PostgreSQL `int2` 值域时强制写 `0`。 +3. 落库改为固定 3 秒全局批量 flush 一次。 +4. 双写策略: + - 完整写入 `rcu_info.rcu_info_events_g5`。 + - 更新 `room_status.room_status_moment_g5` 的 `app_version`、`launcher_version`、`config_version`、`upgrade_ts_ms`、`register_ts_ms`,按 `(hotel_id, room_id)` 定位,缺失记录忽略。 +5. 
保持 Kafka 至少一次语义:消息处理 Promise 在 flush 完成后才 resolve。 + +## npm Package Strategy +- 继续复用成熟依赖,不重复造轮子: + - `kafka-node@^5.0.0` 负责 Kafka ConsumerGroup。 + - `pg@^8.11.5` 负责 PostgreSQL 批量写入。 + - `zod@^4.3.6` 负责结构与类型预处理校验。 +- 本次不新增第三方依赖,降低变更风险。 + +## Impact +- 提升入库一致性(按库字段约束进行落库转换)。 +- 控制数据库写入频率,满足生产限制。 +- 降低失效数据对消费位点推进的阻塞风险。 diff --git a/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/specs/rcu-register/spec.md b/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/specs/rcu-register/spec.md new file mode 100644 index 0000000..805855e --- /dev/null +++ b/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/specs/rcu-register/spec.md @@ -0,0 +1,46 @@ +# Spec: rcu-register-consumer + +## Requirement: Kafka Register 消费 +系统 SHALL 从 `blwlog4Nodejs-rcu-register-topic` 消费 Register 数据并执行结构化校验。 + +### Scenario: 解析 C# RegisterInfo 字段 +- **GIVEN** Kafka 消息为 RegisterInfo JSON +- **WHEN** 消息被解析 +- **THEN** 字段转换为数据库字段类型后参与落库 + +## Requirement: 值域与类型保护 +系统 SHALL 对写库字段执行类型转换和值域保护。 + +### Scenario: hotel_id 超范围保护 +- **GIVEN** `hotel_id` 超出 `int2` 范围 +- **WHEN** 数据准备写入数据库 +- **THEN** `hotel_id` 被写为 `0` + +## Requirement: 固定 3 秒写库 +系统 SHALL 采用全局缓冲并每 3 秒执行一次批量写库。 + +### Scenario: 高频消息输入 +- **GIVEN** 3 秒内收到多条消息 +- **WHEN** 到达 flush 时刻 +- **THEN** 系统执行一次批量写入并统一确认消费 + +## Requirement: 双写目标库 +系统 SHALL 双写 G5 库中的两个目标表。 + +### Scenario: 完整事件入库 +- **GIVEN** 合法 Register 数据 +- **WHEN** 执行写库 +- **THEN** 完整写入 `rcu_info.rcu_info_events_g5` + +### Scenario: 房态瞬时表追加更新 +- **GIVEN** 合法 Register 数据 +- **WHEN** 执行写库 +- **THEN** 更新 `room_status.room_status_moment_g5` 的 `app_version`、`launcher_version`、`config_version`、`upgrade_ts_ms`、`register_ts_ms` + +## Requirement: room_status 不新增行 +系统 SHALL 仅更新已存在的 `(hotel_id, room_id)` 行,不允许新增记录。 + +### Scenario: 主键不存在 +- **GIVEN** `room_status_moment_g5` 中不存在对应 `(hotel_id, room_id)` +- **WHEN** 执行更新 +- **THEN** 忽略该条 room_status 更新且继续推进 Kafka 消费位点 diff --git 
a/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/tasks.md b/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/tasks.md new file mode 100644 index 0000000..899a32f --- /dev/null +++ b/bls-register-backend/openspec/changes/2026-03-14-build-rcu-register-consumer/tasks.md @@ -0,0 +1,16 @@ +## 1. Specification +- [x] 1.1 定义 Register 消费与双写需求规格 +- [x] 1.2 定义 3 秒固定写库频率需求 +- [x] 1.3 定义 room_status 仅更新不新增行需求 + +## 2. Implementation +- [x] 2.1 改造配置默认值为 register 主题与 G5 目标表 +- [x] 2.2 实现 Register payload 类型转换与值域保护 +- [x] 2.3 实现 rcu_info_events_g5 批量插入 +- [x] 2.4 实现 room_status_moment_g5 批量更新(仅已存在记录) +- [x] 2.5 实现全局 3 秒 flush 与 Kafka 回调对齐 + +## 3. Verification +- [x] 3.1 更新处理器单测(hotel_id、字段映射、类型转换) +- [x] 3.2 运行 `npm run test` +- [x] 3.3 运行 `npm run spec:validate` diff --git a/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md b/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md new file mode 100644 index 0000000..b01bdeb --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md @@ -0,0 +1,17 @@ +# Change: Fix Kafka Partitioning and Schema Issues + +## Why +Production deployment revealed issues with data ingestion: +1. Kafka Topic name changed to include partition suffix. +2. Legacy data contains second-level timestamps (1970s) causing partition lookup failures in PostgreSQL (which expects ms). +3. Variable-length fields (reboot reason, status) exceeded VARCHAR(10) limits, causing crashes. + +## What Changes +- **Modified Requirement**: Update Kafka Topic to `blwlog4Nodejs-rcu-onoffline-topic-0`. +- **New Requirement**: Implement heuristic timestamp conversion (Sec -> MS) for values < 100B. +- **New Requirement**: Truncate specific fields to VARCHAR(255) to prevent DB rejection. +- **Modified Requirement**: Update DB Schema to VARCHAR(255) for robustness. 
+ +## Impact +- Affected specs: `onoffline` +- Affected code: `src/processor/index.js`, `scripts/init_db.sql` diff --git a/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md b/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md new file mode 100644 index 0000000..d5d68f5 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md @@ -0,0 +1,25 @@ +## MODIFIED Requirements +### Requirement: 消费并落库 +系统 SHALL 从 blwlog4Nodejs-rcu-onoffline-topic-0 消费消息,并写入 log_platform.onoffline.onoffline_record。 + +#### Scenario: 非重启数据写入 +- **GIVEN** RebootReason 为空或不存在 +- **WHEN** 消息被处理 +- **THEN** current_status 等于 CurrentStatus (截断至 255 字符) + +## ADDED Requirements +### Requirement: 字段长度限制与截断 +系统 SHALL 将部分变长字段截断至数据库允许的最大长度 (VARCHAR(255)),防止写入失败。 + +#### Scenario: 超长字段处理 +- **GIVEN** LauncherVersion, CurrentStatus 或 RebootReason 超过 255 字符 +- **WHEN** 消息被处理 +- **THEN** 字段被截断为前 255 个字符并入库 + +### Requirement: 时间戳单位自动识别 +系统 SHALL 自动识别 UnixTime 字段是秒还是毫秒,并统一转换为毫秒。 + +#### Scenario: 秒级时间戳转换 +- **GIVEN** UnixTime < 100000000000 (约 1973 年前) +- **WHEN** 解析时间戳 +- **THEN** 自动乘以 1000 转换为毫秒 diff --git a/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md b/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md new file mode 100644 index 0000000..4cd00c5 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md @@ -0,0 +1,6 @@ +## 1. 
Implementation +- [x] Update Kafka Topic in .env and config +- [x] Implement timestamp unit detection and conversion in processor +- [x] Implement field truncation logic in processor +- [x] Update database schema definition (init_db.sql) to VARCHAR(255) +- [x] Verify data ingestion with production stream diff --git a/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md b/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md new file mode 100644 index 0000000..1ef4611 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md @@ -0,0 +1,18 @@ +# Change: Optimize Kafka Consumption Performance + +## Why +User reports extremely slow Kafka consumption. Current implementation processes and inserts messages one-by-one, which creates a bottleneck at the database network round-trip time (RTT). + +## What Changes +- **New Requirement**: Implement Batch Processing for Kafka messages. +- **Refactor**: Decouple message parsing from insertion in `processor`. +- **Logic**: + - Accumulate messages in a buffer (e.g., 500ms or 500 items). + - Perform Batch Insert into PostgreSQL. + - Implement Row-by-Row fallback for batch failures (to isolate bad data). + - Handle DB connection errors with retry loop at batch level. + +## Impact +- Affected specs: `onoffline` +- Affected code: `src/index.js`, `src/processor/index.js` +- Performance: Expected 10x-100x throughput increase. 
diff --git a/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md b/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md new file mode 100644 index 0000000..6b0b52b --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md @@ -0,0 +1,13 @@ +## ADDED Requirements +### Requirement: 批量消费与写入 +系统 SHALL 对 Kafka 消息进行缓冲,并按批次写入数据库,以提高吞吐量。 + +#### Scenario: 批量写入 +- **GIVEN** 短时间内收到多条消息 (e.g., 500条) +- **WHEN** 缓冲区满或超时 (e.g., 200ms) +- **THEN** 执行一次批量数据库插入操作 + +#### Scenario: 写入失败降级 +- **GIVEN** 批量写入因数据错误失败 (非连接错误) +- **WHEN** 捕获异常 +- **THEN** 自动降级为逐条写入,以隔离错误数据并确保有效数据入库 diff --git a/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md b/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md new file mode 100644 index 0000000..54e3a35 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md @@ -0,0 +1,5 @@ +## 1. 
Implementation +- [ ] Refactor `src/processor/index.js` to export `parseMessageToRows` +- [ ] Implement `BatchProcessor` logic in `src/index.js` +- [ ] Update `handleMessage` to use `BatchProcessor` +- [ ] Verify performance improvement diff --git a/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md b/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md new file mode 100644 index 0000000..67e0e57 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md @@ -0,0 +1,11 @@ +# Proposal: Refactor Partition Indexes + +## Goal +利用 PostgreSQL 默认的支持,改变每日分区创立时的索引策略,不再在代码中对每个分区单独创建索引。 + +## Context +当前 `PartitionManager` 在动态创建子分区后,会隐式调用查询在子分区上创建六个单列索引。由于我们使用的是 PostgreSQL 11+,且我们在初始化脚本中的主分区表 `onoffline.onoffline_record` 上已经创建了所有的索引,此主表上的索引会自动应用于所有的子分区,不需要我们在创建分区时另外手动添加。 + +## Proposed Changes +1. 在 `src/db/partitionManager.js` 中移除子分区显式创建索引的方法 `ensurePartitionIndexes` 以及针对已有子分区的循环索引检查函数 `ensureIndexesForExistingPartitions`。 +2. 
在更新分区流程 `ensurePartitions` 以及 `ensurePartitionsForTimestamps` 中,移除对 `ensurePartitionIndexes` 的调用。 \ No newline at end of file diff --git a/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md b/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md new file mode 100644 index 0000000..e42cc97 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md @@ -0,0 +1,11 @@ +# Spec Delta: onoffline-backend + +## MODIFIED Requirements + +### Requirement: 数据库分区策略 +系统 SHALL 使用 Range Partitioning 按天分区,并自动维护未来 30 天的分区表,子表依赖 PostgreSQL 原生机制继承主表索引。 + +#### Scenario: 分区预创建 +- **GIVEN** 系统启动或每日凌晨 +- **WHEN** 运行分区维护任务 +- **THEN** 确保数据库中存在未来 30 天的分区表,无需对子表显式创建单列索引 diff --git a/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md b/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md new file mode 100644 index 0000000..b205407 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md @@ -0,0 +1,6 @@ +# Tasks: Refactor Partition Indexes + +- [x] refactor `src/db/partitionManager.js`: remove `ensurePartitionIndexes` and `ensureIndexesForExistingPartitions`. +- [x] refactor `src/db/partitionManager.js`: update `ensurePartitions` and `ensurePartitionsForTimestamps` to remove calls to `ensurePartitionIndexes`. +- [x] refactor `src/db/initializer.js` (and any other occurrences) to reflect the removal. +- [x] update openspec requirements to clarify that index propagation relies on PostgreSQL parent-table indexes. 
\ No newline at end of file diff --git a/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md b/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md new file mode 100644 index 0000000..31905ce --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md @@ -0,0 +1,14 @@ +# Change: remove runtime db provisioning + +## Why +当前服务在运行时承担了建库、建表和分区维护职责,导致服务职责边界不清晰,也会引入启动阶段 DDL 风险。现已将该能力剥离到根目录 `SQL_Script/`,需要通过 OpenSpec 正式记录为规范变更。 + +## What Changes +- 移除服务启动阶段的数据库初始化与定时分区维护要求。 +- 移除服务在写入失败时自动创建缺失分区的要求。 +- 明确数据库结构与分区维护由外部脚本(`SQL_Script/`)负责。 +- 保留服务的核心职责:Kafka 消费、解析、写库、重试与监控。 + +## Impact +- Affected specs: `openspec/specs/onoffline/spec.md` +- Affected code: `src/index.js`, `src/config/config.js`, `src/db/initializer.js`, `src/db/partitionManager.js`, `scripts/init_db.sql`, `scripts/verify_partitions.js`, `../SQL_Script/*` diff --git a/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md b/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md new file mode 100644 index 0000000..4fa7887 --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md @@ -0,0 +1,32 @@ +## MODIFIED Requirements + +### Requirement: 数据库分区策略 +系统 SHALL 使用 Range Partitioning 按天分区;运行服务本身 SHALL NOT 执行建库、建表、分区创建或定时分区维护。 + +#### Scenario: 服务启动不执行 DDL +- **GIVEN** 服务进程启动 +- **WHEN** 进入 bootstrap 过程 +- **THEN** 仅初始化消费、处理、监控相关能力,不执行数据库创建、表结构初始化与分区创建 + +#### Scenario: 分区由外部脚本维护 +- **GIVEN** 需要创建数据库对象或新增未来分区 +- **WHEN** 执行外部 SQL/JS 工具 +- **THEN** 通过根目录 `SQL_Script/` 完成建库和分区维护,而不是由服务运行时自动执行 + +### Requirement: 批量消费与写入 +系统 SHALL 对 Kafka 消息进行缓冲,并按批次写入数据库,以提高吞吐量;当写入失败时,系统 SHALL 执行连接恢复重试与降级策略,但不在运行时创建数据库分区。 + +#### Scenario: 批量写入 +- **GIVEN** 短时间内收到多条消息 (e.g., 500条) +- **WHEN** 
缓冲区满或超时 (e.g., 200ms) +- **THEN** 执行一次批量数据库插入操作 + +#### Scenario: 写入失败降级 +- **GIVEN** 批量写入因数据错误失败 (非连接错误) +- **WHEN** 捕获异常 +- **THEN** 自动降级为逐条写入,以隔离错误数据并确保有效数据入库 + +#### Scenario: 分区缺失错误处理 +- **GIVEN** 写入时数据库返回分区缺失错误 +- **WHEN** 服务处理该错误 +- **THEN** 服务记录错误并按既有错误处理机制处理,不在运行时执行分区创建 diff --git a/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md b/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md new file mode 100644 index 0000000..bc99b8c --- /dev/null +++ b/bls-register-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md @@ -0,0 +1,12 @@ +## 1. Implementation +- [x] 1.1 Remove runtime DB initialization from bootstrap flow (`src/index.js`). +- [x] 1.2 Remove scheduled partition maintenance job from runtime service. +- [x] 1.3 Remove runtime missing-partition auto-fix behavior. +- [x] 1.4 Remove legacy DB provisioning modules and scripts from service project. +- [x] 1.5 Add external SQL/JS provisioning scripts under root `SQL_Script/` for DB/schema/partition management. +- [x] 1.6 Update project docs to point DB provisioning to `SQL_Script/`. + +## 2. Validation +- [x] 2.1 Run `npm run lint` in `bls-onoffline-backend`. +- [x] 2.2 Run `npm run build` in `bls-onoffline-backend`. +- [x] 2.3 Run `openspec validate remove-runtime-db-provisioning --strict`. 
diff --git a/bls-register-backend/openspec/project.md b/bls-register-backend/openspec/project.md new file mode 100644 index 0000000..3da5119 --- /dev/null +++ b/bls-register-backend/openspec/project.md @@ -0,0 +1,31 @@ +# Project Context + +## Purpose +[Describe your project's purpose and goals] + +## Tech Stack +- [List your primary technologies] +- [e.g., TypeScript, React, Node.js] + +## Project Conventions + +### Code Style +[Describe your code style preferences, formatting rules, and naming conventions] + +### Architecture Patterns +[Document your architectural decisions and patterns] + +### Testing Strategy +[Explain your testing approach and requirements] + +### Git Workflow +[Describe your branching strategy and commit conventions] + +## Domain Context +[Add domain-specific knowledge that AI assistants need to understand] + +## Important Constraints +[List any technical, business, or regulatory constraints] + +## External Dependencies +[Document key external services, APIs, or systems] diff --git a/bls-register-backend/openspec/specs/onoffline/spec.md b/bls-register-backend/openspec/specs/onoffline/spec.md new file mode 100644 index 0000000..a3bc8b0 --- /dev/null +++ b/bls-register-backend/openspec/specs/onoffline/spec.md @@ -0,0 +1,103 @@ +# Spec: onoffline-backend + +## Purpose +从 Kafka 消费设备上下线事件并按规则写入 PostgreSQL 分区表,确保高可靠性、幂等写入和错误恢复能力。 +## Requirements +### Requirement: 消费并落库 +系统 SHALL 从 blwlog4Nodejs-rcu-onoffline-topic-0 消费消息,并写入 log_platform.onoffline.onoffline_record。 + +#### Scenario: 非重启数据写入 +- **GIVEN** RebootReason 为空或不存在 +- **WHEN** 消息被处理 +- **THEN** current_status 等于 CurrentStatus (截断至 255 字符) + +### Requirement: 重启数据处理 +系统 SHALL 在 RebootReason 非空时强制 current_status 为 on。 + +#### Scenario: 重启数据写入 +- **GIVEN** RebootReason 为非空值 +- **WHEN** 消息被处理 +- **THEN** current_status 等于 on + +### Requirement: 空值保留 +系统 SHALL 保留上游空值,不对字段进行补 0。 + +#### Scenario: 空值写入 +- **GIVEN** LauncherVersion 或 RebootReason 为空字符串 +- **WHEN** 消息被处理 +- **THEN** 数据库存储值为对应的空字符串 
+ +### Requirement: 数据库分区策略 +系统 SHALL 使用 Range Partitioning 按天分区,并自动维护未来 30 天的分区表,子表依赖 PostgreSQL 原生机制继承主表索引。 + +#### Scenario: 分区预创建 +- **GIVEN** 系统启动或每日凌晨 +- **WHEN** 运行分区维护任务 +- **THEN** 确保数据库中存在未来 30 天的分区表,无需对子表显式创建单列索引 + +### Requirement: 消费可靠性 (At-Least-Once) +系统 SHALL 仅在数据成功写入数据库后,才向 Kafka 提交消费位点。 + +#### Scenario: 逐条确认与顺序提交 +- **GIVEN** 并发处理多条消息 (Offset 1, 2, 3) +- **WHEN** Offset 2 先完成,Offset 1 尚未完成 +- **THEN** 系统不提交 Offset 2,直到 Offset 1 也完成,才提交 Offset 3 (即 1, 2, 3 都完成) + +### Requirement: 数据库离线保护 +系统 SHALL 在数据库连接丢失时暂停消费,防止数据堆积或丢失。 + +#### Scenario: 数据库断连 +- **GIVEN** 数据库连接失败 (ECONNREFUSED 等) +- **WHEN** 消费者尝试写入 +- **THEN** 暂停 Kafka 消费 1 分钟,并进入轮询检测模式,直到数据库恢复 + +### Requirement: 幂等写入 +系统 SHALL 处理重复消费的数据,防止主键冲突。 + +#### Scenario: 重复数据处理 +- **GIVEN** Kafka 重新投递已处理过的消息 +- **WHEN** 尝试写入数据库 +- **THEN** 使用 `ON CONFLICT DO NOTHING` 忽略冲突,视为处理成功 + +### Requirement: 性能与日志 +系统 SHALL 最小化正常运行时的日志输出。 + +#### Scenario: 正常运行日志 +- **GIVEN** 数据正常处理 +- **WHEN** 写入成功 +- **THEN** 不输出单条日志,仅每分钟输出聚合统计 (Pulled/Inserted) + +### Requirement: 字段长度限制与截断 +系统 SHALL 将部分变长字段截断至数据库允许的最大长度 (VARCHAR(255)),防止写入失败。 + +#### Scenario: 超长字段处理 +- **GIVEN** LauncherVersion, CurrentStatus 或 RebootReason 超过 255 字符 +- **WHEN** 消息被处理 +- **THEN** 字段被截断为前 255 个字符并入库 + +### Requirement: 时间戳单位自动识别 +系统 SHALL 自动识别 UnixTime 字段是秒还是毫秒,并统一转换为毫秒。 + +#### Scenario: 秒级时间戳转换 +- **GIVEN** UnixTime < 100000000000 (约 1973 年前) +- **WHEN** 解析时间戳 +- **THEN** 自动乘以 1000 转换为毫秒 + +### Requirement: 批量消费与写入 +系统 SHALL 对 Kafka 消息进行缓冲,并按批次写入数据库,以提高吞吐量;当写入失败时,系统 SHALL 执行连接恢复重试与降级策略,但不在运行时创建数据库分区。 + +#### Scenario: 批量写入 +- **GIVEN** 短时间内收到多条消息 (e.g., 500条) +- **WHEN** 缓冲区满或超时 (e.g., 200ms) +- **THEN** 执行一次批量数据库插入操作 + +#### Scenario: 写入失败降级 +- **GIVEN** 批量写入因数据错误失败 (非连接错误) +- **WHEN** 捕获异常 +- **THEN** 自动降级为逐条写入,以隔离错误数据并确保有效数据入库 + +#### Scenario: 分区缺失错误处理 +- **GIVEN** 写入时数据库返回分区缺失错误 +- **WHEN** 服务处理该错误 +- **THEN** 服务记录错误并按既有错误处理机制处理,不在运行时执行分区创建 + diff --git a/bls-register-backend/openspec/specs/onoffline/status.md 
b/bls-register-backend/openspec/specs/onoffline/status.md new file mode 100644 index 0000000..a80909a --- /dev/null +++ b/bls-register-backend/openspec/specs/onoffline/status.md @@ -0,0 +1,11 @@ + +## Implementation Status +- **Date**: 2026-02-04 +- **Status**: Completed +- **Notes**: + - 已完成核心消费逻辑、分区管理、数据库幂等写入。 + - 已处理数据库连接泄露 (EADDRINUSE) 问题,增加了离线保护机制。 + - 已修复时间戳单位问题 (Seconds -> MS)。 + - 已将关键字段长度扩展至 VARCHAR(255) 并增加了代码层截断保护。 + - 验证了数据积压消费能力。 + - 本阶段开发任务已归档。 diff --git a/bls-register-backend/out.log b/bls-register-backend/out.log new file mode 100644 index 0000000..5210aae --- /dev/null +++ b/bls-register-backend/out.log @@ -0,0 +1,13 @@ +{"level":"info","message":"Starting register consumer","timestamp":1773480367035,"context":{"env":"development","kafka":{"brokers":["kafka.blv-oa.com:9092"],"topic":"blwlog4Nodejs-rcu-register-topic","groupId":"bls-register-consumer-probe-1773480366464"},"db":{"host":"10.8.8.80","port":5434,"database":"log_platform","schema":"rcu_info","table":"rcu_info_events_g5","roomStatusSchema":"room_status","roomStatusTable":"room_status_moment_g5"},"flushIntervalMs":3000}} +{"level":"info","message":"Kafka Consumer rebalancing","timestamp":1773480367119,"context":{"groupId":"bls-register-consumer-probe-1773480366464","clientId":"bls-register-producer"}} +{"level":"info","message":"Kafka Consumer connected","timestamp":1773480367237,"context":{"groupId":"bls-register-consumer-probe-1773480366464","clientId":"bls-register-producer"}} +{"level":"info","message":"Kafka Consumer rebalanced","timestamp":1773480367237,"context":{"clientId":"bls-register-producer","groupId":"bls-register-consumer-probe-1773480366464"}} +{"level":"info","message":"Run counters","timestamp":1773480377038,"context":{"kafkaPulled":23,"dbInserted":23,"parseError":0,"dbFailed":0}} +{"level":"info","message":"Run counters","timestamp":1773480387038,"context":{"kafkaPulled":37,"dbInserted":31,"parseError":0,"dbFailed":0}} + +[probe] published 
topic=blwlog4Nodejs-rcu-register-topic ts_ms=1773480366464 hotel_id=1172 room_id=515 +[probe-db] event_rows=1 +[probe-db] event.ts_ms=1773480366464 room_id=515 app_version=v1.2 +[probe-db] event.udp_raw=YWJjZGVm +[probe-db] room_status_rows=1 +[probe-db] room_status.room_id=515 register_ts_ms=1773480366464 upgrade_ts_ms=1773480367698 diff --git a/bls-register-backend/package-lock.json b/bls-register-backend/package-lock.json new file mode 100644 index 0000000..61419bb --- /dev/null +++ b/bls-register-backend/package-lock.json @@ -0,0 +1,3526 @@ +{ + "name": "bls-register-backend", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "bls-register-backend", + "version": "1.0.0", + "dependencies": { + "dotenv": "^16.4.5", + "kafka-node": "^5.0.0", + "node-cron": "^4.2.1", + "pg": "^8.11.5", + "redis": "^4.6.13", + "zod": "^4.3.6" + }, + "devDependencies": { + "vite": "^5.4.0", + "vitest": "^4.0.18" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": 
"sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + 
"resolved": "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { 
+ "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ 
+ "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + 
"integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + 
"resolved": "https://registry.npmmirror.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.6.1", + "resolved": "https://registry.npmmirror.com/@redis/client/-/client-1.6.1.tgz", + "integrity": "sha512-/KCsg3xSlR+nCK8/8ZYSknYxvXHwubJrU82F3Lm1Fp6789VQ0/3RJKfsmRXjqfaTA++23CvC3hqmqe/2GEt6Kw==", + "license": "MIT", + "peer": true, + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/graph": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/@redis/graph/-/graph-1.1.1.tgz", + "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/@redis/json/-/json-1.0.7.tgz", + "integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/search": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/@redis/search/-/search-1.2.0.tgz", + "integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + 
}, + "node_modules/@redis/time-series": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@redis/time-series/-/time-series-1.1.0.tgz", + "integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.0.tgz", + "integrity": "sha512-tPgXB6cDTndIe1ah7u6amCI1T0SsnlOuKgg10Xh3uizJk4e5M1JGaUMk7J4ciuAUcFpbOiNhm2XIjP9ON0dUqA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.0.tgz", + "integrity": "sha512-sa4LyseLLXr1onr97StkU1Nb7fWcg6niokTwEVNOO7awaKaoRObQ54+V/hrF/BP1noMEaaAW6Fg2d/CfLiq3Mg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.0.tgz", + "integrity": "sha512-/NNIj9A7yLjKdmkx5dC2XQ9DmjIECpGpwHoGmA5E1AhU0fuICSqSWScPhN1yLCkEdkCwJIDu2xIeLPs60MNIVg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.0.tgz", + "integrity": "sha512-xoh8abqgPrPYPr7pTYipqnUi1V3em56JzE/HgDgitTqZBZ3yKCWI+7KUkceM6tNweyUKYru1UMi7FC060RyKwA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + 
"node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.0.tgz", + "integrity": "sha512-PCkMh7fNahWSbA0OTUQ2OpYHpjZZr0hPr8lId8twD7a7SeWrvT3xJVyza+dQwXSSq4yEQTMoXgNOfMCsn8584g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.0.tgz", + "integrity": "sha512-1j3stGx+qbhXql4OCDZhnK7b01s6rBKNybfsX+TNrEe9JNq4DLi1yGiR1xW+nL+FNVvI4D02PUnl6gJ/2y6WJA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.0.tgz", + "integrity": "sha512-eyrr5W08Ms9uM0mLcKfM/Uzx7hjhz2bcjv8P2uynfj0yU8GGPdz8iYrBPhiLOZqahoAMB8ZiolRZPbbU2MAi6Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.0.tgz", + "integrity": "sha512-Xds90ITXJCNyX9pDhqf85MKWUI4lqjiPAipJ8OLp8xqI2Ehk+TCVhF9rvOoN8xTbcafow3QOThkNnrM33uCFQA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.0.tgz", + "integrity": "sha512-Xws2KA4CLvZmXjy46SQaXSejuKPhwVdaNinldoYfqruZBaJHqVo6hnRa8SDo9z7PBW5x84SH64+izmldCgbezw==", + "cpu": [ + "arm64" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.0.tgz", + "integrity": "sha512-hrKXKbX5FdaRJj7lTMusmvKbhMJSGWJ+w++4KmjiDhpTgNlhYobMvKfDoIWecy4O60K6yA4SnztGuNTQF+Lplw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.0.tgz", + "integrity": "sha512-6A+nccfSDGKsPm00d3xKcrsBcbqzCTAukjwWK6rbuAnB2bHaL3r9720HBVZ/no7+FhZLz/U3GwwZZEh6tOSI8Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.0.tgz", + "integrity": "sha512-4P1VyYUe6XAJtQH1Hh99THxr0GKMMwIXsRNOceLrJnaHTDgk1FTcTimDgneRJPvB3LqDQxUmroBclQ1S0cIJwQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.0.tgz", + "integrity": "sha512-8Vv6pLuIZCMcgXre6c3nOPhE0gjz1+nZP6T+hwWjr7sVH8k0jRkH+XnfjjOTglyMBdSKBPPz54/y1gToSKwrSQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.0.tgz", + "integrity": 
"sha512-r1te1M0Sm2TBVD/RxBPC6RZVwNqUTwJTA7w+C/IW5v9Ssu6xmxWEi+iJQlpBhtUiT1raJ5b48pI8tBvEjEFnFA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.0.tgz", + "integrity": "sha512-say0uMU/RaPm3CDQLxUUTF2oNWL8ysvHkAjcCzV2znxBr23kFfaxocS9qJm+NdkRhF8wtdEEAJuYcLPhSPbjuQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.0.tgz", + "integrity": "sha512-/MU7/HizQGsnBREtRpcSbSV1zfkoxSTR7wLsRmBPQ8FwUj5sykrP1MyJTvsxP5KBq9SyE6kH8UQQQwa0ASeoQQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.0.tgz", + "integrity": "sha512-Q9eh+gUGILIHEaJf66aF6a414jQbDnn29zeu0eX3dHMuysnhTvsUvZTCAyZ6tJhUjnvzBKE4FtuaYxutxRZpOg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.0.tgz", + "integrity": "sha512-OR5p5yG5OKSxHReWmwvM0P+VTPMwoBS45PXTMYaskKQqybkS3Kmugq1W+YbNWArF8/s7jQScgzXUhArzEQ7x0A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.0", + "resolved": 
"https://registry.npmmirror.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.0.tgz", + "integrity": "sha512-XeatKzo4lHDsVEbm1XDHZlhYZZSQYym6dg2X/Ko0kSFgio+KXLsxwJQprnR48GvdIKDOpqWqssC3iBCjoMcMpw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.0.tgz", + "integrity": "sha512-Lu71y78F5qOfYmubYLHPcJm74GZLU6UJ4THkf/a1K7Tz2ycwC2VUbsqbJAXaR6Bx70SRdlVrt2+n5l7F0agTUw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.0.tgz", + "integrity": "sha512-v5xwKDWcu7qhAEcsUubiav7r+48Uk/ENWdr82MBZZRIm7zThSxCIVDfb3ZeRRq9yqk+oIzMdDo6fCcA5DHfMyA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.0.tgz", + "integrity": "sha512-XnaaaSMGSI6Wk8F4KK3QP7GfuuhjGchElsVerCplUuxRIzdvZ7hRBpLR0omCmw+kI2RFJB80nenhOoGXlJ5TfQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.0.tgz", + "integrity": "sha512-3K1lP+3BXY4t4VihLw5MEg6IZD3ojSYzqzBG571W3kNQe4G4CcFpSUQVgurYgib5d+YaCjeFow8QivWp8vuSvA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + 
"node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.0.tgz", + "integrity": "sha512-MDk610P/vJGc5L5ImE4k5s+GZT3en0KoK1MKPXCRgzmksAMk79j4h3k1IerxTNqwDLxsGxStEZVBqG0gIqZqoA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.0.tgz", + "integrity": "sha512-Zv7v6q6aV+VslnpwzqKAmrk5JdVkLUzok2208ZXGipjb+msxBr/fJPZyeEXiFgH7k62Ak0SLIfxQRZQvTuf7rQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmmirror.com/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmmirror.com/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/@types/estree/-/estree-1.0.8.tgz", + "integrity": 
"sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@vitest/expect": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/expect/-/expect-4.0.18.tgz", + "integrity": "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "chai": "^6.2.1", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/pretty-format": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/pretty-format/-/pretty-format-4.0.18.tgz", + "integrity": "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/runner/-/runner-4.0.18.tgz", + "integrity": "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "4.0.18", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/snapshot/-/snapshot-4.0.18.tgz", + "integrity": "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.18", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" + }, + "funding": { + "url": 
"https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/spy/-/spy-4.0.18.tgz", + "integrity": "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/utils/-/utils-4.0.18.tgz", + "integrity": "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.18", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "license": "ISC", + "optional": true + }, + "node_modules/are-we-there-yet": { + "version": "1.1.7", + "resolved": "https://registry.npmmirror.com/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz", + "integrity": "sha512-nxwy40TuMiUGqMyRHgCSWZ9FM4VAoRP4xUYSTv5ImRog+h9yISPbVH7H8fASCIzYn9wlEv4zvFL7uKDMCFQm3g==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "optional": true, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + } + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": 
"https://registry.npmmirror.com/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/async": { + "version": "2.6.4", + "resolved": "https://registry.npmmirror.com/async/-/async-2.6.4.tgz", + "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", + "license": "MIT", + "dependencies": { + "lodash": "^4.17.14" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/binary": { + "version": "0.3.0", + "resolved": "https://registry.npmmirror.com/binary/-/binary-0.3.0.tgz", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", + "license": "MIT", + "dependencies": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmmirror.com/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "license": "MIT", + 
"optional": true, + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, + "node_modules/bl": { + "version": "2.2.1", + "resolved": "https://registry.npmmirror.com/bl/-/bl-2.2.1.tgz", + "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", + "license": "MIT", + "dependencies": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/buffer-alloc": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz", + "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", + "license": "MIT", + "optional": true, + "dependencies": { + "buffer-alloc-unsafe": "^1.1.0", + "buffer-fill": "^1.0.0" + } + }, + "node_modules/buffer-alloc-unsafe": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", + "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==", + "license": "MIT", + "optional": true + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmmirror.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/buffer-fill": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/buffer-fill/-/buffer-fill-1.0.0.tgz", + "integrity": 
"sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==", + "license": "MIT", + "optional": true + }, + "node_modules/buffermaker": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/buffermaker/-/buffermaker-1.2.1.tgz", + "integrity": "sha512-IdnyU2jDHU65U63JuVQNTHiWjPRH0CS3aYd/WPaEwyX84rFdukhOduAVb1jwUScmb5X0JWPw8NZOrhoLMiyAHQ==", + "license": "MIT", + "dependencies": { + "long": "1.1.2" + } + }, + "node_modules/buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmmirror.com/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", + "engines": { + "node": ">=0.2.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmmirror.com/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", 
+ "license": "MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/chai": { + "version": "6.2.2", + "resolved": "https://registry.npmmirror.com/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmmirror.com/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", + "license": "MIT/X11", + "dependencies": { + "traverse": ">=0.3.0 <0.4" + }, + "engines": { + "node": "*" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC", + "optional": true + }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": 
"https://registry.npmmirror.com/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "license": "ISC", + "optional": true + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmmirror.com/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmmirror.com/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", + "license": "MIT", + "optional": true, + "dependencies": { + "mimic-response": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmmirror.com/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": 
"https://registry.npmmirror.com/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "license": "MIT", + "optional": true + }, + "node_modules/denque": { + "version": "1.5.1", + "resolved": "https://registry.npmmirror.com/denque/-/denque-1.5.1.tgz", + "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", + "license": "Apache-2.0", + "optional": true, + "bin": { + "detect-libc": "bin/detect-libc.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmmirror.com/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": 
"sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmmirror.com/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "optional": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmmirror.com/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + 
"node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmmirror.com/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "license": "(MIT OR WTFPL)", + "optional": true, + "engines": { + "node": ">=6" + } + }, + 
"node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "license": "MIT", + "optional": true + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmmirror.com/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "license": "MIT", + "optional": true, + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "license": "MIT", + "optional": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.3.tgz", + "integrity": 
"sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "optional": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmmirror.com/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha512-14x4kjc6lkD3ltw589k0NrPD6cCNTD6CWoVUNpB85+DrtONoZn+Rug6xZU5RvSC4+TZPxA5AnBibQYAvZn41Hg==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "optional": true, + "dependencies": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmmirror.com/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + 
"function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "optional": true, + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmmirror.com/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", + "license": "MIT", + "optional": true + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": 
"sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "optional": true, + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "license": "ISC", + "optional": true + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmmirror.com/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmmirror.com/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "license": "ISC", + "optional": true + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": 
"https://registry.npmmirror.com/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw==", + "license": "MIT", + "optional": true, + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmmirror.com/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, + "node_modules/kafka-node": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/kafka-node/-/kafka-node-5.0.0.tgz", + "integrity": "sha512-dD2ga5gLcQhsq1yNoQdy1MU4x4z7YnXM5bcG9SdQuiNr5KKuAmXixH1Mggwdah5o7EfholFbcNDPSVA6BIfaug==", + "license": "MIT", + "dependencies": { + "async": "^2.6.2", + "binary": "~0.3.0", + "bl": "^2.2.0", + "buffer-crc32": "~0.2.5", + "buffermaker": "~1.2.0", + "debug": "^2.1.3", + "denque": "^1.3.0", + "lodash": "^4.17.4", + "minimatch": "^3.0.2", + 
"nested-error-stacks": "^2.0.0", + "optional": "^0.1.3", + "retry": "^0.10.1", + "uuid": "^3.0.0" + }, + "engines": { + "node": ">=8.5.1" + }, + "optionalDependencies": { + "snappy": "^6.0.1" + } + }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmmirror.com/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "license": "MIT" + }, + "node_modules/long": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/long/-/long-1.1.2.tgz", + "integrity": "sha512-pjR3OP1X2VVQhCQlrq3s8UxugQsuoucwMOn9Yj/kN/61HMc+lDFJS5bvpNEHneZ9NVaSm8gNWxZvtGS7lqHb3Q==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmmirror.com/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-3.1.2.tgz", + "integrity": 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmmirror.com/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "optional": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmmirror.com/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "license": "MIT", + "optional": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/nan": { + "version": "2.25.0", + "resolved": "https://registry.npmmirror.com/nan/-/nan-2.25.0.tgz", + "integrity": "sha512-0M90Ag7Xn5KMLLZ7zliPWP3rT90P6PN+IzVFS0VqmnPktBk3700xUVv8Ikm9EUaUE5SDWdp/BIxdENzVznpm1g==", + "license": "MIT", + "optional": true + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/napi-build-utils": { + "version": 
"1.0.2", + "resolved": "https://registry.npmmirror.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", + "license": "MIT", + "optional": true + }, + "node_modules/nested-error-stacks": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/nested-error-stacks/-/nested-error-stacks-2.1.1.tgz", + "integrity": "sha512-9iN1ka/9zmX1ZvLV9ewJYEk9h7RyRRtqdK0woXcqohu8EWIerfPUjYJPg0ULy0UqP7cslmdGc8xKDJcojlKiaw==", + "license": "MIT" + }, + "node_modules/node-abi": { + "version": "2.30.1", + "resolved": "https://registry.npmmirror.com/node-abi/-/node-abi-2.30.1.tgz", + "integrity": "sha512-/2D0wOQPgaUWzVSVgRMx+trKJRC2UG4SUc4oCJoXx9Uxjtp0Vy3/kt7zcbxHF8+Z/pK3UloLWzBISg72brfy1w==", + "license": "MIT", + "optional": true, + "dependencies": { + "semver": "^5.4.1" + } + }, + "node_modules/node-cron": { + "version": "4.2.1", + "resolved": "https://registry.npmmirror.com/node-cron/-/node-cron-4.2.1.tgz", + "integrity": "sha512-lgimEHPE/QDgFlywTd8yTR61ptugX3Qer29efeyWw2rv259HtGBNn1vZVmp8lB9uo9wC0t/AT4iGqXxia+CJFg==", + "license": "ISC", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/noop-logger": { + "version": "0.1.1", + "resolved": "https://registry.npmmirror.com/noop-logger/-/noop-logger-0.1.1.tgz", + "integrity": "sha512-6kM8CLXvuW5crTxsAtva2YLrRrDaiTIkIePWs9moLHqbFWT94WpNFjwS/5dfLfECg5i/lkmw3aoqVidxt23TEQ==", + "license": "MIT", + "optional": true + }, + "node_modules/npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmmirror.com/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "optional": true, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + 
"node_modules/number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmmirror.com/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmmirror.com/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "optional": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optional": { + "version": "0.1.4", + "resolved": "https://registry.npmmirror.com/optional/-/optional-0.1.4.tgz", + "integrity": "sha512-gtvrrCfkE08wKcgXaVwQVgwEQ8vel2dc5DDBn9RLQZ3YtmtkBss6A2HY6BnJH4N/4Ku97Ri/SF8sNWE2225WJw==", + "license": "MIT" + }, + "node_modules/os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pathe": { + 
"version": "2.0.3", + "resolved": "https://registry.npmmirror.com/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pg": { + "version": "8.17.2", + "resolved": "https://registry.npmmirror.com/pg/-/pg-8.17.2.tgz", + "integrity": "sha512-vjbKdiBJRqzcYw1fNU5KuHyYvdJ1qpcQg1CeBrHFqV1pWgHeVR6j/+kX0E1AAXfyuLUGY1ICrN2ELKA/z2HWzw==", + "license": "MIT", + "peer": true, + "dependencies": { + "pg-connection-string": "^2.10.1", + "pg-pool": "^3.11.0", + "pg-protocol": "^1.11.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", + "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.10.1", + "resolved": "https://registry.npmmirror.com/pg-connection-string/-/pg-connection-string-2.10.1.tgz", + "integrity": "sha512-iNzslsoeSH2/gmDDKiyMqF64DATUCWj3YJ0wP14kqcsf2TUklwimd+66yYojKwZCA7h2yRNLGug71hCBA2a4sw==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.11.0", + "resolved": "https://registry.npmmirror.com/pg-pool/-/pg-pool-3.11.0.tgz", + "integrity": 
"sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.11.0", + "resolved": "https://registry.npmmirror.com/pg-protocol/-/pg-protocol-1.11.0.tgz", + "integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmmirror.com/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmmirror.com/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + 
"resolved": "https://registry.npmmirror.com/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmmirror.com/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prebuild-install": { + "version": "5.3.0", + "resolved": "https://registry.npmmirror.com/prebuild-install/-/prebuild-install-5.3.0.tgz", + "integrity": "sha512-aaLVANlj4HgZweKttFNUVNRxDukytuIuxeK2boIMHjagNJCiVKWFsKF4tCE3ql3GbrD2tExPQ7/pwtEJcHNZeg==", + "license": "MIT", + "optional": true, + "dependencies": { + "detect-libc": "^1.0.3", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "napi-build-utils": "^1.0.1", + "node-abi": "^2.7.0", + "noop-logger": "^0.1.1", + "npmlog": "^4.0.1", + "os-homedir": "^1.0.1", + "pump": "^2.0.1", + "rc": "^1.2.7", + "simple-get": "^2.7.0", + "tar-fs": "^1.13.0", + "tunnel-agent": "^0.6.0", + "which-pm-runs": "^1.0.0" + }, + "bin": { + "prebuild-install": "bin.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "license": "MIT" + }, + "node_modules/pump": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/pump/-/pump-2.0.1.tgz", + "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", + "license": "MIT", + "optional": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmmirror.com/rc/-/rc-1.2.8.tgz", + "integrity": 
"sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "optional": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/redis": { + "version": "4.7.1", + "resolved": "https://registry.npmmirror.com/redis/-/redis-4.7.1.tgz", + "integrity": "sha512-S1bJDnqLftzHXHP8JsT5II/CtHWQrASX5K96REjWjlmWKrviSOLWmM7QnRLstAWsu1VBBV1ffV6DzCvxNP0UJQ==", + "license": "MIT", + "workspaces": [ + "./packages/*" + ], + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.1", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + "@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" + } + }, + "node_modules/retry": { + "version": "0.10.1", + "resolved": "https://registry.npmmirror.com/retry/-/retry-0.10.1.tgz", + "integrity": "sha512-ZXUSQYTHdl3uS7IuCehYfMzKyIDBNoAuUblvy5oGO5UJSUTmStUUVPXbA9Qxd173Bgre53yCQczQuHgRWAdvJQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/rollup": { + "version": "4.57.0", + 
"resolved": "https://registry.npmmirror.com/rollup/-/rollup-4.57.0.tgz", + "integrity": "sha512-e5lPJi/aui4TO1LpAXIRLySmwXSE8k3b9zoGfd42p67wzxog4WHjiZF3M2uheQih4DGyc25QEV4yRBbpueNiUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.57.0", + "@rollup/rollup-android-arm64": "4.57.0", + "@rollup/rollup-darwin-arm64": "4.57.0", + "@rollup/rollup-darwin-x64": "4.57.0", + "@rollup/rollup-freebsd-arm64": "4.57.0", + "@rollup/rollup-freebsd-x64": "4.57.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.0", + "@rollup/rollup-linux-arm-musleabihf": "4.57.0", + "@rollup/rollup-linux-arm64-gnu": "4.57.0", + "@rollup/rollup-linux-arm64-musl": "4.57.0", + "@rollup/rollup-linux-loong64-gnu": "4.57.0", + "@rollup/rollup-linux-loong64-musl": "4.57.0", + "@rollup/rollup-linux-ppc64-gnu": "4.57.0", + "@rollup/rollup-linux-ppc64-musl": "4.57.0", + "@rollup/rollup-linux-riscv64-gnu": "4.57.0", + "@rollup/rollup-linux-riscv64-musl": "4.57.0", + "@rollup/rollup-linux-s390x-gnu": "4.57.0", + "@rollup/rollup-linux-x64-gnu": "4.57.0", + "@rollup/rollup-linux-x64-musl": "4.57.0", + "@rollup/rollup-openbsd-x64": "4.57.0", + "@rollup/rollup-openharmony-arm64": "4.57.0", + "@rollup/rollup-win32-arm64-msvc": "4.57.0", + "@rollup/rollup-win32-ia32-msvc": "4.57.0", + "@rollup/rollup-win32-x64-gnu": "4.57.0", + "@rollup/rollup-win32-x64-msvc": "4.57.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" 
+ }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "license": "ISC", + "optional": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "license": "ISC", + "optional": true + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmmirror.com/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "license": "MIT", + "optional": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmmirror.com/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC", + "optional": true + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": 
"https://registry.npmmirror.com/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "optional": true + }, + "node_modules/simple-get": { + "version": "2.8.2", + "resolved": "https://registry.npmmirror.com/simple-get/-/simple-get-2.8.2.tgz", + "integrity": "sha512-Ijd/rV5o+mSBBs4F/x9oDPtTx9Zb6X9brmnXvMW4J7IR15ngi9q5xxqWBKU744jTZiaXtxaPL7uHG6vtN8kUkw==", + "license": "MIT", + "optional": true, + "dependencies": { + "decompress-response": "^3.3.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/snappy": { + "version": "6.3.5", + "resolved": "https://registry.npmmirror.com/snappy/-/snappy-6.3.5.tgz", + "integrity": "sha512-lonrUtdp1b1uDn1dbwgQbBsb5BbaiLeKq+AGwOk2No+en+VvJThwmtztwulEQsLinRF681pBqib0NUZaizKLIA==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bindings": "^1.3.1", + "nan": "^2.14.1", + "prebuild-install": "5.3.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmmirror.com/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": 
"https://registry.npmmirror.com/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "3.10.0", + "resolved": "https://registry.npmmirror.com/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "dev": true, + "license": "MIT" + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw==", + "license": "MIT", + "optional": true, + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", + "license": "MIT", + "optional": true, + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tar-fs": { + "version": "1.16.6", + "resolved": "https://registry.npmmirror.com/tar-fs/-/tar-fs-1.16.6.tgz", + "integrity": "sha512-JkOgFt3FxM/2v2CNpAVHqMW2QASjc/Hxo7IGfNd3MHaDYSW/sBFiS7YVmmhmr8x6vwN1VFQDQGdT2MWpmIuVKA==", + "license": "MIT", + "optional": true, + "dependencies": { + "chownr": "^1.0.1", + "mkdirp": "^0.5.1", + "pump": "^1.0.0", + "tar-stream": "^1.1.2" + } + }, + "node_modules/tar-fs/node_modules/pump": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/pump/-/pump-1.0.3.tgz", + "integrity": "sha512-8k0JupWme55+9tCVE+FS5ULT3K6AbgqrGa58lTT49RpyfwwcGedHqaC5LlQNdEAumn/wFsu6aPwkuPMioy8kqw==", + "license": "MIT", + "optional": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmmirror.com/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "license": "MIT", + "optional": true, + "dependencies": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/tar-stream/node_modules/bl": { + "version": "1.2.3", + "resolved": "https://registry.npmmirror.com/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "license": "MIT", + "optional": true, + "dependencies": { + "readable-stream": 
"^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmmirror.com/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyrainbow": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/tinyrainbow/-/tinyrainbow-3.0.3.tgz", + "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/to-buffer": { + "version": "1.2.2", + "resolved": "https://registry.npmmirror.com/to-buffer/-/to-buffer-1.2.2.tgz", + "integrity": "sha512-db0E3UJjcFhpDhAF4tLo03oli3pwl3dbnzXOUIlRKrp+ldk/VUxzpWYZENsw2SZiuBjHAk7DfB0VU7NKdpb6sw==", + "license": "MIT", + "optional": true, + "dependencies": { + "isarray": "^2.0.5", + "safe-buffer": "^5.2.1", + "typed-array-buffer": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/to-buffer/node_modules/isarray": { + "version": "2.0.5", + "resolved": 
"https://registry.npmmirror.com/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "license": "MIT", + "optional": true + }, + "node_modules/traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmmirror.com/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", + "license": "MIT/X11", + "engines": { + "node": "*" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmmirror.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmmirror.com/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. 
Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "license": "MIT", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmmirror.com/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vitest": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/vitest/-/vitest-4.0.18.tgz", + "integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "4.0.18", + "@vitest/mocker": "4.0.18", + "@vitest/pretty-format": "4.0.18", + "@vitest/runner": "4.0.18", + "@vitest/snapshot": "4.0.18", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "es-module-lexer": "^1.7.0", + "expect-type": "^1.2.2", + "magic-string": "^0.30.21", + "obug": "^2.1.1", + "pathe": "^2.0.3", + 
"picomatch": "^4.0.3", + "std-env": "^3.10.0", + "tinybench": "^2.9.0", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3", + "vite": "^6.0.0 || ^7.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.0.18", + "@vitest/browser-preview": "4.0.18", + "@vitest/browser-webdriverio": "4.0.18", + "@vitest/ui": "4.0.18", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/android-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + 
"android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/android-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/android-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/darwin-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.2", + "resolved": 
"https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-loong64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-s390x": { + "version": "0.27.2", + "resolved": 
"https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/sunos-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/win32-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/win32-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/win32-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@vitest/mocker": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/mocker/-/mocker-4.0.18.tgz", + "integrity": "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "4.0.18", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + 
"optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/esbuild": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": "0.27.2", + "@esbuild/linux-arm": "0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + "@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" + } + }, + "node_modules/vitest/node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmmirror.com/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || 
>=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/which-pm-runs": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/which-pm-runs/-/which-pm-runs-1.1.0.tgz", + "integrity": "sha512-n1brCuqClxfFfq/Rb0ICg9giSZqCS+pLtccdag6C2HyufBrh3fBOiy9nb6ggRMvWOVH5GrdJskj5iGTZNxd7SA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.20", + "resolved": "https://registry.npmmirror.com/which-typed-array/-/which-typed-array-1.1.20.tgz", + "integrity": "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==", + "license": "MIT", + "optional": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": 
"https://registry.npmmirror.com/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmmirror.com/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "license": "ISC", + "optional": true, + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC", + "optional": true + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmmirror.com/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" + }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmmirror.com/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + } + } +} diff --git a/bls-register-backend/package.json 
b/bls-register-backend/package.json new file mode 100644 index 0000000..81ecb9e --- /dev/null +++ b/bls-register-backend/package.json @@ -0,0 +1,27 @@ +{ + "name": "bls-register-backend", + "version": "1.0.0", + "type": "module", + "private": true, + "scripts": { + "dev": "node src/index.js", + "build": "vite build --ssr src/index.js --outDir dist", + "test": "vitest run", + "lint": "node scripts/lint.js", + "spec:lint": "openspec validate --specs --strict --no-interactive", + "spec:validate": "openspec validate --specs --no-interactive", + "start": "node dist/index.js" + }, + "dependencies": { + "dotenv": "^16.4.5", + "kafka-node": "^5.0.0", + "node-cron": "^4.2.1", + "pg": "^8.11.5", + "redis": "^4.6.13", + "zod": "^4.3.6" + }, + "devDependencies": { + "vite": "^5.4.0", + "vitest": "^4.0.18" + } +} diff --git a/bls-register-backend/probe.log b/bls-register-backend/probe.log new file mode 100644 index 0000000..4075633 --- /dev/null +++ b/bls-register-backend/probe.log @@ -0,0 +1 @@ +[probe] published topic=blwlog4Nodejs-rcu-register-topic ts_ms=1773480366464 hotel_id=1172 room_id=515 diff --git a/bls-register-backend/scripts/lint.js b/bls-register-backend/scripts/lint.js new file mode 100644 index 0000000..eaa9035 --- /dev/null +++ b/bls-register-backend/scripts/lint.js @@ -0,0 +1,41 @@ +import fs from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import { spawnSync } from 'child_process'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const projectRoot = path.resolve(__dirname, '..'); +const targets = ['src', 'tests']; + +const collectFiles = (dir) => { + if (!fs.existsSync(dir)) { + return []; + } + const entries = fs.readdirSync(dir, { withFileTypes: true }); + return entries.flatMap((entry) => { + const fullPath = path.join(dir, entry.name); + if (entry.isDirectory()) { + return collectFiles(fullPath); + } + if (entry.isFile() && fullPath.endsWith('.js')) { + return [fullPath]; + } + 
return []; + }); +}; + +const files = targets.flatMap((target) => collectFiles(path.join(projectRoot, target))); + +const failures = []; + +files.forEach((file) => { + const result = spawnSync(process.execPath, ['--check', file], { stdio: 'inherit' }); + if (result.status !== 0) { + failures.push(file); + } +}); + +if (failures.length > 0) { + process.exit(1); +} diff --git a/bls-register-backend/scripts/publishProbe.js b/bls-register-backend/scripts/publishProbe.js new file mode 100644 index 0000000..d62d1dd --- /dev/null +++ b/bls-register-backend/scripts/publishProbe.js @@ -0,0 +1,66 @@ +import dotenv from 'dotenv'; +import kafka from 'kafka-node'; + +dotenv.config(); + +const probeTs = Number(process.argv[2] || Date.now()); +const probeRoom = process.argv[3] || `PROBE-${probeTs}`; +const probeHotelId = Number(process.argv[4] || 1085); +const topic = process.env.KAFKA_TOPIC || process.env.KAFKA_TOPICS || 'blwlog4Nodejs-rcu-register-topic'; +const kafkaHost = (process.env.KAFKA_BROKERS || '').split(',').map((s) => s.trim()).filter(Boolean).join(','); +const saslEnabled = process.env.KAFKA_SASL_ENABLED === 'true'; +const sslEnabled = process.env.KAFKA_SSL_ENABLED === 'true'; + +const kafkaClientOptions = { + kafkaHost, + clientId: process.env.KAFKA_CLIENT_ID || 'bls-register-producer' +}; + +if (saslEnabled && process.env.KAFKA_SASL_USERNAME && process.env.KAFKA_SASL_PASSWORD) { + kafkaClientOptions.sasl = { + mechanism: process.env.KAFKA_SASL_MECHANISM || 'plain', + username: process.env.KAFKA_SASL_USERNAME, + password: process.env.KAFKA_SASL_PASSWORD + }; +} + +if (sslEnabled) { + kafkaClientOptions.sslOptions = { rejectUnauthorized: false }; +} + +const payload = { + ts_ms: probeTs, + upgrade_ts_ms: probeTs + 1234, + hotel_id: Number.isFinite(probeHotelId) ? 
probeHotelId : 1085, + room_id: probeRoom, + device_id: `probe-device-${probeTs}`, + is_send: 0, + udp_raw: 'abc\u0000def', + extra: { source: 'probe', note: 'with\\u0000nul' }, + app_version: 'v1\u0000.2', + launcher_version: 'launcher-1', + config_version: 'cfg-1' +}; + +const client = new kafka.KafkaClient(kafkaClientOptions); + +const producer = new kafka.Producer(client); + +producer.on('ready', () => { + producer.send( + [{ topic, messages: JSON.stringify(payload) }], + (err) => { + if (err) { + console.error(`[probe] publish failed: ${err.message}`); + process.exit(1); + } + console.log(`[probe] published topic=${topic} ts_ms=${probeTs} hotel_id=${payload.hotel_id} room_id=${probeRoom}`); + producer.close(() => client.close(() => process.exit(0))); + } + ); +}); + +producer.on('error', (err) => { + console.error(`[probe] producer error: ${err.message}`); + process.exit(1); +}); diff --git a/bls-register-backend/scripts/run-30s.ps1 b/bls-register-backend/scripts/run-30s.ps1 new file mode 100644 index 0000000..0959c1a --- /dev/null +++ b/bls-register-backend/scripts/run-30s.ps1 @@ -0,0 +1,7 @@ +$ErrorActionPreference = "Stop" +Remove-Item -Force out.log -ErrorAction SilentlyContinue +$p = Start-Process -FilePath node -ArgumentList 'src/index.js' -WorkingDirectory (Get-Location).Path -RedirectStandardOutput 'out.log' -RedirectStandardError 'out.log' -PassThru +Start-Sleep -Seconds 30 +Stop-Process -Id $p.Id -Force +Start-Sleep -Seconds 1 +Add-Content -Path out.log -Value "[runner] stopped after 30s" diff --git a/bls-register-backend/scripts/verifyProbeInDb.js b/bls-register-backend/scripts/verifyProbeInDb.js new file mode 100644 index 0000000..68cbf84 --- /dev/null +++ b/bls-register-backend/scripts/verifyProbeInDb.js @@ -0,0 +1,67 @@ +import dotenv from 'dotenv'; +import pg from 'pg'; + +dotenv.config(); + +const probeTs = Number(process.argv[2]); +const probeRoom = process.argv[3]; +const probeHotelId = Number(process.argv[4]); + +if 
(!Number.isFinite(probeTs) || !probeRoom || !Number.isFinite(probeHotelId)) { + console.error('Usage: node scripts/verifyProbeInDb.js '); + process.exit(1); +} + +const pool = new pg.Pool({ + host: process.env.POSTGRES_HOST_G5, + port: Number(process.env.POSTGRES_PORT_G5 || 5434), + user: process.env.POSTGRES_USER_G5, + password: process.env.POSTGRES_PASSWORD_G5, + database: process.env.POSTGRES_DATABASE_G5, + max: 1 +}); + +const main = async () => { + const eventResult = await pool.query( + `SELECT ts_ms, room_id, app_version, udp_raw + FROM rcu_info.rcu_info_events_g5 + WHERE ts_ms = $1 AND room_id = $2 + ORDER BY write_ts_ms DESC + LIMIT 1`, + [probeTs, probeRoom] + ); + + const statusResult = await pool.query( + `SELECT hotel_id, room_id, app_version, launcher_version, config_version, upgrade_ts_ms, register_ts_ms + FROM room_status.room_status_moment_g5 + WHERE hotel_id = $1 AND room_id = $2 + LIMIT 1`, + [probeHotelId, probeRoom] + ); + + console.log(`[probe-db] event_rows=${eventResult.rowCount}`); + if (eventResult.rowCount > 0) { + const row = eventResult.rows[0]; + console.log(`[probe-db] event.ts_ms=${row.ts_ms} room_id=${row.room_id} app_version=${row.app_version}`); + console.log(`[probe-db] event.udp_raw=${row.udp_raw}`); + } + + console.log(`[probe-db] room_status_rows=${statusResult.rowCount}`); + if (statusResult.rowCount > 0) { + const row = statusResult.rows[0]; + console.log(`[probe-db] room_status.room_id=${row.room_id} register_ts_ms=${row.register_ts_ms} upgrade_ts_ms=${row.upgrade_ts_ms}`); + } else { + console.log('[probe-db] room_status row not found (expected behavior when key does not exist)'); + } +}; + +main() + .then(async () => { + await pool.end(); + process.exit(0); + }) + .catch(async (err) => { + console.error(`[probe-db] verify failed: ${err.message}`); + await pool.end(); + process.exit(1); + }); diff --git a/bls-register-backend/scripts/verify_data.js b/bls-register-backend/scripts/verify_data.js new file mode 100644 index 
0000000..4440be0 --- /dev/null +++ b/bls-register-backend/scripts/verify_data.js @@ -0,0 +1,36 @@ + +import { config } from '../src/config/config.js'; +import dbManager from '../src/db/databaseManager.js'; +import { logger } from '../src/utils/logger.js'; + +const verifyData = async () => { + const client = await dbManager.pool.connect(); + try { + console.log('Verifying data in database...'); + + // Count total rows + const countSql = `SELECT count(*) FROM ${config.db.schema}.${config.db.table}`; + const countRes = await client.query(countSql); + console.log(`Total rows in ${config.db.schema}.${config.db.table}: ${countRes.rows[0].count}`); + + // Check recent rows + const recentSql = ` + SELECT * FROM ${config.db.schema}.${config.db.table} + ORDER BY ts_ms DESC + LIMIT 5 + `; + const recentRes = await client.query(recentSql); + console.log('Recent 5 rows:'); + recentRes.rows.forEach(row => { + console.log(JSON.stringify(row)); + }); + + } catch (err) { + console.error('Error verifying data:', err); + } finally { + client.release(); + await dbManager.pool.end(); + } +}; + +verifyData(); diff --git a/bls-register-backend/spec/onoffline-spec.md b/bls-register-backend/spec/onoffline-spec.md new file mode 100644 index 0000000..8115bef --- /dev/null +++ b/bls-register-backend/spec/onoffline-spec.md @@ -0,0 +1,50 @@ +bls-onoffline-backend 规格说明 + +1. Kafka 数据结构 +{ + "HotelCode": "1085", + "MAC": "00:1A:2B:3C:4D:5E", + "HostNumber": "091123987456", + "RoomNumber": "8888房", + "EndPoint": "50.2.60.1:6543", + "CurrentStatus": "on", + "CurrentTime": "2026-02-02T10:30:00Z", + "UnixTime": 1770000235000, + "LauncherVersion": "1.0.0", + "RebootReason": "1" +} + +2. Kafka 主题 +Topic:blwlog4Nodejs-rcu-onoffline-topic + +3. 
import dotenv from 'dotenv';

dotenv.config();

/**
 * Parse an environment value as a finite number.
 *
 * Empty / whitespace-only strings are treated as "unset" and fall back to
 * defaultValue. (Previously `Number('') === 0` is finite, so an env var set
 * to the empty string silently overrode the default with 0.)
 *
 * @param {string|number|undefined|null} value - raw env value
 * @param {number} defaultValue - fallback when value is missing or not numeric
 * @returns {number}
 */
const parseNumber = (value, defaultValue) => {
  if (value === undefined || value === null || String(value).trim() === '') {
    return defaultValue;
  }
  const parsed = Number(value);
  return Number.isFinite(parsed) ? parsed : defaultValue;
};

/**
 * Split a comma-separated env string into trimmed, non-empty entries.
 * @param {string|undefined} value
 * @returns {string[]}
 */
const parseList = (value) =>
  (value || '')
    .split(',')
    .map((item) => item.trim())
    .filter(Boolean);

export const config = {
  env: process.env.NODE_ENV || 'development',
  port: parseNumber(process.env.PORT, 3001),
  kafka: {
    brokers: parseList(process.env.KAFKA_BROKERS),
    topic: process.env.KAFKA_TOPIC || process.env.KAFKA_TOPICS || 'blwlog4Nodejs-rcu-register-topic',
    groupId: process.env.KAFKA_GROUP_ID || 'bls-register-consumer',
    clientId: process.env.KAFKA_CLIENT_ID || 'bls-register-consumer-client',
    consumerInstances: parseNumber(process.env.KAFKA_CONSUMER_INSTANCES, 1),
    maxInFlight: parseNumber(process.env.KAFKA_MAX_IN_FLIGHT, 20000),
    fetchMaxBytes: parseNumber(process.env.KAFKA_FETCH_MAX_BYTES, 50 * 1024 * 1024),
    fetchMinBytes: parseNumber(process.env.KAFKA_FETCH_MIN_BYTES, 256 * 1024),
    fetchMaxWaitMs: parseNumber(process.env.KAFKA_FETCH_MAX_WAIT_MS, 100),
    fromOffset: process.env.KAFKA_FROM_OFFSET || 'latest',
    autoCommitIntervalMs: parseNumber(process.env.KAFKA_AUTO_COMMIT_INTERVAL_MS, 5000),
    commitIntervalMs: parseNumber(process.env.KAFKA_COMMIT_INTERVAL_MS, 200),
    commitOnAttempt: process.env.KAFKA_COMMIT_ON_ATTEMPT === 'true',
    batchSize: parseNumber(process.env.KAFKA_BATCH_SIZE, 5000),
    batchTimeoutMs: parseNumber(process.env.KAFKA_BATCH_TIMEOUT_MS, 50),
    flushIntervalMs: parseNumber(process.env.KAFKA_FLUSH_INTERVAL_MS, 3000),
    logMessages: process.env.KAFKA_LOG_MESSAGES === 'true',
    // SASL block is only attached when both credentials are present.
    sasl: process.env.KAFKA_SASL_USERNAME && process.env.KAFKA_SASL_PASSWORD ? {
      mechanism: process.env.KAFKA_SASL_MECHANISM || 'plain',
      username: process.env.KAFKA_SASL_USERNAME,
      password: process.env.KAFKA_SASL_PASSWORD
    } : undefined
  },
  db: {
    host: process.env.POSTGRES_HOST_G5,
    port: parseNumber(process.env.POSTGRES_PORT_G5, 5434),
    user: process.env.POSTGRES_USER_G5,
    password: process.env.POSTGRES_PASSWORD_G5,
    database: process.env.POSTGRES_DATABASE_G5,
    max: parseNumber(process.env.POSTGRES_MAX_CONNECTIONS_G5, 6),
    ssl: process.env.POSTGRES_SSL_G5 === 'true' ? { rejectUnauthorized: false } : undefined,
    schema: process.env.DB_SCHEMA || 'rcu_info',
    table: process.env.DB_TABLE || 'rcu_info_events_g5',
    roomStatusSchema: process.env.DB_ROOM_STATUS_SCHEMA || 'room_status',
    roomStatusTable: process.env.DB_ROOM_STATUS_TABLE || 'room_status_moment_g5'
  },
  // NOTE(review): src/db/g5DatabaseManager.js instantiates with `config.g5db`;
  // without this section that module dereferenced `undefined.enabled`.
  // Disabled by default (dual-write is an opt-in toggle per the spec).
  // The exact env var names were not visible in the original — TODO confirm.
  g5db: {
    enabled: process.env.G5_DB_ENABLED === 'true',
    host: process.env.POSTGRES_HOST_G5,
    port: parseNumber(process.env.POSTGRES_PORT_G5, 5434),
    user: process.env.POSTGRES_USER_G5,
    password: process.env.POSTGRES_PASSWORD_G5,
    database: process.env.POSTGRES_DATABASE_G5,
    max: parseNumber(process.env.G5_DB_MAX_CONNECTIONS, 2),
    ssl: process.env.POSTGRES_SSL_G5 === 'true' ? { rejectUnauthorized: false } : undefined
  },
  redis: {
    host: process.env.REDIS_HOST || 'localhost',
    port: parseNumber(process.env.REDIS_PORT, 6379),
    password: process.env.REDIS_PASSWORD || undefined,
    db: parseNumber(process.env.REDIS_DB, 0),
    // NOTE(review): default says 'bls-onoffline' in a register service —
    // looks like a copy-paste from the onoffline template; confirm intent.
    projectName: process.env.REDIS_PROJECT_NAME || 'bls-onoffline',
    apiBaseUrl: process.env.REDIS_API_BASE_URL || `http://localhost:${parseNumber(process.env.PORT, 3001)}`
  }
};
'authorization_days', + 'room_num_remark', + 'room_type_remark', + 'room_remark', + 'mcu_name', + 'central_control_name', + 'configure_hotel_name', + 'configure_room_type_name' +]; + +const roomStatusColumns = [ + 'hotel_id', + 'room_id', + 'app_version', + 'launcher_version', + 'config_version', + 'upgrade_ts_ms', + 'register_ts_ms' +]; + +export class DatabaseManager { + constructor(dbConfig) { + this.pool = new Pool({ + host: dbConfig.host, + port: dbConfig.port, + user: dbConfig.user, + password: dbConfig.password, + database: dbConfig.database, + max: dbConfig.max, + ssl: dbConfig.ssl + }); + } + + async insertRegisterRows({ schema, table, rows }) { + if (!rows || rows.length === 0) { + return; + } + + const statement = ` + INSERT INTO ${schema}.${table} (${registerColumns.join(', ')}) + SELECT * + FROM UNNEST( + $1::int8[], + $2::int2[], + $3::text[], + $4::text[], + $5::int8[], + $6::int2[], + $7::text[], + $8::jsonb[], + $9::int2[], + $10::text[], + $11::text[], + $12::text[], + $13::text[], + $14::text[], + $15::text[], + $16::text[], + $17::text[], + $18::text[], + $19::text[], + $20::int8[], + $21::text[], + $22::int4[], + $23::int4[], + $24::int4[], + $25::text[], + $26::text[], + $27::text[], + $28::text[], + $29::text[], + $30::text[], + $31::text[], + $32::text[], + $33::text[] + ) + ON CONFLICT DO NOTHING + `; + + try { + const params = registerColumns.map((column) => rows.map((row) => row[column] ?? 
null)); + await this.pool.query(statement, params); + } catch (error) { + logger.error('Register table insert failed', { + error: error?.message, + schema, + table, + rowsLength: rows.length + }); + throw error; + } + } + + async updateRoomStatusRows({ schema, table, rows }) { + if (!rows || rows.length === 0) { + return; + } + + const statement = ` + WITH incoming AS ( + SELECT * + FROM UNNEST( + $1::int2[], + $2::text[], + $3::text[], + $4::text[], + $5::text[], + $6::int8[], + $7::int8[] + ) AS u(${roomStatusColumns.join(', ')}) + ), dedup AS ( + SELECT DISTINCT ON (hotel_id, room_id) + hotel_id, + room_id, + app_version, + launcher_version, + config_version, + upgrade_ts_ms, + register_ts_ms + FROM incoming + ORDER BY hotel_id, room_id, register_ts_ms DESC + ), existing AS ( + SELECT i.*, t.device_id + FROM dedup i + INNER JOIN ${schema}.${table} t + ON t.hotel_id = i.hotel_id + AND t.room_id = i.room_id + ) + INSERT INTO ${schema}.${table} ( + hotel_id, + room_id, + device_id, + app_version, + launcher_version, + config_version, + upgrade_ts_ms, + register_ts_ms + ) + SELECT + hotel_id, + room_id, + device_id, + app_version, + launcher_version, + config_version, + upgrade_ts_ms, + register_ts_ms + FROM existing + ON CONFLICT (hotel_id, room_id) DO UPDATE + SET + app_version = EXCLUDED.app_version, + launcher_version = EXCLUDED.launcher_version, + config_version = EXCLUDED.config_version, + upgrade_ts_ms = EXCLUDED.upgrade_ts_ms, + register_ts_ms = EXCLUDED.register_ts_ms + `; + + try { + const params = roomStatusColumns.map((column) => rows.map((row) => row[column] ?? 
import pg from 'pg';
import { config } from '../config/config.js';
import { logger } from '../utils/logger.js';

const { Pool } = pg;

// Column order must match the $1..$11 UNNEST arrays in insertRows.
const g5Columns = [
  'ts_ms',
  'write_ts_ms',
  'hotel_id',
  'mac',
  'device_id',
  'room_id',
  'ip',
  'current_status',
  'launcher_version',
  'reboot_reason',
  'record_source'
];

/**
 * Optional "G5" dual-write sink.
 *
 * When the feature is disabled — or the `g5db` config section is absent
 * entirely — the instance is a no-op: `this.pool` stays undefined and every
 * method short-circuits.
 */
export class G5DatabaseManager {
  constructor(dbConfig) {
    // BUG FIX: `config.g5db` may be undefined (the visible config.js did not
    // define it), and `dbConfig.enabled` then threw a TypeError at module
    // load. Optional chaining makes the disabled/missing path safe.
    if (!dbConfig?.enabled) return;
    this.pool = new Pool({
      host: dbConfig.host,
      port: dbConfig.port,
      user: dbConfig.user,
      password: dbConfig.password,
      database: dbConfig.database,
      max: dbConfig.max,
      ssl: dbConfig.ssl
    });
  }

  /**
   * Bulk-insert rows via a single UNNEST statement.
   * Per the G5 schema: record_source is fixed to 'CRICS' and current_status
   * is mapped to int2 (on -> 1, off -> 2, anything else -> 0).
   * No-op when dual-write is disabled or rows is empty.
   */
  async insertRows({ schema, table, rows }) {
    if (!this.pool || !rows || rows.length === 0) {
      return;
    }

    const statement = `
      INSERT INTO ${schema}.${table} (${g5Columns.join(', ')})
      SELECT *
      FROM UNNEST(
        $1::int8[],
        $2::int8[],
        $3::int2[],
        $4::text[],
        $5::text[],
        $6::text[],
        $7::text[],
        $8::int2[],
        $9::text[],
        $10::text[],
        $11::text[]
      )
      ON CONFLICT DO NOTHING
    `;

    try {
      // Column-major parameter arrays, one array per UNNEST argument.
      const params = g5Columns.map((column) => {
        return rows.map((row) => {
          if (column === 'record_source') {
            return 'CRICS';
          }
          if (column === 'current_status') {
            // current_status in G5 is int2: on=1, off=2, otherwise 0.
            if (row.current_status === 'on') return 1;
            if (row.current_status === 'off') return 2;
            return 0;
          }
          return row[column] ?? null;
        });
      });

      await this.pool.query(statement, params);
    } catch (error) {
      logger.error('G5 Database insert failed', {
        error: error?.message,
        schema,
        table,
        rowsLength: rows.length
      });
      throw error;
    }
  }

  /**
   * Probe connectivity with `SELECT 1`, bounded by a 5s timeout.
   * Returns true when disabled (nothing to check) or reachable, false on
   * failure; never throws.
   */
  async checkConnection() {
    if (!this.pool) return true; // Disabled: report healthy.
    let client;
    try {
      const connectPromise = this.pool.connect();
      const timeoutPromise = new Promise((_, reject) => {
        setTimeout(() => reject(new Error('Connection timeout')), 5000);
      });
      try {
        client = await Promise.race([connectPromise, timeoutPromise]);
      } catch (raceError) {
        // If the timeout won, the connect may still resolve later — release
        // that client back to the pool so it is not leaked.
        connectPromise.then(c => c.release()).catch(() => { });
        throw raceError;
      }
      await client.query('SELECT 1');
      return true;
    } catch (err) {
      logger.error('G5 Database check connection failed', { error: err.message });
      return false;
    } finally {
      if (client) {
        client.release();
      }
    }
  }

  // Drain and close the pool (no-op when disabled).
  async close() {
    if (this.pool) {
      await this.pool.end();
    }
  }
}

const g5DbManager = new G5DatabaseManager(config.g5db);
export default g5DbManager;
a/bls-register-backend/src/index.js b/bls-register-backend/src/index.js new file mode 100644 index 0000000..ac825bd --- /dev/null +++ b/bls-register-backend/src/index.js @@ -0,0 +1,271 @@ +import cron from 'node-cron'; +import { config } from './config/config.js'; +import dbManager from './db/databaseManager.js'; +import { createKafkaConsumers } from './kafka/consumer.js'; +import { parseMessageToRows } from './processor/index.js'; +import { MetricCollector } from './utils/metricCollector.js'; +import { logger } from './utils/logger.js'; + +const NETWORK_CODES = new Set([ + 'ECONNREFUSED', + 'ECONNRESET', + 'EPIPE', + 'ETIMEDOUT', + 'ENOTFOUND', + 'EHOSTUNREACH', + 'ENETUNREACH', + '57P03', + '08006', + '08001', + '08000', + '08003' +]); + +const isDbConnectionError = (err) => { + if (typeof err?.code === 'string' && NETWORK_CODES.has(err.code)) { + return true; + } + const message = typeof err?.message === 'string' ? err.message.toLowerCase() : ''; + return ( + message.includes('connection timeout') || + message.includes('connection terminated') || + message.includes('connection refused') || + message.includes('terminating connection') || + message.includes('econnrefused') || + message.includes('econnreset') || + message.includes('etimedout') || + message.includes('could not connect') || + message.includes('the database system is starting up') || + message.includes('no pg_hba.conf entry') + ); +}; + +const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms)); + +const bootstrap = async () => { + logger.info('Starting register consumer', { + env: config.env, + kafka: { + brokers: config.kafka.brokers, + topic: config.kafka.topic, + groupId: config.kafka.groupId + }, + db: { + host: config.db.host, + port: config.db.port, + database: config.db.database, + schema: config.db.schema, + table: config.db.table, + roomStatusSchema: config.db.roomStatusSchema, + roomStatusTable: config.db.roomStatusTable + }, + flushIntervalMs: config.kafka.flushIntervalMs + 
}); + + const metricCollector = new MetricCollector(); + const totals = { + kafkaPulled: 0, + dbInserted: 0, + parseError: 0, + dbFailed: 0 + }; + const flushIntervalMs = Math.max(3000, Number.isFinite(config.kafka.flushIntervalMs) ? config.kafka.flushIntervalMs : 3000); + + const queue = []; + let flushTimer = null; + let flushing = false; + const runCounterTimer = setInterval(() => { + logger.info('Run counters', { + kafkaPulled: totals.kafkaPulled, + dbInserted: totals.dbInserted, + parseError: totals.parseError, + dbFailed: totals.dbFailed + }); + }, 10000); + + const handleError = (error, message) => { + logger.error('Kafka processing error', { + error: error?.message, + type: error?.type, + topic: message?.topic, + partition: message?.partition, + offset: message?.offset + }); + }; + + cron.schedule('* * * * *', () => { + const metrics = metricCollector.getAndReset(); + const flushAvgMs = metrics.batch_flush_count > 0 + ? (metrics.batch_flush_ms_sum / metrics.batch_flush_count).toFixed(1) + : '0.0'; + const dbAvgMs = metrics.db_insert_count > 0 + ? 
(metrics.db_insert_ms_sum / metrics.db_insert_count).toFixed(1) + : '0.0'; + logger.info('Minute metrics', { + kafkaPulled: metrics.kafka_pulled, + parseError: metrics.parse_error, + dbInserted: metrics.db_inserted, + dbFailed: metrics.db_failed, + flushAvgMs, + dbAvgMs + }); + }); + + const processValidRowsWithRetry = async (registerRows, roomStatusRows) => { + const startedAt = Date.now(); + while (true) { + try { + await dbManager.insertRegisterRows({ + schema: config.db.schema, + table: config.db.table, + rows: registerRows + }); + await dbManager.updateRoomStatusRows({ + schema: config.db.roomStatusSchema, + table: config.db.roomStatusTable, + rows: roomStatusRows + }); + + metricCollector.increment('db_insert_count', 1); + metricCollector.increment('db_insert_ms_sum', Date.now() - startedAt); + metricCollector.increment('db_inserted', registerRows.length); + totals.dbInserted += registerRows.length; + return; + } catch (err) { + if (!isDbConnectionError(err)) { + throw err; + } + logger.warn('Database unavailable, retrying in 5s', { error: err?.message }); + await sleep(5000); + } + } + }; + + const scheduleFlush = () => { + if (flushTimer) { + return; + } + flushTimer = setTimeout(() => { + flushTimer = null; + void flushQueue(); + }, flushIntervalMs); + }; + + const flushQueue = async () => { + if (flushing) { + return; + } + if (queue.length === 0) { + return; + } + + flushing = true; + const startedAt = Date.now(); + const currentBatch = queue.splice(0, queue.length); + + const parsedItems = []; + for (const item of currentBatch) { + try { + const parsed = parseMessageToRows(item.message); + parsedItems.push({ item, parsed }); + } catch (err) { + metricCollector.increment('parse_error'); + totals.parseError += 1; + handleError(err, item.message); + item.resolve(); + } + } + + const insertParsedItems = async (items) => { + if (items.length === 0) { + return; + } + + const registerRows = items.flatMap((it) => it.parsed.registerRows); + const roomStatusRows 
= items.flatMap((it) => it.parsed.roomStatusRows); + + try { + await processValidRowsWithRetry(registerRows, roomStatusRows); + } catch (err) { + if (items.length > 1) { + const mid = Math.floor(items.length / 2); + await insertParsedItems(items.slice(0, mid)); + await insertParsedItems(items.slice(mid)); + return; + } + + metricCollector.increment('db_failed', 1); + totals.dbFailed += 1; + handleError(err, items[0].item.message); + } + }; + + if (parsedItems.length > 0) { + await insertParsedItems(parsedItems); + + for (const parsedItem of parsedItems) { + parsedItem.item.resolve(); + } + } + + metricCollector.increment('batch_flush_count', 1); + metricCollector.increment('batch_flush_ms_sum', Date.now() - startedAt); + + flushing = false; + + if (queue.length > 0) { + scheduleFlush(); + } + }; + + const handleMessage = (message) => { + metricCollector.increment('kafka_pulled'); + totals.kafkaPulled += 1; + return new Promise((resolve) => { + queue.push({ message, resolve }); + scheduleFlush(); + }); + }; + + const consumers = createKafkaConsumers({ + kafkaConfig: config.kafka, + onMessage: handleMessage, + onError: handleError + }); + + const shutdown = async (signal) => { + logger.info(`Received ${signal}, shutting down...`); + try { + if (flushTimer) { + clearTimeout(flushTimer); + flushTimer = null; + } + clearInterval(runCounterTimer); + await flushQueue(); + + if (consumers && consumers.length > 0) { + await Promise.all(consumers.map((consumer) => new Promise((resolve) => consumer.close(true, resolve)))); + } + + await dbManager.close(); + logger.info('Run summary', { + kafkaPulled: totals.kafkaPulled, + dbInserted: totals.dbInserted, + parseError: totals.parseError, + dbFailed: totals.dbFailed + }); + process.exit(0); + } catch (err) { + logger.error('Error during shutdown', { error: err?.message }); + process.exit(1); + } + }; + + process.on('SIGTERM', () => shutdown('SIGTERM')); + process.on('SIGINT', () => shutdown('SIGINT')); +}; + 
+bootstrap().catch((error) => { + logger.error('Service bootstrap failed', { error: error?.message }); + process.exit(1); +}); diff --git a/bls-register-backend/src/kafka/consumer.js b/bls-register-backend/src/kafka/consumer.js new file mode 100644 index 0000000..1758c93 --- /dev/null +++ b/bls-register-backend/src/kafka/consumer.js @@ -0,0 +1,175 @@ +import kafka from 'kafka-node'; +import { logger } from '../utils/logger.js'; + +const { ConsumerGroup } = kafka; + +import { OffsetTracker } from './offsetTracker.js'; + +const createOneConsumer = ({ kafkaConfig, onMessage, onError, instanceIndex }) => { + const kafkaHost = kafkaConfig.brokers.join(','); + const clientId = instanceIndex === 0 ? kafkaConfig.clientId : `${kafkaConfig.clientId}-${instanceIndex}`; + const id = `${clientId}-${process.pid}-${Date.now()}`; + const maxInFlight = Number.isFinite(kafkaConfig.maxInFlight) ? kafkaConfig.maxInFlight : 5000; + const commitIntervalMs = Number.isFinite(kafkaConfig.commitIntervalMs) ? kafkaConfig.commitIntervalMs : 200; + let inFlight = 0; + + const tracker = new OffsetTracker(); + let pendingCommits = new Map(); // key: `${topic}-${partition}` -> { topic, partition, offset } + let commitTimer = null; + + const flushCommits = () => { + if (pendingCommits.size === 0) return; + const batch = pendingCommits; + pendingCommits = new Map(); + + consumer.sendOffsetCommitRequest( + Array.from(batch.values()), + (err) => { + if (err) { + for (const [k, v] of batch.entries()) { + pendingCommits.set(k, v); + } + logger.error('Kafka commit failed', { error: err?.message, count: batch.size }); + } + } + ); + }; + + const scheduleCommitFlush = () => { + if (commitTimer) return; + commitTimer = setTimeout(() => { + commitTimer = null; + flushCommits(); + }, commitIntervalMs); + }; + + const consumer = new ConsumerGroup( + { + kafkaHost, + groupId: kafkaConfig.groupId, + clientId, + id, + fromOffset: kafkaConfig.fromOffset || 'latest', + protocol: ['roundrobin'], + outOfRangeOffset: 
'latest', + autoCommit: false, + autoCommitIntervalMs: kafkaConfig.autoCommitIntervalMs, + fetchMaxBytes: kafkaConfig.fetchMaxBytes, + fetchMinBytes: kafkaConfig.fetchMinBytes, + fetchMaxWaitMs: kafkaConfig.fetchMaxWaitMs, + sasl: kafkaConfig.sasl + }, + kafkaConfig.topic + ); + + const tryResume = () => { + if (inFlight < maxInFlight && consumer.paused) { + consumer.resume(); + } + }; + + consumer.on('message', (message) => { + inFlight += 1; + tracker.add(message.topic, message.partition, message.offset); + + if (inFlight >= maxInFlight) { + consumer.pause(); + } + Promise.resolve(onMessage(message)) + .then(() => {}) + .catch((error) => { + logger.error('Kafka message handling failed', { error: error?.message }); + if (onError) { + onError(error, message); + } + }) + .finally(() => { + const commitOffset = tracker.markDone(message.topic, message.partition, message.offset); + if (commitOffset !== null) { + const key = `${message.topic}-${message.partition}`; + pendingCommits.set(key, { + topic: message.topic, + partition: message.partition, + offset: commitOffset, + metadata: 'm' + }); + scheduleCommitFlush(); + } + inFlight -= 1; + tryResume(); + }); + }); + + consumer.on('error', (error) => { + logger.error('Kafka consumer error', { error: error?.message }); + if (onError) { + onError(error); + } + }); + + consumer.on('connect', () => { + logger.info(`Kafka Consumer connected`, { + groupId: kafkaConfig.groupId, + clientId: clientId + }); + }); + + consumer.on('rebalancing', () => { + logger.info(`Kafka Consumer rebalancing`, { + groupId: kafkaConfig.groupId, + clientId: clientId + }); + tracker.clear(); + pendingCommits.clear(); + if (commitTimer) { + clearTimeout(commitTimer); + commitTimer = null; + } + }); + + consumer.on('rebalanced', () => { + logger.info('Kafka Consumer rebalanced', { clientId, groupId: kafkaConfig.groupId }); + }); + + consumer.on('error', (err) => { + logger.error('Kafka Consumer Error', { error: err.message }); + }); + + 
/**
 * Tracks per-partition Kafka offsets so that a commit only ever advances past
 * a contiguous prefix of successfully processed messages. Messages may finish
 * out of order; the tracker holds back the commit frontier until every earlier
 * offset in the partition has completed.
 */
export class OffsetTracker {
  constructor() {
    // "topic-partition" key -> { nextCommitOffset: number|null,
    //                            done: Set of finished offsets }
    this.partitions = new Map();
  }

  /**
   * Record that a message was received (before processing). Keeps
   * nextCommitOffset at the lowest offset seen so far for the partition.
   */
  add(topic, partition, offset) {
    const key = `${topic}-${partition}`;
    let state = this.partitions.get(key);
    if (state === undefined) {
      state = { nextCommitOffset: null, done: new Set() };
      this.partitions.set(key, state);
    }
    const candidate = Number(offset);
    if (!Number.isFinite(candidate)) {
      return; // Non-numeric offsets are ignored (entry still created).
    }
    state.nextCommitOffset = state.nextCommitOffset === null
      ? candidate
      : Math.min(state.nextCommitOffset, candidate);
  }

  /**
   * Record that a message finished processing.
   *
   * @returns {number|null} the new commit frontier when it advanced past one
   *   or more contiguous completed offsets, otherwise null.
   */
  markDone(topic, partition, offset) {
    const state = this.partitions.get(`${topic}-${partition}`);
    const completed = Number(offset);
    if (state === undefined || !Number.isFinite(completed)) {
      return null;
    }

    state.done.add(completed);
    if (state.nextCommitOffset === null) {
      state.nextCommitOffset = completed;
    }

    // Consume the contiguous run of completed offsets starting at the
    // frontier; Set.delete returns false as soon as there is a gap.
    const start = state.nextCommitOffset;
    let frontier = start;
    while (state.done.delete(frontier)) {
      frontier += 1;
    }
    state.nextCommitOffset = frontier;

    return frontier === start ? null : frontier;
  }

  // Drop all tracked state (used on consumer rebalance).
  clear() {
    this.partitions.clear();
  }
}
/**
 * Coerce a value to a NUL-stripped string, optionally truncated to maxLength.
 * Returns null for null/undefined input (NULs are removed because Postgres
 * text columns reject \u0000).
 */
const normalizeText = (value, maxLength) => {
  if (value == null) {
    return null;
  }
  const cleaned = String(value).replaceAll('\u0000', '');
  return maxLength && cleaned.length > maxLength
    ? cleaned.slice(0, maxLength)
    : cleaned;
};

/**
 * Recursively strip NUL characters from every string inside a JSON-ish value
 * (objects, arrays, strings); other values pass through unchanged.
 */
const sanitizeJsonValue = (value) => {
  if (value == null) {
    return value; // Preserve null vs undefined distinction.
  }
  if (typeof value === 'string') {
    return value.replaceAll('\u0000', '');
  }
  if (Array.isArray(value)) {
    return value.map(sanitizeJsonValue);
  }
  if (typeof value === 'object') {
    return Object.fromEntries(
      Object.entries(value).map(([key, inner]) => [key, sanitizeJsonValue(inner)])
    );
  }
  return value;
};

// Heuristic: base64 strings have length divisible by 4, use only the base64
// alphabet, and carry at most two trailing '=' pads.
const BASE64_SHAPE = /^[A-Za-z0-9+/]+={0,2}$/;

const isLikelyBase64 = (text) =>
  Boolean(text) && text.length % 4 === 0 && BASE64_SHAPE.test(text);

/**
 * Parse a value as a truncated integer; returns null for missing, empty, or
 * non-finite input.
 */
const normalizeInteger = (value) => {
  if (value == null || value === '') {
    return null;
  }
  const numeric = typeof value === 'number' ? value : Number(value);
  return Number.isFinite(numeric) ? Math.trunc(numeric) : null;
};

/**
 * Return value when it is a number inside [min, max], otherwise fallback.
 * Non-numbers and NaN always yield the fallback.
 */
const inRangeOr = (value, min, max, fallback) => {
  const usable =
    typeof value === 'number' &&
    !Number.isNaN(value) &&
    value >= min &&
    value <= max;
  return usable ? value : fallback;
};

/**
 * Normalize a timestamp to epoch milliseconds. Missing/invalid input falls
 * back to "now"; positive values below 1e11 are assumed to be epoch seconds
 * and are scaled up.
 */
const normalizeTsMs = (value) => {
  const numeric = normalizeInteger(value);
  if (numeric === null) {
    return Date.now();
  }
  const looksLikeSeconds = numeric > 0 && numeric < 100000000000;
  return looksLikeSeconds ? numeric * 1000 : numeric;
};

/**
 * Normalize raw UDP payload material to a base64 string (or null).
 * Strings already shaped like base64 pass through; everything else is
 * NUL-stripped / byte-converted and then base64-encoded.
 */
const normalizeUdpRaw = (value) => {
  if (value == null) {
    return null;
  }
  if (Buffer.isBuffer(value)) {
    return value.toString('base64');
  }
  if (Array.isArray(value)) {
    return Buffer.from(value).toString('base64');
  }
  if (typeof value === 'string') {
    const text = value.replaceAll('\u0000', '');
    return isLikelyBase64(text)
      ? text
      : Buffer.from(text, 'utf8').toString('base64');
  }
  return Buffer.from(String(value), 'utf8').toString('base64');
};

/**
 * Normalize the free-form `extra` field into a sanitized JSON object or null.
 * JSON-object strings are parsed; JSON scalars become { value }, unparseable
 * strings become { raw }, and other scalars become { raw: String(value) }.
 */
const normalizeExtra = (value) => {
  if (value == null || value === '') {
    return null;
  }
  if (typeof value === 'object') {
    return sanitizeJsonValue(value);
  }
  if (typeof value !== 'string') {
    return sanitizeJsonValue({ raw: String(value) });
  }
  try {
    const parsed = JSON.parse(value);
    return typeof parsed === 'object' && parsed !== null
      ? sanitizeJsonValue(parsed)
      : sanitizeJsonValue({ value: parsed });
  } catch {
    return sanitizeJsonValue({ raw: value });
  }
};

// First defined value among payload[snakeKey] then payload[pascalKey].
const pick = (payload, snakeKey, pascalKey) => {
  if (payload[snakeKey] !== undefined) {
    return payload[snakeKey];
  }
  return payload[pascalKey] !== undefined ? payload[pascalKey] : undefined;
};

// Decode a raw Kafka message value and build the database rows from it.
export const buildRowsFromMessageValue = (value) => {
  const payload = parseKafkaPayload(value);
  return buildRowsFromPayload(payload);
};
const normalizedInput = { + ts_ms: pick(rawPayload, 'ts_ms', 'ts_ms'), + upgrade_ts_ms: pick(rawPayload, 'upgrade_ts_ms', 'upgrade_ts_ms'), + hotel_id: pick(rawPayload, 'hotel_id', 'hotel_id'), + room_id: pick(rawPayload, 'room_id', 'room_id'), + device_id: pick(rawPayload, 'device_id', 'device_id'), + is_send: pick(rawPayload, 'is_send', 'is_send'), + udp_raw: pick(rawPayload, 'udp_raw', 'udp_raw'), + extra: pick(rawPayload, 'extra', 'extra'), + ip_type: pick(rawPayload, 'ip_type', 'ip_type'), + model_num: pick(rawPayload, 'model_num', 'model_num'), + server_ip: pick(rawPayload, 'server_ip', 'server_ip'), + ip: pick(rawPayload, 'ip', 'ip'), + subnet_mask: pick(rawPayload, 'subnet_mask', 'subnet_mask'), + gateway: pick(rawPayload, 'gateway', 'gateway'), + dns: pick(rawPayload, 'dns', 'dns'), + app_version: pick(rawPayload, 'app_version', 'app_version'), + rcu_time: pick(rawPayload, 'rcu_time', 'rcu_time'), + launcher_version: pick(rawPayload, 'launcher_version', 'launcher_version'), + mac: pick(rawPayload, 'mac', 'mac'), + room_type_id: pick(rawPayload, 'room_type_id', 'room_type_id'), + config_version: pick(rawPayload, 'config_version', 'config_version'), + room_status: pick(rawPayload, 'room_status', 'room_status'), + season: pick(rawPayload, 'season', 'season'), + sys_lock_status: pick(rawPayload, 'sys_lock_status', 'sys_lock_status'), + authorization_time: pick(rawPayload, 'authorization_time', 'authorization_time'), + authorization_days: pick(rawPayload, 'authorization_days', 'authorization_days'), + room_num_remark: pick(rawPayload, 'room_num_remark', 'room_num_remark'), + room_type_remark: pick(rawPayload, 'room_type_remark', 'room_type_remark'), + room_remark: pick(rawPayload, 'room_remark', 'room_remark'), + mcu_name: pick(rawPayload, 'mcu_name', 'mcu_name'), + central_control_name: pick(rawPayload, 'central_control_name', 'central_control_name'), + configure_hotel_name: pick(rawPayload, 'configure_hotel_name', 'configure_hotel_name'), + 
configure_room_type_name: pick(rawPayload, 'configure_room_type_name', 'configure_room_type_name') + }; + + const payload = kafkaPayloadSchema.parse(normalizedInput); + + const tsMs = normalizeTsMs(payload.ts_ms); + const hotelId = inRangeOr(normalizeInteger(payload.hotel_id), -32768, 32767, 0); + const roomId = normalizeText(payload.room_id, 50) || ''; + + const registerRow = { + ts_ms: tsMs, + hotel_id: hotelId, + room_id: roomId, + device_id: normalizeText(payload.device_id, 64), + write_ts_ms: Date.now(), + is_send: inRangeOr(normalizeInteger(payload.is_send), -32768, 32767, 0), + udp_raw: normalizeUdpRaw(payload.udp_raw), + extra: normalizeExtra(payload.extra), + ip_type: inRangeOr(normalizeInteger(payload.ip_type), -32768, 32767, null), + model_num: normalizeText(payload.model_num, 32), + server_ip: normalizeText(payload.server_ip, 21), + ip: normalizeText(payload.ip, 21), + subnet_mask: normalizeText(payload.subnet_mask, 15), + gateway: normalizeText(payload.gateway, 15), + dns: normalizeText(payload.dns, 15), + app_version: normalizeText(payload.app_version, 64), + rcu_time: normalizeText(payload.rcu_time, 25), + launcher_version: normalizeText(payload.launcher_version, 64), + mac: normalizeText(payload.mac, 17), + room_type_id: normalizeInteger(payload.room_type_id), + config_version: normalizeText(payload.config_version, 32), + room_status: inRangeOr(normalizeInteger(payload.room_status), -2147483648, 2147483647, null), + season: inRangeOr(normalizeInteger(payload.season), -2147483648, 2147483647, null), + sys_lock_status: inRangeOr(normalizeInteger(payload.sys_lock_status), -2147483648, 2147483647, null), + authorization_time: normalizeText(payload.authorization_time, 10), + authorization_days: normalizeText(payload.authorization_days, 10), + room_num_remark: normalizeText(payload.room_num_remark, 255), + room_type_remark: normalizeText(payload.room_type_remark, 64), + room_remark: normalizeText(payload.room_remark, 64), + mcu_name: 
normalizeText(payload.mcu_name, 255), + central_control_name: normalizeText(payload.central_control_name, 255), + configure_hotel_name: normalizeText(payload.configure_hotel_name, 255), + configure_room_type_name: normalizeText(payload.configure_room_type_name, 255) + }; + + const roomStatusUpdateRow = { + hotel_id: hotelId, + room_id: roomId, + app_version: registerRow.app_version, + launcher_version: registerRow.launcher_version, + config_version: registerRow.config_version, + upgrade_ts_ms: normalizeTsMs(payload.upgrade_ts_ms), + register_ts_ms: tsMs + }; + + return { + registerRows: [registerRow], + roomStatusRows: [roomStatusUpdateRow] + }; +}; + +export const parseMessageToRows = (message) => { + const rawValue = message.value.toString(); + // logger.info('Processing message', { offset: message.offset, rawValuePreview: rawValue.substring(0, 100) }); + + let payload; + try { + payload = JSON.parse(rawValue); + } catch (e) { + const error = new Error(`JSON Parse Error: ${e.message}`); + error.type = 'PARSE_ERROR'; + throw error; + } + + // logger.info('Payload parsed', { payload }); + + const validationResult = kafkaPayloadSchema.safeParse(payload); + + if (!validationResult.success) { + const error = new Error(`Schema Validation Failed: ${JSON.stringify(validationResult.error.errors)}`); + error.type = 'VALIDATION_ERROR'; + throw error; + } + + return buildRowsFromPayload(payload); +}; + +export const processKafkaMessage = async ({ message, dbManager, config }) => { + let rows; + try { + rows = parseMessageToRows(message); + } catch (error) { + throw error; + } + + try { + await dbManager.insertRegisterRows({ schema: config.db.schema, table: config.db.table, rows: rows.registerRows }); + await dbManager.updateRoomStatusRows({ + schema: config.db.roomStatusSchema, + table: config.db.roomStatusTable, + rows: rows.roomStatusRows + }); + } catch (error) { + error.type = 'DB_ERROR'; + const sample = rows?.registerRows?.[0]; + error.dbContext = { + rowsLength: 
rows?.registerRows?.length || 0, + sampleRow: sample + ? { + ts_ms: sample.ts_ms, + hotel_id: sample.hotel_id, + device_id: sample.device_id, + room_id: sample.room_id + } + : null + }; + throw error; + } + + return rows.registerRows.length; +}; diff --git a/bls-register-backend/src/processor/udpParser.js b/bls-register-backend/src/processor/udpParser.js new file mode 100644 index 0000000..ccecfc9 --- /dev/null +++ b/bls-register-backend/src/processor/udpParser.js @@ -0,0 +1,83 @@ +const normalizeHex = (hex) => { + if (typeof hex !== 'string') { + return ''; + } + let cleaned = hex.trim().replace(/^0x/i, '').replace(/\s+/g, ''); + if (cleaned.length % 2 === 1) { + cleaned = `0${cleaned}`; + } + return cleaned; +}; + +const toHex = (value) => `0x${value.toString(16).padStart(2, '0')}`; + +const readUInt16 = (buffer, offset) => buffer.readUInt16BE(offset); + +export const parse0x36 = (udpRaw) => { + const cleaned = normalizeHex(udpRaw); + const buffer = cleaned ? Buffer.from(cleaned, 'hex') : Buffer.alloc(0); + const sysLockStatus = buffer.length > 0 ? buffer[0] : null; + const reportCount = buffer.length > 7 ? buffer[7] : null; + let offset = 8; + const devices = []; + for (let i = 0; i < (reportCount || 0) && offset + 5 < buffer.length; i += 1) { + devices.push({ + dev_type: buffer[offset], + dev_addr: buffer[offset + 1], + dev_loop: readUInt16(buffer, offset + 2), + dev_data: readUInt16(buffer, offset + 4) + }); + offset += 6; + } + const faultCount = offset < buffer.length ? 
// Strip an optional "0x" prefix and all whitespace from a hex string, then
// left-pad to an even digit count so Buffer.from(..., 'hex') keeps every nibble.
const normalizeHex = (hex) => {
  if (typeof hex !== 'string') {
    return '';
  }
  let digits = hex.trim().replace(/^0x/i, '').replace(/\s+/g, '');
  if (digits.length % 2 === 1) {
    digits = `0${digits}`;
  }
  return digits;
};

// Render a byte as a 0x-prefixed two-digit lowercase hex literal, e.g. 5 -> "0x05".
const toHex = (value) => `0x${value.toString(16).padStart(2, '0')}`;

// Big-endian 16-bit read at `offset`.
const readUInt16 = (buffer, offset) => buffer.readUInt16BE(offset);

// Decode a hex payload into a Buffer (empty Buffer for empty/non-string input).
const decode = (udpRaw) => {
  const digits = normalizeHex(udpRaw);
  return digits ? Buffer.from(digits, 'hex') : Buffer.alloc(0);
};

/**
 * Parse a 0x36 status report:
 *   [sysLock][6 bytes][reportCount] + reportCount × 6-byte device records,
 *   then [faultCount] + faultCount × 6-byte fault records.
 * Truncated frames yield partial results; missing fields come back null.
 */
export const parse0x36 = (udpRaw) => {
  const bytes = decode(udpRaw);
  const sysLockStatus = bytes.length > 0 ? bytes[0] : null;
  const reportCount = bytes.length > 7 ? bytes[7] : null;

  const devices = [];
  let cursor = 8;
  let remaining = reportCount || 0;
  while (remaining > 0 && cursor + 5 < bytes.length) {
    devices.push({
      dev_type: bytes[cursor],
      dev_addr: bytes[cursor + 1],
      dev_loop: readUInt16(bytes, cursor + 2),
      dev_data: readUInt16(bytes, cursor + 4)
    });
    cursor += 6;
    remaining -= 1;
  }

  const faultCount = cursor < bytes.length ? bytes[cursor] : null;
  cursor += 1;

  const faults = [];
  remaining = faultCount || 0;
  while (remaining > 0 && cursor + 5 < bytes.length) {
    faults.push({
      fault_dev_type: bytes[cursor],
      fault_dev_addr: bytes[cursor + 1],
      fault_dev_loop: readUInt16(bytes, cursor + 2),
      error_type: bytes[cursor + 4],
      error_data: bytes[cursor + 5]
    });
    cursor += 6;
    remaining -= 1;
  }

  return { sysLockStatus, reportCount, faultCount, devices, faults };
};

/**
 * Parse a 0x0F downlink control frame: [controlCount] + controlCount ×
 * 6-byte entries. `type` is the big-endian 16-bit view of the last two
 * bytes; type_l/type_h expose the same two bytes individually.
 * NOTE(review): with a big-endian read, the byte at offset+4 is the HIGH
 * byte — the _l/_h naming looks swapped relative to `type`; confirm against
 * the wire spec before relying on these fields.
 */
export const parse0x0fDownlink = (udpRaw) => {
  const bytes = decode(udpRaw);
  const controlCount = bytes.length > 0 ? bytes[0] : null;
  const controlParams = [];
  let cursor = 1;
  for (let remaining = controlCount || 0; remaining > 0 && cursor + 5 < bytes.length; remaining -= 1) {
    controlParams.push({
      dev_type: bytes[cursor],
      dev_addr: bytes[cursor + 1],
      loop: readUInt16(bytes, cursor + 2),
      type: readUInt16(bytes, cursor + 4),
      type_l: bytes[cursor + 4],
      type_h: bytes[cursor + 5]
    });
    cursor += 6;
  }
  return { controlCount, controlParams };
};

/** Parse a 0x0F ACK frame: the second byte is the ack code as a hex literal. */
export const parse0x0fAck = (udpRaw) => {
  const bytes = decode(udpRaw);
  return { ackCode: bytes.length > 1 ? toHex(bytes[1]) : null };
};
/** Redis list key holding failed messages for `projectName`. */
export const buildErrorQueueKey = (projectName) => `${projectName}_error_queue`;

/**
 * Push a failure payload onto the retry queue (JSON-serialized).
 * Logs and rethrows so the caller decides whether to crash or continue.
 */
export const enqueueError = async (client, queueKey, payload) => {
  try {
    await client.rPush(queueKey, JSON.stringify(payload));
  } catch (error) {
    logger.error('Redis enqueue error failed', { error: error?.message });
    throw error;
  }
};

/**
 * Long-running worker: blocks on the error queue, re-runs `handler` for each
 * item and re-queues failures with an incremented `attempts` counter until
 * `maxAttempts`, after which the item is reported and dropped (dead-letter).
 *
 * @param {AbortSignal} [signal] optional stop mechanism for shutdown/tests.
 *   Backward-compatible: omitting it keeps the original run-forever behavior.
 *   Fix: the original `while (true)` loop could never be stopped cleanly.
 */
export const startErrorRetryWorker = async ({
  client,
  queueKey,
  handler,
  redisIntegration,
  maxAttempts = 5,
  signal
}) => {
  while (!signal?.aborted) {
    // Timeout 0 = block until an element arrives.
    const result = await client.blPop(queueKey, 0);
    const raw = result?.element;
    if (!raw) {
      continue;
    }
    let item;
    try {
      item = JSON.parse(raw);
    } catch (error) {
      // Malformed entries are reported and dropped; re-queueing them would
      // loop forever on the same unparseable payload.
      logger.error('Invalid error payload', { error: error?.message });
      await redisIntegration.error('Invalid error payload', { module: 'redis', stack: error?.message });
      continue;
    }
    const attempts = item.attempts || 0;
    try {
      await handler(item);
    } catch (error) {
      logger.error('Retry handler failed', { error: error?.message, stack: error?.stack });
      const nextPayload = {
        ...item,
        attempts: attempts + 1,
        lastError: error?.message,
        lastAttemptAt: Date.now()
      };
      if (nextPayload.attempts >= maxAttempts) {
        // Give up and surface the item through the monitoring channel.
        await redisIntegration.error('Retry attempts exceeded', { module: 'retry', stack: JSON.stringify(nextPayload) });
      } else {
        await enqueueError(client, queueKey, nextPayload);
      }
    }
  }
};
/**
 * Create and connect a node-redis v4 client from `config`
 * ({ host, port, password, db }). The caller owns the connection lifecycle.
 */
export const createRedisClient = async (config) => {
  const client = createClient({
    socket: {
      host: config.host,
      port: config.port
    },
    password: config.password,
    database: config.db
  });
  await client.connect();
  return client;
};

/**
 * Thin wrapper pushing structured log entries and heartbeats into Redis
 * lists consumed by the monitoring platform. The Chinese key names are part
 * of the platform contract and must not be translated.
 */
export class RedisIntegration {
  constructor(client, projectName, apiBaseUrl) {
    this.client = client;
    this.projectName = projectName;
    this.apiBaseUrl = apiBaseUrl;
    this.heartbeatKey = '项目心跳';
    this.logKey = `${projectName}_项目控制台`;
    // Interval handle so the heartbeat can be stopped (see startHeartbeat).
    this.heartbeatTimer = null;
  }

  // Serialize and push one log entry onto the project console list.
  async #push(level, message, context) {
    const payload = {
      timestamp: new Date().toISOString(),
      level,
      message,
      metadata: context || undefined
    };
    await this.client.rPush(this.logKey, JSON.stringify(payload));
  }

  /** Push an info-level log entry. */
  async info(message, context) {
    await this.#push('info', message, context);
  }

  /** Push an error-level log entry. */
  async error(message, context) {
    await this.#push('error', message, context);
  }

  /**
   * Start pushing a heartbeat every 3 s. Fixes vs. the original:
   * - the floating rPush promise is now caught (an unhandled rejection on a
   *   transient Redis error would have crashed the process);
   * - the timer is unref'd so it cannot keep the process alive at shutdown;
   * - the interval id is stored and returned so it can be stopped.
   * Idempotent: calling twice returns the existing timer.
   */
  startHeartbeat() {
    if (this.heartbeatTimer) {
      return this.heartbeatTimer;
    }
    this.heartbeatTimer = setInterval(() => {
      const payload = {
        projectName: this.projectName,
        apiBaseUrl: this.apiBaseUrl,
        lastActiveAt: Date.now()
      };
      // Best-effort: a lost heartbeat is tolerable; the next tick retries.
      Promise.resolve(this.client.rPush(this.heartbeatKey, JSON.stringify(payload))).catch(() => {});
    }, 3000);
    this.heartbeatTimer.unref?.();
    return this.heartbeatTimer;
  }

  /** Stop the heartbeat started by startHeartbeat(), if any. */
  stopHeartbeat() {
    if (this.heartbeatTimer) {
      clearInterval(this.heartbeatTimer);
      this.heartbeatTimer = null;
    }
  }
}
/** Coerce to a finite number; '' / null / undefined / NaN → null. */
const toNumber = (value) => {
  if (value === undefined || value === null || value === '') {
    return null;
  }
  if (typeof value === 'number') {
    return value;
  }
  const parsed = Number(value);
  return Number.isFinite(parsed) ? parsed : null;
};

/** Stringify anything except null/undefined (empty string passes through). */
const toStringAllowEmpty = (value) => {
  if (value === undefined || value === null) {
    return value;
  }
  return String(value);
};

// Shared field shapes: every scalar column is optional and nullable, with a
// lenient preprocess step so producers may send numbers as strings and
// vice versa. Each call returns a fresh schema instance.
const nullableNumber = () => z.preprocess(toNumber, z.number().nullable()).optional().nullable();
const nullableString = () => z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable();

const NUMBER_FIELDS = [
  'ts_ms', 'upgrade_ts_ms', 'hotel_id', 'is_send', 'ip_type',
  'room_type_id', 'room_status', 'season', 'sys_lock_status'
];

const STRING_FIELDS = [
  'room_id', 'device_id', 'model_num', 'server_ip', 'ip', 'subnet_mask',
  'gateway', 'dns', 'app_version', 'rcu_time', 'launcher_version', 'mac',
  'config_version', 'authorization_time', 'authorization_days',
  'room_num_remark', 'room_type_remark', 'room_remark', 'mcu_name',
  'central_control_name', 'configure_hotel_name', 'configure_room_type_name'
];

/**
 * Lenient validation schema for one Kafka register message. Built from the
 * field lists above instead of 33 hand-written repeated lines; the resulting
 * shape is identical.
 */
export const kafkaPayloadSchema = z.object({
  ...Object.fromEntries(NUMBER_FIELDS.map((field) => [field, nullableNumber()])),
  ...Object.fromEntries(STRING_FIELDS.map((field) => [field, nullableString()])),
  // Opaque fields — normalized later by the processor, accepted as-is here.
  udp_raw: z.any().optional().nullable(),
  extra: z.any().optional().nullable()
});
/** Serialize one log record as a single JSON line. */
const format = (level, message, context) => {
  const payload = {
    level,
    message,
    timestamp: Date.now(),
    ...(context ? { context } : {})
  };
  return JSON.stringify(payload);
};

/**
 * Minimal structured logger: one JSON object per line;
 * info → stdout, warn/error → stderr.
 */
export const logger = {
  info(message, context) {
    process.stdout.write(`${format('info', message, context)}\n`);
  },
  error(message, context) {
    process.stderr.write(`${format('error', message, context)}\n`);
  },
  warn(message, context) {
    process.stderr.write(`${format('warn', message, context)}\n`);
  }
};

/**
 * In-memory counters, snapshotted and zeroed via getAndReset().
 * Fixed-name counters live in `metrics`; per-key breakdowns in `keyed`.
 * Not thread-safe — intended for a single-threaded Node event loop.
 */
export class MetricCollector {
  constructor() {
    this.reset();
  }

  /** Zero all counters and keyed breakdowns. */
  reset() {
    this.metrics = {
      kafka_pulled: 0,
      parse_error: 0,
      db_inserted: 0,
      db_failed: 0,
      db_insert_count: 0,
      db_insert_ms_sum: 0,
      batch_flush_count: 0,
      batch_flush_ms_sum: 0
    };
    this.keyed = {};
  }

  /** Increment a known counter by `count`; unknown names are ignored. */
  increment(metric, count = 1) {
    // Object.hasOwn avoids the prototype-chain pitfalls of hasOwnProperty.
    if (Object.hasOwn(this.metrics, metric)) {
      this.metrics[metric] += count;
    }
  }

  /** Increment `metric[key]` by `count`; no-op for a falsy key. */
  incrementKeyed(metric, key, count = 1) {
    if (!key) {
      return;
    }
    if (!this.keyed[metric]) {
      this.keyed[metric] = {};
    }
    if (!Object.hasOwn(this.keyed[metric], key)) {
      this.keyed[metric][key] = 0;
    }
    this.keyed[metric][key] += count;
  }

  /** Snapshot all counters, reset them, and return the snapshot. */
  getAndReset() {
    const current = { ...this.metrics };
    // structuredClone replaces the original JSON round-trip deep copy.
    const keyed = structuredClone(this.keyed);
    this.reset();
    return { ...current, keyed };
  }
}

/** 32-character lowercase hex GUID (UUIDv4 with dashes removed). */
export const createGuid = () => randomUUID().replace(/-/g, '');
import { describe, expect, it } from 'vitest';
import { buildRowsFromPayload } from '../src/processor/index.js';

// Shared fixture: a minimal, fully-valid register payload.
const samplePayload = {
  ts_ms: 1770000235000,
  upgrade_ts_ms: 1770001235000,
  hotel_id: 1085,
  room_id: '8888',
  device_id: '091123987456',
  is_send: 0,
  app_version: '2.1.0',
  launcher_version: '1.0.0',
  config_version: 'cfg-v8'
};

describe('Register Processor', () => {
  it('should map payload into register and room-status rows', () => {
    const { registerRows, roomStatusRows } = buildRowsFromPayload(samplePayload);

    expect(registerRows).toHaveLength(1);
    expect(roomStatusRows).toHaveLength(1);

    const [event] = registerRows;
    expect(event.hotel_id).toBe(1085);
    expect(event.room_id).toBe('8888');
    expect(event.device_id).toBe('091123987456');

    const [status] = roomStatusRows;
    expect(status.register_ts_ms).toBe(1770000235000);
    expect(status.upgrade_ts_ms).toBe(1770001235000);
  });

  it('should force hotel_id to 0 when out of int2 range', () => {
    const { registerRows, roomStatusRows } = buildRowsFromPayload({ ...samplePayload, hotel_id: 60000 });
    expect(registerRows[0].hotel_id).toBe(0);
    expect(roomStatusRows[0].hotel_id).toBe(0);
  });

  it('should convert udp_raw byte array to base64 text', () => {
    const { registerRows } = buildRowsFromPayload({ ...samplePayload, udp_raw: [1, 2, 3, 4] });
    expect(registerRows[0].udp_raw).toBe('AQIDBA==');
  });

  it('should strip NUL bytes from text fields', () => {
    const { registerRows } = buildRowsFromPayload({
      ...samplePayload,
      app_version: 'v1\u0000\u0000.2',
      room_num_remark: 'A\u0000B'
    });
    expect(registerRows[0].app_version).toBe('v1.2');
    expect(registerRows[0].room_num_remark).toBe('AB');
  });
});
import { defineConfig } from 'vite';

// Build the service as a Node 18 SSR bundle from src/index.js into dist/.
// Runtime dependencies stay external so the bundle requires them from
// node_modules instead of inlining native/driver code.
export default defineConfig({
  build: {
    ssr: 'src/index.js',
    outDir: 'dist',
    target: 'node18',
    rollupOptions: {
      external: ['dotenv', 'kafka-node', 'pg', 'redis']
    }
  }
});
| +| authorization_time | varchar(10) | 授权时间 | +| authorization_days | varchar(10) | 授权天数 | +| room_num_remark | varchar(255) | 房号备注 | +| room_type_remark | varchar(64) | 房型备注 | +| room_remark | varchar(64) | 房间备注 | +| mcu_name | varchar(255) | MCU名称 | +| central_control_name | varchar(255) | 中控机型名称 | +| configure_hotel_name | varchar(255) | 配置数据酒店名称 | +| configure_room_type_name | varchar(255) | 配置数据房型别名 | + + +#### 生产服务器的推送的数据用于kafka的C#类: +public class RegisterInfo + { + public long ts_ms { get; set; } + public long upgrade_ts_ms { get; set; } + public long hotel_id { get; set; } + public string room_id { get; set; } + public string device_id { get; set; } + public ushort is_send { get; set; } + public byte[] udp_raw { get; set; } + public string extra { get; set; } + public int ip_type { get; set; } + public string model_num { get; set; } + public string server_ip { get; set; } + public string ip { get; set; } + public string subnet_mask { get; set; } + public string gateway { get; set; } + public string dns { get; set; } + public string app_version { get; set; } + public string rcu_time { get; set; } + public string launcher_version { get; set; } + public string mac { get; set; } + public int room_type_id { get; set; } + public string config_version { get; set; } + public int room_status { get; set; } + public string season { get; set; } + public int sys_lock_status { get; set; } + public long authorization_time { get; set; } + public long authorization_days { get; set; } + public string room_num_remark { get; set; } + public string room_type_remark { get; set; } + public string room_remark { get; set; } + public string mcu_name { get; set; } + public string central_control_name { get; set; } + public string configure_hotel_name { get; set; } + public string configure_room_type_name { get; set; } + } \ No newline at end of file diff --git a/docs/rcu_info_events_g5.sql b/docs/rcu_info_events_g5.sql new file mode 100644 index 0000000..ed8ede2 --- /dev/null +++ 
b/docs/rcu_info_events_g5.sql @@ -0,0 +1,92 @@ +/* + Navicat Premium Dump SQL + + Source Server : FnOS 80 + Source Server Type : PostgreSQL + Source Server Version : 150017 (150017) + Source Host : 10.8.8.80:5434 + Source Catalog : log_platform + Source Schema : rcu_info + + Target Server Type : PostgreSQL + Target Server Version : 150017 (150017) + File Encoding : 65001 + + Date: 12/03/2026 17:36:43 +*/ + + +-- ---------------------------- +-- Table structure for rcu_info_events_g5 +-- ---------------------------- +DROP TABLE IF EXISTS "rcu_info"."rcu_info_events_g5"; +CREATE TABLE "rcu_info"."rcu_info_events_g5" ( + "guid" int8 NOT NULL DEFAULT nextval('"rcu_info".rcu_info_events_g5_guid_seq'::regclass), + "ts_ms" int8 NOT NULL, + "hotel_id" int2 NOT NULL, + "room_id" varchar(50) COLLATE "pg_catalog"."default" NOT NULL, + "device_id" varchar(64) COLLATE "pg_catalog"."default", + "write_ts_ms" int8 DEFAULT ((EXTRACT(epoch FROM now()) * (1000)::numeric))::bigint, + "is_send" int2 DEFAULT 0, + "udp_raw" text COLLATE "pg_catalog"."default", + "extra" jsonb, + "ip_type" int2, + "model_num" varchar(32) COLLATE "pg_catalog"."default", + "server_ip" varchar(21) COLLATE "pg_catalog"."default", + "ip" varchar(21) COLLATE "pg_catalog"."default", + "subnet_mask" varchar(15) COLLATE "pg_catalog"."default", + "gateway" varchar(15) COLLATE "pg_catalog"."default", + "dns" varchar(15) COLLATE "pg_catalog"."default", + "app_version" varchar(64) COLLATE "pg_catalog"."default", + "rcu_time" varchar(25) COLLATE "pg_catalog"."default", + "launcher_version" varchar(64) COLLATE "pg_catalog"."default", + "mac" varchar(17) COLLATE "pg_catalog"."default", + "room_type_id" int8, + "config_version" varchar(32) COLLATE "pg_catalog"."default", + "room_status" int4, + "season" int4, + "sys_lock_status" int4, + "authorization_time" varchar(10) COLLATE "pg_catalog"."default", + "authorization_days" varchar(10) COLLATE "pg_catalog"."default", + "room_num_remark" varchar(255) COLLATE 
"pg_catalog"."default", + "room_type_remark" varchar(64) COLLATE "pg_catalog"."default", + "room_remark" varchar(64) COLLATE "pg_catalog"."default", + "mcu_name" varchar(255) COLLATE "pg_catalog"."default", + "central_control_name" varchar(255) COLLATE "pg_catalog"."default", + "configure_hotel_name" varchar(255) COLLATE "pg_catalog"."default", + "configure_room_type_name" varchar(255) COLLATE "pg_catalog"."default" +) +; + +-- ---------------------------- +-- Indexes structure for table rcu_info_events_g5 +-- ---------------------------- +CREATE INDEX "idx_rcu_info_g5_app_ver" ON "rcu_info"."rcu_info_events_g5" USING btree ( + "app_version" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST +) WHERE app_version IS NOT NULL; +CREATE INDEX "idx_rcu_info_g5_central_ctrl_name" ON "rcu_info"."rcu_info_events_g5" USING btree ( + "central_control_name" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST +) WHERE central_control_name IS NOT NULL; +CREATE INDEX "idx_rcu_info_g5_device_id" ON "rcu_info"."rcu_info_events_g5" USING btree ( + "device_id" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST +) WHERE device_id IS NOT NULL; +CREATE INDEX "idx_rcu_info_g5_is_send" ON "rcu_info"."rcu_info_events_g5" USING btree ( + "is_send" "pg_catalog"."int2_ops" ASC NULLS LAST +); +CREATE INDEX "idx_rcu_info_g5_launcher_ver" ON "rcu_info"."rcu_info_events_g5" USING btree ( + "launcher_version" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST +) WHERE launcher_version IS NOT NULL; +CREATE INDEX "idx_rcu_info_g5_mac" ON "rcu_info"."rcu_info_events_g5" USING btree ( + "mac" COLLATE "pg_catalog"."default" "pg_catalog"."text_ops" ASC NULLS LAST +) WHERE mac IS NOT NULL; +CREATE INDEX "idx_rcu_info_g5_sys_lock" ON "rcu_info"."rcu_info_events_g5" USING btree ( + "sys_lock_status" "pg_catalog"."int4_ops" ASC NULLS LAST +); +CREATE INDEX "rcu_info_events_g5_ts_ms_idx" ON "rcu_info"."rcu_info_events_g5" USING btree 
( + "ts_ms" "pg_catalog"."int8_ops" DESC NULLS FIRST +); + +-- ---------------------------- +-- Primary Key structure for table rcu_info_events_g5 +-- ---------------------------- +ALTER TABLE "rcu_info"."rcu_info_events_g5" ADD CONSTRAINT "rcu_info_events_g5_pkey" PRIMARY KEY ("hotel_id", "room_id", "ts_ms", "guid"); diff --git a/docs/room_status_moment_g5.sql b/docs/room_status_moment_g5.sql new file mode 100644 index 0000000..9606222 --- /dev/null +++ b/docs/room_status_moment_g5.sql @@ -0,0 +1,91 @@ +/* + Navicat Premium Dump SQL + + Source Server : FnOS 80 + Source Server Type : PostgreSQL + Source Server Version : 150017 (150017) + Source Host : 10.8.8.80:5434 + Source Catalog : log_platform + Source Schema : room_status + + Target Server Type : PostgreSQL + Target Server Version : 150017 (150017) + File Encoding : 65001 + + Date: 14/03/2026 09:58:21 +*/ + + +-- ---------------------------- +-- Table structure for room_status_moment_g5 +-- ---------------------------- +DROP TABLE IF EXISTS "room_status"."room_status_moment_g5"; +CREATE TABLE "room_status"."room_status_moment_g5" ( + "hotel_id" int2 NOT NULL, + "room_id" text COLLATE "pg_catalog"."default" NOT NULL, + "device_id" text COLLATE "pg_catalog"."default" NOT NULL, + "ts_ms" int8 NOT NULL DEFAULT ((EXTRACT(epoch FROM clock_timestamp()) * (1000)::numeric))::bigint, + "sys_lock_status" int2, + "online_status" int2, + "launcher_version" text COLLATE "pg_catalog"."default", + "app_version" text COLLATE "pg_catalog"."default", + "config_version" text COLLATE "pg_catalog"."default", + "register_ts_ms" int8, + "upgrade_ts_ms" int8, + "config_ts_ms" int8, + "ip" text COLLATE "pg_catalog"."default", + "pms_status" int2, + "power_state" int2, + "cardless_state" int2, + "service_mask" int8, + "insert_card" int2, + "bright_g" int2, + "agreement_ver" text COLLATE "pg_catalog"."default", + "air_address" _text COLLATE "pg_catalog"."default", + "air_state" _int2, + "air_model" _int2, + "air_speed" _int2, + 
"air_set_temp" _int2, + "air_now_temp" _int2, + "air_solenoid_valve" _int2, + "elec_address" _text COLLATE "pg_catalog"."default", + "elec_voltage" _float8, + "elec_ampere" _float8, + "elec_power" _float8, + "elec_phase" _float8, + "elec_energy" _float8, + "elec_sum_energy" _float8, + "carbon_state" int2, + "dev_loops" jsonb, + "energy_carbon_sum" float8, + "energy_nocard_sum" float8, + "external_device" jsonb DEFAULT '{}'::jsonb, + "faulty_device_count" jsonb DEFAULT '{}'::jsonb +) +WITH (fillfactor=90) +TABLESPACE "ts_hot" +; + +-- ---------------------------- +-- Indexes structure for table room_status_moment_g5 +-- ---------------------------- +CREATE INDEX "idx_rsm_g5_dashboard_query" ON "room_status"."room_status_moment_g5" USING btree ( + "hotel_id" "pg_catalog"."int2_ops" ASC NULLS LAST, + "online_status" "pg_catalog"."int2_ops" ASC NULLS LAST, + "power_state" "pg_catalog"."int2_ops" ASC NULLS LAST +); + +-- ---------------------------- +-- Triggers structure for table room_status_moment_g5 +-- ---------------------------- +CREATE TRIGGER "trg_update_rsm_ts_ms" BEFORE UPDATE ON "room_status"."room_status_moment_g5" +FOR EACH ROW +EXECUTE PROCEDURE "room_status"."update_ts_ms_g5"(); +CREATE TRIGGER "trigger_room_status_change" AFTER UPDATE ON "room_status"."room_status_moment_g5" +FOR EACH ROW +EXECUTE PROCEDURE "room_status"."handle_room_status_change"(); + +-- ---------------------------- +-- Primary Key structure for table room_status_moment_g5 +-- ---------------------------- +ALTER TABLE "room_status"."room_status_moment_g5" ADD CONSTRAINT "room_status_moment_g5_pkey" PRIMARY KEY ("hotel_id", "room_id"); diff --git a/docs/template/bls-onoffline-backend/.env b/docs/template/bls-onoffline-backend/.env new file mode 100644 index 0000000..cdc85a7 --- /dev/null +++ b/docs/template/bls-onoffline-backend/.env @@ -0,0 +1,51 @@ +KAFKA_BROKERS=kafka.blv-oa.com:9092 +KAFKA_CLIENT_ID=bls-onoffline-producer +KAFKA_GROUP_ID=bls-onoffline-consumer 
+KAFKA_TOPICS=blwlog4Nodejs-rcu-onoffline-topic-0 +KAFKA_AUTO_COMMIT=false +KAFKA_AUTO_COMMIT_INTERVAL_MS=5000 +KAFKA_SASL_ENABLED=true +KAFKA_SASL_MECHANISM=plain +KAFKA_SASL_USERNAME=blwmomo +KAFKA_SASL_PASSWORD=blwmomo +KAFKA_SSL_ENABLED=false +KAFKA_CONSUMER_INSTANCES=3 +KAFKA_MAX_IN_FLIGHT=5000 +KAFKA_BATCH_SIZE=1000 +KAFKA_BATCH_TIMEOUT_MS=20 +KAFKA_COMMIT_INTERVAL_MS=200 +KAFKA_COMMIT_ON_ATTEMPT=true +KAFKA_FETCH_MAX_BYTES=10485760 +KAFKA_FETCH_MAX_WAIT_MS=100 +KAFKA_FETCH_MIN_BYTES=1 + +#POSTGRES_HOST=10.8.8.109 +#POSTGRES_PORT=5433 +#POSTGRES_DATABASE=log_platform +#POSTGRES_USER=log_admin +#POSTGRES_PASSWORD=YourActualStrongPasswordForPostgres! +#POSTGRES_MAX_CONNECTIONS=6 +#POSTGRES_IDLE_TIMEOUT_MS=30000 +#DB_SCHEMA=onoffline +#DB_TABLE=onoffline_record + +# ========================= +# PostgreSQL 配置 G5库专用 +# ========================= +POSTGRES_HOST_G5=10.8.8.80 +POSTGRES_PORT_G5=5434 +POSTGRES_DATABASE_G5=log_platform +POSTGRES_USER_G5=log_admin +POSTGRES_PASSWORD_G5=H3IkLUt8K!x +POSTGRES_IDLE_TIMEOUT_MS_G5=30000 + +PORT=3001 +LOG_LEVEL=info + +# Redis connection +REDIS_HOST=10.8.8.109 +REDIS_PORT=6379 +REDIS_PASSWORD= +REDIS_DB=15 +REDIS_CONNECT_TIMEOUT_MS=5000 +REDIS_PROJECT_NAME=bls-onoffline diff --git a/docs/template/bls-onoffline-backend/.env.example b/docs/template/bls-onoffline-backend/.env.example new file mode 100644 index 0000000..d716cb7 --- /dev/null +++ b/docs/template/bls-onoffline-backend/.env.example @@ -0,0 +1,31 @@ +# Server Configuration +PORT=3001 +NODE_ENV=development + +# Kafka Configuration +KAFKA_BROKERS=localhost:9092 +KAFKA_TOPIC=blwlog4Nodejs-rcu-onoffline-topic +KAFKA_GROUP_ID=bls-onoffline-group +KAFKA_CLIENT_ID=bls-onoffline-client +KAFKA_CONSUMER_INSTANCES=1 +# KAFKA_SASL_USERNAME= +# KAFKA_SASL_PASSWORD= +# KAFKA_SASL_MECHANISM=plain + +# Database Configuration (PostgreSQL) +DB_HOST=localhost +DB_PORT=5432 +DB_USER=postgres +DB_PASSWORD=password +DB_DATABASE=log_platform +DB_SCHEMA=public +DB_TABLE=onoffline_record 
+DB_MAX_CONNECTIONS=10 + +# Redis Configuration +REDIS_HOST=localhost +REDIS_PORT=6379 +REDIS_PASSWORD= +REDIS_DB=0 +REDIS_PROJECT_NAME=bls-onoffline +REDIS_API_BASE_URL=http://localhost:3001 diff --git a/docs/template/bls-onoffline-backend/AGENTS.md b/docs/template/bls-onoffline-backend/AGENTS.md new file mode 100644 index 0000000..0669699 --- /dev/null +++ b/docs/template/bls-onoffline-backend/AGENTS.md @@ -0,0 +1,18 @@ + +# OpenSpec Instructions + +These instructions are for AI assistants working in this project. + +Always open `@/openspec/AGENTS.md` when the request: +- Mentions planning or proposals (words like proposal, spec, change, plan) +- Introduces new capabilities, breaking changes, architecture shifts, or big performance/security work +- Sounds ambiguous and you need the authoritative spec before coding + +Use `@/openspec/AGENTS.md` to learn: +- How to create and apply change proposals +- Spec format and conventions +- Project structure and guidelines + +Keep this managed block so 'openspec update' can refresh the instructions. 
+ + \ No newline at end of file diff --git a/docs/template/bls-onoffline-backend/README.md b/docs/template/bls-onoffline-backend/README.md new file mode 100644 index 0000000..4180ac8 --- /dev/null +++ b/docs/template/bls-onoffline-backend/README.md @@ -0,0 +1,30 @@ +bls-onoffline-backend + +安装与运行 +- Node.js 22+ +- npm install +- npm run dev + +构建与测试 +- npm run build +- npm run test +- npm run lint + +规范校验 +- npm run spec:lint +- npm run spec:validate + +环境变量 +- 复制 .env.example 为 .env 并按实际环境配置 + +数据库初始化 +- 运行服务前请先通过根目录 SQL_Script 下脚本完成建库与分区维护 +- `../SQL_Script/create_database.sql`:建库(psql) +- `../SQL_Script/create_schema_and_parent_table.sql`:建 schema 与主分区表 +- `../SQL_Script/create_partition_for_day.sql`:按日建分区模板 +- `../SQL_Script/generate_init_sql.js`:生成建库+建表 SQL +- `../SQL_Script/generate_partition_sql.js`:生成单日分区 SQL +- `../SQL_Script/generate_partition_range_sql.js`:生成批量分区 SQL + +规范说明 +- 规格文件位于 spec/onoffline-spec.md diff --git a/docs/template/bls-onoffline-backend/dist/index.js b/docs/template/bls-onoffline-backend/dist/index.js new file mode 100644 index 0000000..aa9908d --- /dev/null +++ b/docs/template/bls-onoffline-backend/dist/index.js @@ -0,0 +1,1086 @@ +import cron from "node-cron"; +import dotenv from "dotenv"; +import pg from "pg"; +import kafka from "kafka-node"; +import { randomUUID } from "crypto"; +import { z } from "zod"; +import { createClient } from "redis"; +dotenv.config(); +const parseNumber = (value, defaultValue) => { + const parsed = Number(value); + return Number.isFinite(parsed) ? 
parsed : defaultValue; +}; +const parseList = (value) => (value || "").split(",").map((item) => item.trim()).filter(Boolean); +const config = { + env: process.env.NODE_ENV || "development", + port: parseNumber(process.env.PORT, 3001), + kafka: { + brokers: parseList(process.env.KAFKA_BROKERS), + topic: process.env.KAFKA_TOPIC || process.env.KAFKA_TOPICS || "blwlog4Nodejs-rcu-onoffline-topic", + groupId: process.env.KAFKA_GROUP_ID || "bls-onoffline-group", + clientId: process.env.KAFKA_CLIENT_ID || "bls-onoffline-client", + consumerInstances: parseNumber(process.env.KAFKA_CONSUMER_INSTANCES, 1), + maxInFlight: parseNumber(process.env.KAFKA_MAX_IN_FLIGHT, 2e4), + fetchMaxBytes: parseNumber(process.env.KAFKA_FETCH_MAX_BYTES, 50 * 1024 * 1024), + fetchMinBytes: parseNumber(process.env.KAFKA_FETCH_MIN_BYTES, 256 * 1024), + fetchMaxWaitMs: parseNumber(process.env.KAFKA_FETCH_MAX_WAIT_MS, 100), + autoCommitIntervalMs: parseNumber(process.env.KAFKA_AUTO_COMMIT_INTERVAL_MS, 5e3), + commitIntervalMs: parseNumber(process.env.KAFKA_COMMIT_INTERVAL_MS, 200), + commitOnAttempt: process.env.KAFKA_COMMIT_ON_ATTEMPT === "true", + batchSize: parseNumber(process.env.KAFKA_BATCH_SIZE, 5e3), + batchTimeoutMs: parseNumber(process.env.KAFKA_BATCH_TIMEOUT_MS, 50), + logMessages: process.env.KAFKA_LOG_MESSAGES === "true", + sasl: process.env.KAFKA_SASL_USERNAME && process.env.KAFKA_SASL_PASSWORD ? 
{ + mechanism: process.env.KAFKA_SASL_MECHANISM || "plain", + username: process.env.KAFKA_SASL_USERNAME, + password: process.env.KAFKA_SASL_PASSWORD + } : void 0 + }, + db: { + host: process.env.DB_HOST || process.env.POSTGRES_HOST || "localhost", + port: parseNumber(process.env.DB_PORT || process.env.POSTGRES_PORT, 5432), + user: process.env.DB_USER || process.env.POSTGRES_USER || "postgres", + password: process.env.DB_PASSWORD || process.env.POSTGRES_PASSWORD || "", + database: process.env.DB_DATABASE || process.env.POSTGRES_DATABASE || "log_platform", + max: parseNumber(process.env.DB_MAX_CONNECTIONS || process.env.POSTGRES_MAX_CONNECTIONS, 10), + ssl: process.env.DB_SSL === "true" ? { rejectUnauthorized: false } : void 0, + schema: process.env.DB_SCHEMA || "onoffline", + table: process.env.DB_TABLE || "onoffline_record" + }, + g5db: { + enabled: !!process.env.POSTGRES_HOST_G5, + host: process.env.POSTGRES_HOST_G5, + port: parseNumber(process.env.POSTGRES_PORT_G5, 5434), + user: process.env.POSTGRES_USER_G5, + password: process.env.POSTGRES_PASSWORD_G5, + database: process.env.POSTGRES_DATABASE_G5, + max: parseNumber(process.env.POSTGRES_MAX_CONNECTIONS_G5, 3), + ssl: process.env.POSTGRES_SSL_G5 === "true" ? { rejectUnauthorized: false } : void 0, + schema: process.env.DB_SCHEMA_G5 || "onoffline", + table: process.env.DB_TABLE_G5 || "onoffline_record_g5" + }, + redis: { + host: process.env.REDIS_HOST || "localhost", + port: parseNumber(process.env.REDIS_PORT, 6379), + password: process.env.REDIS_PASSWORD || void 0, + db: parseNumber(process.env.REDIS_DB, 0), + projectName: process.env.REDIS_PROJECT_NAME || "bls-onoffline", + apiBaseUrl: process.env.REDIS_API_BASE_URL || `http://localhost:${parseNumber(process.env.PORT, 3001)}` + } +}; +const format = (level, message, context) => { + const payload = { + level, + message, + timestamp: Date.now(), + ...context ? 
{ context } : {} + }; + return JSON.stringify(payload); +}; +const logger = { + info(message, context) { + process.stdout.write(`${format("info", message, context)} +`); + }, + error(message, context) { + process.stderr.write(`${format("error", message, context)} +`); + }, + warn(message, context) { + process.stderr.write(`${format("warn", message, context)} +`); + } +}; +const { Pool: Pool$1 } = pg; +const columns = [ + "guid", + "ts_ms", + "write_ts_ms", + "hotel_id", + "mac", + "device_id", + "room_id", + "ip", + "current_status", + "launcher_version", + "reboot_reason" +]; +class DatabaseManager { + constructor(dbConfig) { + this.pool = new Pool$1({ + host: dbConfig.host, + port: dbConfig.port, + user: dbConfig.user, + password: dbConfig.password, + database: dbConfig.database, + max: dbConfig.max, + ssl: dbConfig.ssl + }); + } + async insertRows({ schema, table, rows }) { + if (!rows || rows.length === 0) { + return; + } + const statement = ` + INSERT INTO ${schema}.${table} (${columns.join(", ")}) + SELECT * + FROM UNNEST( + $1::text[], + $2::int8[], + $3::int8[], + $4::int2[], + $5::text[], + $6::text[], + $7::text[], + $8::text[], + $9::text[], + $10::text[], + $11::text[] + ) + ON CONFLICT DO NOTHING + `; + try { + const params = columns.map((column) => rows.map((row) => row[column] ?? 
null)); + await this.pool.query(statement, params); + } catch (error) { + logger.error("Database insert failed", { + error: error?.message, + schema, + table, + rowsLength: rows.length + }); + throw error; + } + } + async checkConnection() { + let client; + try { + const connectPromise = this.pool.connect(); + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error("Connection timeout")), 5e3); + }); + try { + client = await Promise.race([connectPromise, timeoutPromise]); + } catch (raceError) { + connectPromise.then((c) => c.release()).catch(() => { + }); + throw raceError; + } + await client.query("SELECT 1"); + return true; + } catch (err) { + logger.error("Database check connection failed", { error: err.message }); + return false; + } finally { + if (client) { + client.release(); + } + } + } + async close() { + await this.pool.end(); + } +} +const dbManager = new DatabaseManager(config.db); +const { Pool } = pg; +const g5Columns = [ + "ts_ms", + "write_ts_ms", + "hotel_id", + "mac", + "device_id", + "room_id", + "ip", + "current_status", + "launcher_version", + "reboot_reason", + "record_source" +]; +class G5DatabaseManager { + constructor(dbConfig) { + if (!dbConfig.enabled) return; + this.pool = new Pool({ + host: dbConfig.host, + port: dbConfig.port, + user: dbConfig.user, + password: dbConfig.password, + database: dbConfig.database, + max: dbConfig.max, + ssl: dbConfig.ssl + }); + } + async insertRows({ schema, table, rows }) { + if (!this.pool || !rows || rows.length === 0) { + return; + } + const statement = ` + INSERT INTO ${schema}.${table} (${g5Columns.join(", ")}) + SELECT * + FROM UNNEST( + $1::int8[], + $2::int8[], + $3::int2[], + $4::text[], + $5::text[], + $6::text[], + $7::text[], + $8::int2[], + $9::text[], + $10::text[], + $11::text[] + ) + ON CONFLICT DO NOTHING + `; + try { + const params = g5Columns.map((column) => { + return rows.map((row) => { + if (column === "record_source") { + return "CRICS"; + } + if 
(column === "current_status") { + if (row.current_status === "on") return 1; + if (row.current_status === "off") return 2; + return 0; + } + return row[column] ?? null; + }); + }); + await this.pool.query(statement, params); + } catch (error) { + logger.error("G5 Database insert failed", { + error: error?.message, + schema, + table, + rowsLength: rows.length + }); + throw error; + } + } + async checkConnection() { + if (!this.pool) return true; + let client; + try { + const connectPromise = this.pool.connect(); + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error("Connection timeout")), 5e3); + }); + try { + client = await Promise.race([connectPromise, timeoutPromise]); + } catch (raceError) { + connectPromise.then((c) => c.release()).catch(() => { + }); + throw raceError; + } + await client.query("SELECT 1"); + return true; + } catch (err) { + logger.error("G5 Database check connection failed", { error: err.message }); + return false; + } finally { + if (client) { + client.release(); + } + } + } + async close() { + if (this.pool) { + await this.pool.end(); + } + } +} +const g5DbManager = new G5DatabaseManager(config.g5db); +class OffsetTracker { + constructor() { + this.partitions = /* @__PURE__ */ new Map(); + } + // Called when a message is received (before processing) + add(topic, partition, offset) { + const key = `${topic}-${partition}`; + if (!this.partitions.has(key)) { + this.partitions.set(key, { nextCommitOffset: null, done: /* @__PURE__ */ new Set() }); + } + const state = this.partitions.get(key); + const numericOffset = Number(offset); + if (!Number.isFinite(numericOffset)) return; + if (state.nextCommitOffset === null) { + state.nextCommitOffset = numericOffset; + } else if (numericOffset < state.nextCommitOffset) { + state.nextCommitOffset = numericOffset; + } + } + // Called when a message is successfully processed + // Returns the next offset to commit (if any advancement is possible), or null + markDone(topic, 
partition, offset) { + const key = `${topic}-${partition}`; + const state = this.partitions.get(key); + if (!state) return null; + const numericOffset = Number(offset); + if (!Number.isFinite(numericOffset)) return null; + state.done.add(numericOffset); + if (state.nextCommitOffset === null) { + state.nextCommitOffset = numericOffset; + } + let advanced = false; + while (state.nextCommitOffset !== null && state.done.has(state.nextCommitOffset)) { + state.done.delete(state.nextCommitOffset); + state.nextCommitOffset += 1; + advanced = true; + } + if (!advanced) return null; + return state.nextCommitOffset; + } + clear() { + this.partitions.clear(); + } +} +const { ConsumerGroup } = kafka; +const createOneConsumer = ({ kafkaConfig, onMessage, onError, instanceIndex }) => { + const kafkaHost = kafkaConfig.brokers.join(","); + const clientId = instanceIndex === 0 ? kafkaConfig.clientId : `${kafkaConfig.clientId}-${instanceIndex}`; + const id = `${clientId}-${process.pid}-${Date.now()}`; + const maxInFlight = Number.isFinite(kafkaConfig.maxInFlight) ? kafkaConfig.maxInFlight : 5e3; + const commitIntervalMs = Number.isFinite(kafkaConfig.commitIntervalMs) ? 
kafkaConfig.commitIntervalMs : 200; + let inFlight = 0; + const tracker = new OffsetTracker(); + let pendingCommits = /* @__PURE__ */ new Map(); + let commitTimer = null; + const flushCommits = () => { + if (pendingCommits.size === 0) return; + const batch = pendingCommits; + pendingCommits = /* @__PURE__ */ new Map(); + consumer.sendOffsetCommitRequest( + Array.from(batch.values()), + (err) => { + if (err) { + for (const [k, v] of batch.entries()) { + pendingCommits.set(k, v); + } + logger.error("Kafka commit failed", { error: err?.message, count: batch.size }); + } + } + ); + }; + const scheduleCommitFlush = () => { + if (commitTimer) return; + commitTimer = setTimeout(() => { + commitTimer = null; + flushCommits(); + }, commitIntervalMs); + }; + const consumer = new ConsumerGroup( + { + kafkaHost, + groupId: kafkaConfig.groupId, + clientId, + id, + fromOffset: "earliest", + protocol: ["roundrobin"], + outOfRangeOffset: "latest", + autoCommit: false, + autoCommitIntervalMs: kafkaConfig.autoCommitIntervalMs, + fetchMaxBytes: kafkaConfig.fetchMaxBytes, + fetchMinBytes: kafkaConfig.fetchMinBytes, + fetchMaxWaitMs: kafkaConfig.fetchMaxWaitMs, + sasl: kafkaConfig.sasl + }, + kafkaConfig.topic + ); + const tryResume = () => { + if (inFlight < maxInFlight && consumer.paused) { + consumer.resume(); + } + }; + consumer.on("message", (message) => { + inFlight += 1; + tracker.add(message.topic, message.partition, message.offset); + if (inFlight >= maxInFlight) { + consumer.pause(); + } + Promise.resolve(onMessage(message)).then(() => { + }).catch((error) => { + logger.error("Kafka message handling failed", { error: error?.message }); + if (onError) { + onError(error, message); + } + }).finally(() => { + const commitOffset = tracker.markDone(message.topic, message.partition, message.offset); + if (commitOffset !== null) { + const key = `${message.topic}-${message.partition}`; + pendingCommits.set(key, { + topic: message.topic, + partition: message.partition, + offset: 
commitOffset, + metadata: "m" + }); + scheduleCommitFlush(); + } + inFlight -= 1; + tryResume(); + }); + }); + consumer.on("error", (error) => { + logger.error("Kafka consumer error", { error: error?.message }); + if (onError) { + onError(error); + } + }); + consumer.on("connect", () => { + logger.info(`Kafka Consumer connected`, { + groupId: kafkaConfig.groupId, + clientId + }); + }); + consumer.on("rebalancing", () => { + logger.info(`Kafka Consumer rebalancing`, { + groupId: kafkaConfig.groupId, + clientId + }); + tracker.clear(); + pendingCommits.clear(); + if (commitTimer) { + clearTimeout(commitTimer); + commitTimer = null; + } + }); + consumer.on("rebalanced", () => { + logger.info("Kafka Consumer rebalanced", { clientId, groupId: kafkaConfig.groupId }); + }); + consumer.on("error", (err) => { + logger.error("Kafka Consumer Error", { error: err.message }); + }); + consumer.on("offsetOutOfRange", (err) => { + logger.warn("Offset out of range", { error: err.message, topic: err.topic, partition: err.partition }); + }); + consumer.on("offsetOutOfRange", (error) => { + logger.warn(`Kafka Consumer offset out of range`, { + error: error?.message, + groupId: kafkaConfig.groupId, + clientId + }); + }); + consumer.on("close", () => { + if (commitTimer) { + clearTimeout(commitTimer); + commitTimer = null; + } + flushCommits(); + logger.warn(`Kafka Consumer closed`, { + groupId: kafkaConfig.groupId, + clientId + }); + }); + return consumer; +}; +const createKafkaConsumers = ({ kafkaConfig, onMessage, onError }) => { + const instances = Number.isFinite(kafkaConfig.consumerInstances) ? 
kafkaConfig.consumerInstances : 1; + const count = Math.max(1, instances); + return Array.from( + { length: count }, + (_, idx) => createOneConsumer({ kafkaConfig, onMessage, onError, instanceIndex: idx }) + ); +}; +const createGuid = () => randomUUID().replace(/-/g, ""); +const toNumber = (value) => { + if (value === void 0 || value === null || value === "") { + return value; + } + if (typeof value === "number") { + return value; + } + const parsed = Number(value); + return Number.isFinite(parsed) ? parsed : value; +}; +const toStringAllowEmpty = (value) => { + if (value === void 0 || value === null) { + return value; + } + return String(value); +}; +const kafkaPayloadSchema = z.object({ + HotelCode: z.preprocess(toNumber, z.number()), + MAC: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + HostNumber: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + RoomNumber: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + EndPoint: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + CurrentStatus: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + CurrentTime: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + UnixTime: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + LauncherVersion: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + RebootReason: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable() +}); +const normalizeText = (value, maxLength) => { + if (value === void 0 || value === null) { + return null; + } + const str = String(value); + if (maxLength && str.length > maxLength) { + return str.substring(0, maxLength); + } + return str; +}; +const buildRowsFromPayload = (rawPayload) => { + const payload = kafkaPayloadSchema.parse(rawPayload); + const rebootReason = normalizeText(payload.RebootReason, 255); + const 
currentStatusRaw = normalizeText(payload.CurrentStatus, 255); + const hasRebootReason = rebootReason !== null && rebootReason !== ""; + const currentStatus = hasRebootReason ? "on" : currentStatusRaw; + let tsMs = payload.UnixTime; + if (typeof tsMs === "number" && tsMs < 1e11) { + tsMs = tsMs * 1e3; + } + if (!tsMs && payload.CurrentTime) { + const parsed = Date.parse(payload.CurrentTime); + if (!isNaN(parsed)) { + tsMs = parsed; + } + } + if (!tsMs) { + tsMs = Date.now(); + } + const mac = normalizeText(payload.MAC) || ""; + const deviceId = normalizeText(payload.HostNumber) || ""; + const roomId = normalizeText(payload.RoomNumber) || ""; + let hotelId = payload.HotelCode; + if (typeof hotelId !== "number" || Number.isNaN(hotelId) || hotelId < -32768 || hotelId > 32767) { + hotelId = 0; + } + const row = { + guid: createGuid(), + ts_ms: tsMs, + write_ts_ms: Date.now(), + hotel_id: hotelId, + mac, + device_id: deviceId, + room_id: roomId, + ip: normalizeText(payload.EndPoint), + current_status: currentStatus, + launcher_version: normalizeText(payload.LauncherVersion, 255), + reboot_reason: rebootReason + }; + return [row]; +}; +const parseMessageToRows = (message) => { + const rawValue = message.value.toString(); + let payload; + try { + payload = JSON.parse(rawValue); + } catch (e) { + const error = new Error(`JSON Parse Error: ${e.message}`); + error.type = "PARSE_ERROR"; + throw error; + } + const validationResult = kafkaPayloadSchema.safeParse(payload); + if (!validationResult.success) { + const error = new Error(`Schema Validation Failed: ${JSON.stringify(validationResult.error.errors)}`); + error.type = "VALIDATION_ERROR"; + throw error; + } + return buildRowsFromPayload(payload); +}; +const createRedisClient = async (config2) => { + const client = createClient({ + socket: { + host: config2.host, + port: config2.port + }, + password: config2.password, + database: config2.db + }); + await client.connect(); + return client; +}; +class RedisIntegration { + 
constructor(client, projectName, apiBaseUrl) { + this.client = client; + this.projectName = projectName; + this.apiBaseUrl = apiBaseUrl; + this.heartbeatKey = "项目心跳"; + this.logKey = `${projectName}_项目控制台`; + } + async info(message, context) { + const payload = { + timestamp: (/* @__PURE__ */ new Date()).toISOString(), + level: "info", + message, + metadata: context || void 0 + }; + await this.client.rPush(this.logKey, JSON.stringify(payload)); + } + async error(message, context) { + const payload = { + timestamp: (/* @__PURE__ */ new Date()).toISOString(), + level: "error", + message, + metadata: context || void 0 + }; + await this.client.rPush(this.logKey, JSON.stringify(payload)); + } + startHeartbeat() { + setInterval(() => { + const payload = { + projectName: this.projectName, + apiBaseUrl: this.apiBaseUrl, + lastActiveAt: Date.now() + }; + this.client.rPush(this.heartbeatKey, JSON.stringify(payload)); + }, 3e3); + } +} +const buildErrorQueueKey = (projectName) => `${projectName}_error_queue`; +const enqueueError = async (client, queueKey, payload) => { + try { + await client.rPush(queueKey, JSON.stringify(payload)); + } catch (error) { + logger.error("Redis enqueue error failed", { error: error?.message }); + throw error; + } +}; +const startErrorRetryWorker = async ({ + client, + queueKey, + handler, + redisIntegration, + maxAttempts = 5 +}) => { + while (true) { + const result = await client.blPop(queueKey, 0); + const raw = result?.element; + if (!raw) { + continue; + } + let item; + try { + item = JSON.parse(raw); + } catch (error) { + logger.error("Invalid error payload", { error: error?.message }); + await redisIntegration.error("Invalid error payload", { module: "redis", stack: error?.message }); + continue; + } + const attempts = item.attempts || 0; + try { + await handler(item); + } catch (error) { + logger.error("Retry handler failed", { error: error?.message, stack: error?.stack }); + const nextPayload = { + ...item, + attempts: attempts + 1, + 
lastError: error?.message, + lastAttemptAt: Date.now() + }; + if (nextPayload.attempts >= maxAttempts) { + await redisIntegration.error("Retry attempts exceeded", { module: "retry", stack: JSON.stringify(nextPayload) }); + } else { + await enqueueError(client, queueKey, nextPayload); + } + } + } +}; +class MetricCollector { + constructor() { + this.reset(); + } + reset() { + this.metrics = { + kafka_pulled: 0, + parse_error: 0, + db_inserted: 0, + db_failed: 0, + db_insert_count: 0, + db_insert_ms_sum: 0, + batch_flush_count: 0, + batch_flush_ms_sum: 0 + }; + this.keyed = {}; + } + increment(metric, count = 1) { + if (this.metrics.hasOwnProperty(metric)) { + this.metrics[metric] += count; + } + } + incrementKeyed(metric, key, count = 1) { + if (!key) return; + if (!this.keyed[metric]) { + this.keyed[metric] = {}; + } + if (!Object.prototype.hasOwnProperty.call(this.keyed[metric], key)) { + this.keyed[metric][key] = 0; + } + this.keyed[metric][key] += count; + } + getAndReset() { + const current = { ...this.metrics }; + const keyed = JSON.parse(JSON.stringify(this.keyed)); + this.reset(); + return { ...current, keyed }; + } +} +const bootstrap = async () => { + logger.info("Starting application with config", { + env: process.env.NODE_ENV, + db: { + host: config.db.host, + port: config.db.port, + user: config.db.user, + database: config.db.database, + schema: config.db.schema + }, + kafka: { + brokers: config.kafka.brokers, + topic: config.kafka.topic, + groupId: config.kafka.groupId + }, + redis: { + host: config.redis.host, + port: config.redis.port + } + }); + const metricCollector = new MetricCollector(); + const redisClient = await createRedisClient(config.redis); + const redisIntegration = new RedisIntegration( + redisClient, + config.redis.projectName, + config.redis.apiBaseUrl + ); + redisIntegration.startHeartbeat(); + cron.schedule("* * * * *", async () => { + const metrics = metricCollector.getAndReset(); + const flushAvgMs = metrics.batch_flush_count > 0 
? (metrics.batch_flush_ms_sum / metrics.batch_flush_count).toFixed(1) : "0.0"; + const dbAvgMs = metrics.db_insert_count > 0 ? (metrics.db_insert_ms_sum / metrics.db_insert_count).toFixed(1) : "0.0"; + const report = `[Metrics] Pulled:${metrics.kafka_pulled} ParseErr:${metrics.parse_error} Inserted:${metrics.db_inserted} Failed:${metrics.db_failed} FlushAvg:${flushAvgMs}ms DbAvg:${dbAvgMs}ms`; + console.log(report); + logger.info(report); + try { + await redisIntegration.info("Minute Metrics", metrics); + } catch (err) { + logger.error("Failed to report metrics to Redis", { error: err?.message }); + } + }); + const errorQueueKey = buildErrorQueueKey(config.redis.projectName); + const handleError = async (error, message) => { + logger.error("Kafka processing error", { + error: error?.message, + type: error?.type, + stack: error?.stack + }); + try { + await redisIntegration.error("Kafka processing error", { + module: "kafka", + stack: error?.stack || error?.message + }); + } catch (redisError) { + logger.error("Redis error log failed", { error: redisError?.message }); + } + if (message) { + const messageValue = Buffer.isBuffer(message.value) ? message.value.toString("utf8") : message.value; + try { + await enqueueError(redisClient, errorQueueKey, { + attempts: 0, + value: messageValue, + meta: { + topic: message.topic, + partition: message.partition, + offset: message.offset, + key: message.key + }, + timestamp: Date.now() + }); + } catch (enqueueError2) { + logger.error("Enqueue error payload failed", { error: enqueueError2?.message }); + } + } + }; + const configuredBatchSize = Number.isFinite(config.kafka.batchSize) ? config.kafka.batchSize : 1e3; + const configuredBatchTimeoutMs = Number.isFinite(config.kafka.batchTimeoutMs) ? config.kafka.batchTimeoutMs : 20; + const configuredMaxInFlight = Number.isFinite(config.kafka.maxInFlight) ? 
config.kafka.maxInFlight : 5e3; + const BATCH_SIZE = Math.max(10, Math.min(configuredBatchSize, configuredMaxInFlight)); + const BATCH_TIMEOUT_MS = Math.max(1, configuredBatchTimeoutMs); + const commitOnAttempt = config.kafka.commitOnAttempt === true; + const batchStates = /* @__PURE__ */ new Map(); + const partitionKeyFromMessage = (message) => { + if (message?.topic !== void 0 && message?.partition !== void 0) { + return `${message.topic}-${message.partition}`; + } + return "retry"; + }; + const dayKeyFromTsMs = (tsMs) => { + const numeric = typeof tsMs === "string" ? Number(tsMs) : tsMs; + if (!Number.isFinite(numeric)) return null; + const d = new Date(numeric); + if (Number.isNaN(d.getTime())) return null; + const yyyy = d.getFullYear(); + const mm = String(d.getMonth() + 1).padStart(2, "0"); + const dd = String(d.getDate()).padStart(2, "0"); + return `${yyyy}${mm}${dd}`; + }; + const getBatchState = (key) => { + if (!batchStates.has(key)) { + batchStates.set(key, { items: [], timer: null, flushing: null }); + } + return batchStates.get(key); + }; + const isDbConnectionError = (err) => { + const code = err?.code; + if (typeof code === "string") { + const networkCodes = /* @__PURE__ */ new Set([ + "ECONNREFUSED", + "ECONNRESET", + "EPIPE", + "ETIMEDOUT", + "ENOTFOUND", + "EHOSTUNREACH", + "ENETUNREACH", + "57P03", + "08006", + "08001", + "08000", + "08003" + ]); + if (networkCodes.has(code)) return true; + } + const message = typeof err?.message === "string" ? 
err.message : ""; + if (!message) return false; + const lower = message.toLowerCase(); + return lower.includes("connection timeout") || lower.includes("connection terminated") || lower.includes("connection refused") || lower.includes("terminating connection") || lower.includes("econnrefused") || lower.includes("econnreset") || lower.includes("etimedout") || lower.includes("could not connect") || lower.includes("the database system is starting up") || lower.includes("no pg_hba.conf entry"); + }; + const insertRowsWithRetry = async (rows) => { + const startedAt = Date.now(); + while (true) { + try { + const promises = [ + dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows }) + ]; + if (config.g5db.enabled) { + promises.push(g5DbManager.insertRows({ schema: config.g5db.schema, table: config.g5db.table, rows }).catch((e) => { + logger.error("G5 Database insert failed but non-blocking", { error: e.message }); + })); + } + await Promise.all(promises); + metricCollector.increment("db_insert_count", 1); + metricCollector.increment("db_insert_ms_sum", Date.now() - startedAt); + return; + } catch (err) { + if (isDbConnectionError(err)) { + logger.error("Database offline during batch insert. Retrying in 5s...", { error: err.message }); + await new Promise((r) => setTimeout(r, 5e3)); + while (!await dbManager.checkConnection()) { + logger.warn("Database still offline. 
Waiting 5s..."); + await new Promise((r) => setTimeout(r, 5e3)); + } + continue; + } + throw err; + } + } + }; + const insertRowsOnce = async (rows) => { + const startedAt = Date.now(); + const promises = [ + dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows }) + ]; + if (config.g5db.enabled) { + promises.push(g5DbManager.insertRows({ schema: config.g5db.schema, table: config.g5db.table, rows }).catch((e) => { + logger.error("G5 Database insert failed in insertOnce", { error: e.message }); + })); + } + await Promise.all(promises); + metricCollector.increment("db_insert_count", 1); + metricCollector.increment("db_insert_ms_sum", Date.now() - startedAt); + }; + const resolveInsertedItems = (partitionKey, items) => { + let insertedRows = 0; + for (const p of items) { + insertedRows += p.rows.length; + const dayKey = dayKeyFromTsMs(p.rows?.[0]?.ts_ms); + if (dayKey) { + metricCollector.incrementKeyed("db_inserted_by_day", dayKey, p.rows.length); + } + p.item.resolve(); + } + metricCollector.increment("db_inserted", insertedRows); + metricCollector.incrementKeyed("db_inserted_by_partition", partitionKey, insertedRows); + }; + const handleFailedItem = async (partitionKey, p, err) => { + metricCollector.increment("db_failed"); + metricCollector.incrementKeyed("db_failed_by_partition", partitionKey, 1); + const dayKey = dayKeyFromTsMs(p.rows?.[0]?.ts_ms); + if (dayKey) { + metricCollector.incrementKeyed("db_failed_by_day", dayKey, 1); + } + await handleError(err, p.item.message); + p.item.resolve(); + }; + const insertItemsDegraded = async (partitionKey, items) => { + if (items.length === 0) return; + const rows = items.flatMap((p) => p.rows); + if (commitOnAttempt) { + try { + await insertRowsOnce(rows); + resolveInsertedItems(partitionKey, items); + } catch (err) { + for (const item of items) { + await handleFailedItem(partitionKey, item, err); + } + } + return; + } + try { + await insertRowsWithRetry(rows); + 
resolveInsertedItems(partitionKey, items); + return; + } catch (err) { + if (items.length === 1) { + try { + await insertRowsWithRetry(items[0].rows); + resolveInsertedItems(partitionKey, items); + } catch (innerErr) { + await handleFailedItem(partitionKey, items[0], innerErr); + } + return; + } + const mid = Math.floor(items.length / 2); + await insertItemsDegraded(partitionKey, items.slice(0, mid)); + await insertItemsDegraded(partitionKey, items.slice(mid)); + } + }; + const flushBatchForKey = async (partitionKey) => { + const state = getBatchState(partitionKey); + if (state.flushing) return state.flushing; + state.flushing = (async () => { + if (state.timer) { + clearTimeout(state.timer); + state.timer = null; + } + if (state.items.length === 0) return; + const startedAt = Date.now(); + const currentBatch = state.items; + state.items = []; + const pendingDbItems = []; + const unresolvedItems = []; + try { + for (const item of currentBatch) { + try { + const rows = parseMessageToRows(item.message); + pendingDbItems.push({ item, rows }); + unresolvedItems.push(item); + } catch (err) { + metricCollector.increment("parse_error"); + metricCollector.incrementKeyed("parse_error_by_partition", partitionKey, 1); + logger.error("Message processing failed (Parse/Validation)", { error: err.message }); + await handleError(err, item.message); + item.resolve(); + } + } + if (pendingDbItems.length > 0) { + const firstTs = pendingDbItems[0]?.rows?.[0]?.ts_ms; + const dayKey = dayKeyFromTsMs(firstTs); + if (dayKey) { + const dayStartMs = Date.now(); + await insertItemsDegraded(partitionKey, pendingDbItems); + metricCollector.incrementKeyed("db_insert_ms_sum_by_day", dayKey, Date.now() - dayStartMs); + } else { + await insertItemsDegraded(partitionKey, pendingDbItems); + } + } + metricCollector.increment("batch_flush_count", 1); + metricCollector.increment("batch_flush_ms_sum", Date.now() - startedAt); + } catch (err) { + if (!commitOnAttempt && isDbConnectionError(err)) { + 
state.items = unresolvedItems.concat(state.items); + if (!state.timer) { + state.timer = setTimeout(() => { + state.timer = null; + flushBatchForKey(partitionKey); + }, 5e3); + } + return; + } + logger.error("Batch flush failed (non-network). Marking as consumed", { + error: err?.message, + partitionKey, + batchSize: currentBatch.length + }); + for (const item of unresolvedItems) { + try { + await handleError(err, item.message); + } catch { + } + item.resolve(); + } + } + })().finally(() => { + state.flushing = null; + if (state.items.length > 0) { + if (state.items.length >= BATCH_SIZE) { + flushBatchForKey(partitionKey); + } else if (!state.timer) { + state.timer = setTimeout(() => { + state.timer = null; + flushBatchForKey(partitionKey); + }, BATCH_TIMEOUT_MS); + } + } + }); + return state.flushing; + }; + const handleMessage = (message) => { + if (message.topic) { + metricCollector.increment("kafka_pulled"); + metricCollector.incrementKeyed("kafka_pulled_by_partition", `${message.topic}-${message.partition}`, 1); + } + const partitionKey = partitionKeyFromMessage(message); + const state = getBatchState(partitionKey); + return new Promise((resolve, reject) => { + state.items.push({ message, resolve, reject }); + if (state.items.length >= BATCH_SIZE) { + flushBatchForKey(partitionKey); + } else if (!state.timer) { + state.timer = setTimeout(() => { + state.timer = null; + flushBatchForKey(partitionKey); + }, BATCH_TIMEOUT_MS); + } + }); + }; + const consumers = createKafkaConsumers({ + kafkaConfig: config.kafka, + onMessage: handleMessage, + onError: handleError + }); + startErrorRetryWorker({ + client: redisClient, + queueKey: errorQueueKey, + redisIntegration, + handler: async (item) => { + if (!item?.value) { + throw new Error("Missing value in retry payload"); + } + await handleMessage({ value: item.value }); + } + }).catch((err) => { + logger.error("Retry worker failed", { error: err?.message }); + }); + const shutdown = async (signal) => { + 
logger.info(`Received ${signal}, shutting down...`); + try { + if (consumers && consumers.length > 0) { + await Promise.all(consumers.map((c) => new Promise((resolve) => c.close(true, resolve)))); + logger.info("Kafka consumer closed", { count: consumers.length }); + } + await redisClient.quit(); + logger.info("Redis client closed"); + await dbManager.close(); + await g5DbManager.close(); + logger.info("Database connection closed"); + process.exit(0); + } catch (err) { + logger.error("Error during shutdown", { error: err?.message }); + process.exit(1); + } + }; + process.on("SIGTERM", () => shutdown("SIGTERM")); + process.on("SIGINT", () => shutdown("SIGINT")); +}; +bootstrap().catch((error) => { + logger.error("Service bootstrap failed", { error: error?.message }); + process.exit(1); +}); diff --git a/docs/template/bls-onoffline-backend/ecosystem.config.cjs b/docs/template/bls-onoffline-backend/ecosystem.config.cjs new file mode 100644 index 0000000..543a205 --- /dev/null +++ b/docs/template/bls-onoffline-backend/ecosystem.config.cjs @@ -0,0 +1,22 @@ +module.exports = { + apps: [{ + name: 'bls-onoffline', + script: 'dist/index.js', + instances: 1, + exec_mode: 'fork', + autorestart: true, + watch: false, + max_memory_restart: '1G', + env_file: '.env', + env: { + NODE_ENV: 'production', + PORT: 3001 + }, + error_file: './logs/error.log', + out_file: './logs/out.log', + log_date_format: 'YYYY-MM-DD HH:mm:ss Z', + merge_logs: true, + kill_timeout: 5000, + time: true + }] +}; diff --git a/docs/template/bls-onoffline-backend/openspec/AGENTS.md b/docs/template/bls-onoffline-backend/openspec/AGENTS.md new file mode 100644 index 0000000..96ab0bb --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/AGENTS.md @@ -0,0 +1,456 @@ +# OpenSpec Instructions + +Instructions for AI coding assistants using OpenSpec for spec-driven development. 
+ +## TL;DR Quick Checklist + +- Search existing work: `openspec spec list --long`, `openspec list` (use `rg` only for full-text search) +- Decide scope: new capability vs modify existing capability +- Pick a unique `change-id`: kebab-case, verb-led (`add-`, `update-`, `remove-`, `refactor-`) +- Scaffold: `proposal.md`, `tasks.md`, `design.md` (only if needed), and delta specs per affected capability +- Write deltas: use `## ADDED|MODIFIED|REMOVED|RENAMED Requirements`; include at least one `#### Scenario:` per requirement +- Validate: `openspec validate [change-id] --strict` and fix issues +- Request approval: Do not start implementation until proposal is approved + +## Three-Stage Workflow + +### Stage 1: Creating Changes +Create proposal when you need to: +- Add features or functionality +- Make breaking changes (API, schema) +- Change architecture or patterns +- Optimize performance (changes behavior) +- Update security patterns + +Triggers (examples): +- "Help me create a change proposal" +- "Help me plan a change" +- "Help me create a proposal" +- "I want to create a spec proposal" +- "I want to create a spec" + +Loose matching guidance: +- Contains one of: `proposal`, `change`, `spec` +- With one of: `create`, `plan`, `make`, `start`, `help` + +Skip proposal for: +- Bug fixes (restore intended behavior) +- Typos, formatting, comments +- Dependency updates (non-breaking) +- Configuration changes +- Tests for existing behavior + +**Workflow** +1. Review `openspec/project.md`, `openspec list`, and `openspec list --specs` to understand current context. +2. Choose a unique verb-led `change-id` and scaffold `proposal.md`, `tasks.md`, optional `design.md`, and spec deltas under `openspec/changes//`. +3. Draft spec deltas using `## ADDED|MODIFIED|REMOVED Requirements` with at least one `#### Scenario:` per requirement. +4. Run `openspec validate --strict` and resolve any issues before sharing the proposal. 
+ +### Stage 2: Implementing Changes +Track these steps as TODOs and complete them one by one. +1. **Read proposal.md** - Understand what's being built +2. **Read design.md** (if exists) - Review technical decisions +3. **Read tasks.md** - Get implementation checklist +4. **Implement tasks sequentially** - Complete in order +5. **Confirm completion** - Ensure every item in `tasks.md` is finished before updating statuses +6. **Update checklist** - After all work is done, set every task to `- [x]` so the list reflects reality +7. **Approval gate** - Do not start implementation until the proposal is reviewed and approved + +### Stage 3: Archiving Changes +After deployment, create separate PR to: +- Move `changes/[name]/` → `changes/archive/YYYY-MM-DD-[name]/` +- Update `specs/` if capabilities changed +- Use `openspec archive --skip-specs --yes` for tooling-only changes (always pass the change ID explicitly) +- Run `openspec validate --strict` to confirm the archived change passes checks + +## Before Any Task + +**Context Checklist:** +- [ ] Read relevant specs in `specs/[capability]/spec.md` +- [ ] Check pending changes in `changes/` for conflicts +- [ ] Read `openspec/project.md` for conventions +- [ ] Run `openspec list` to see active changes +- [ ] Run `openspec list --specs` to see existing capabilities + +**Before Creating Specs:** +- Always check if capability already exists +- Prefer modifying existing specs over creating duplicates +- Use `openspec show [spec]` to review current state +- If request is ambiguous, ask 1–2 clarifying questions before scaffolding + +### Search Guidance +- Enumerate specs: `openspec spec list --long` (or `--json` for scripts) +- Enumerate changes: `openspec list` (or `openspec change list --json` - deprecated but available) +- Show details: + - Spec: `openspec show --type spec` (use `--json` for filters) + - Change: `openspec show --json --deltas-only` +- Full-text search (use ripgrep): `rg -n "Requirement:|Scenario:" 
openspec/specs` + +## Quick Start + +### CLI Commands + +```bash +# Essential commands +openspec list # List active changes +openspec list --specs # List specifications +openspec show [item] # Display change or spec +openspec validate [item] # Validate changes or specs +openspec archive [--yes|-y] # Archive after deployment (add --yes for non-interactive runs) + +# Project management +openspec init [path] # Initialize OpenSpec +openspec update [path] # Update instruction files + +# Interactive mode +openspec show # Prompts for selection +openspec validate # Bulk validation mode + +# Debugging +openspec show [change] --json --deltas-only +openspec validate [change] --strict +``` + +### Command Flags + +- `--json` - Machine-readable output +- `--type change|spec` - Disambiguate items +- `--strict` - Comprehensive validation +- `--no-interactive` - Disable prompts +- `--skip-specs` - Archive without spec updates +- `--yes`/`-y` - Skip confirmation prompts (non-interactive archive) + +## Directory Structure + +``` +openspec/ +├── project.md # Project conventions +├── specs/ # Current truth - what IS built +│ └── [capability]/ # Single focused capability +│ ├── spec.md # Requirements and scenarios +│ └── design.md # Technical patterns +├── changes/ # Proposals - what SHOULD change +│ ├── [change-name]/ +│ │ ├── proposal.md # Why, what, impact +│ │ ├── tasks.md # Implementation checklist +│ │ ├── design.md # Technical decisions (optional; see criteria) +│ │ └── specs/ # Delta changes +│ │ └── [capability]/ +│ │ └── spec.md # ADDED/MODIFIED/REMOVED +│ └── archive/ # Completed changes +``` + +## Creating Change Proposals + +### Decision Tree + +``` +New request? +├─ Bug fix restoring spec behavior? → Fix directly +├─ Typo/format/comment? → Fix directly +├─ New feature/capability? → Create proposal +├─ Breaking change? → Create proposal +├─ Architecture change? → Create proposal +└─ Unclear? → Create proposal (safer) +``` + +### Proposal Structure + +1. 
**Create directory:** `changes/[change-id]/` (kebab-case, verb-led, unique) + +2. **Write proposal.md:** +```markdown +# Change: [Brief description of change] + +## Why +[1-2 sentences on problem/opportunity] + +## What Changes +- [Bullet list of changes] +- [Mark breaking changes with **BREAKING**] + +## Impact +- Affected specs: [list capabilities] +- Affected code: [key files/systems] +``` + +3. **Create spec deltas:** `specs/[capability]/spec.md` +```markdown +## ADDED Requirements +### Requirement: New Feature +The system SHALL provide... + +#### Scenario: Success case +- **WHEN** user performs action +- **THEN** expected result + +## MODIFIED Requirements +### Requirement: Existing Feature +[Complete modified requirement] + +## REMOVED Requirements +### Requirement: Old Feature +**Reason**: [Why removing] +**Migration**: [How to handle] +``` +If multiple capabilities are affected, create multiple delta files under `changes/[change-id]/specs//spec.md`—one per capability. + +4. **Create tasks.md:** +```markdown +## 1. Implementation +- [ ] 1.1 Create database schema +- [ ] 1.2 Implement API endpoint +- [ ] 1.3 Add frontend component +- [ ] 1.4 Write tests +``` + +5. **Create design.md when needed:** +Create `design.md` if any of the following apply; otherwise omit it: +- Cross-cutting change (multiple services/modules) or a new architectural pattern +- New external dependency or significant data model changes +- Security, performance, or migration complexity +- Ambiguity that benefits from technical decisions before coding + +Minimal `design.md` skeleton: +```markdown +## Context +[Background, constraints, stakeholders] + +## Goals / Non-Goals +- Goals: [...] +- Non-Goals: [...] + +## Decisions +- Decision: [What and why] +- Alternatives considered: [Options + rationale] + +## Risks / Trade-offs +- [Risk] → Mitigation + +## Migration Plan +[Steps, rollback] + +## Open Questions +- [...] 
+``` + +## Spec File Format + +### Critical: Scenario Formatting + +**CORRECT** (use #### headers): +```markdown +#### Scenario: User login success +- **WHEN** valid credentials provided +- **THEN** return JWT token +``` + +**WRONG** (don't use bullets or bold): +```markdown +- **Scenario: User login** ❌ +**Scenario**: User login ❌ +### Scenario: User login ❌ +``` + +Every requirement MUST have at least one scenario. + +### Requirement Wording +- Use SHALL/MUST for normative requirements (avoid should/may unless intentionally non-normative) + +### Delta Operations + +- `## ADDED Requirements` - New capabilities +- `## MODIFIED Requirements` - Changed behavior +- `## REMOVED Requirements` - Deprecated features +- `## RENAMED Requirements` - Name changes + +Headers matched with `trim(header)` - whitespace ignored. + +#### When to use ADDED vs MODIFIED +- ADDED: Introduces a new capability or sub-capability that can stand alone as a requirement. Prefer ADDED when the change is orthogonal (e.g., adding "Slash Command Configuration") rather than altering the semantics of an existing requirement. +- MODIFIED: Changes the behavior, scope, or acceptance criteria of an existing requirement. Always paste the full, updated requirement content (header + all scenarios). The archiver will replace the entire requirement with what you provide here; partial deltas will drop previous details. +- RENAMED: Use when only the name changes. If you also change behavior, use RENAMED (name) plus MODIFIED (content) referencing the new name. + +Common pitfall: Using MODIFIED to add a new concern without including the previous text. This causes loss of detail at archive time. If you aren’t explicitly changing the existing requirement, add a new requirement under ADDED instead. + +Authoring a MODIFIED requirement correctly: +1) Locate the existing requirement in `openspec/specs//spec.md`. +2) Copy the entire requirement block (from `### Requirement: ...` through its scenarios). 
+3) Paste it under `## MODIFIED Requirements` and edit to reflect the new behavior. +4) Ensure the header text matches exactly (whitespace-insensitive) and keep at least one `#### Scenario:`. + +Example for RENAMED: +```markdown +## RENAMED Requirements +- FROM: `### Requirement: Login` +- TO: `### Requirement: User Authentication` +``` + +## Troubleshooting + +### Common Errors + +**"Change must have at least one delta"** +- Check `changes/[name]/specs/` exists with .md files +- Verify files have operation prefixes (## ADDED Requirements) + +**"Requirement must have at least one scenario"** +- Check scenarios use `#### Scenario:` format (4 hashtags) +- Don't use bullet points or bold for scenario headers + +**Silent scenario parsing failures** +- Exact format required: `#### Scenario: Name` +- Debug with: `openspec show [change] --json --deltas-only` + +### Validation Tips + +```bash +# Always use strict mode for comprehensive checks +openspec validate [change] --strict + +# Debug delta parsing +openspec show [change] --json | jq '.deltas' + +# Check specific requirement +openspec show [spec] --json -r 1 +``` + +## Happy Path Script + +```bash +# 1) Explore current state +openspec spec list --long +openspec list +# Optional full-text search: +# rg -n "Requirement:|Scenario:" openspec/specs +# rg -n "^#|Requirement:" openspec/changes + +# 2) Choose change id and scaffold +CHANGE=add-two-factor-auth +mkdir -p openspec/changes/$CHANGE/{specs/auth} +printf "## Why\n...\n\n## What Changes\n- ...\n\n## Impact\n- ...\n" > openspec/changes/$CHANGE/proposal.md +printf "## 1. Implementation\n- [ ] 1.1 ...\n" > openspec/changes/$CHANGE/tasks.md + +# 3) Add deltas (example) +cat > openspec/changes/$CHANGE/specs/auth/spec.md << 'EOF' +## ADDED Requirements +### Requirement: Two-Factor Authentication +Users MUST provide a second factor during login. 
+ +#### Scenario: OTP required +- **WHEN** valid credentials are provided +- **THEN** an OTP challenge is required +EOF + +# 4) Validate +openspec validate $CHANGE --strict +``` + +## Multi-Capability Example + +``` +openspec/changes/add-2fa-notify/ +├── proposal.md +├── tasks.md +└── specs/ + ├── auth/ + │ └── spec.md # ADDED: Two-Factor Authentication + └── notifications/ + └── spec.md # ADDED: OTP email notification +``` + +auth/spec.md +```markdown +## ADDED Requirements +### Requirement: Two-Factor Authentication +... +``` + +notifications/spec.md +```markdown +## ADDED Requirements +### Requirement: OTP Email Notification +... +``` + +## Best Practices + +### Simplicity First +- Default to <100 lines of new code +- Single-file implementations until proven insufficient +- Avoid frameworks without clear justification +- Choose boring, proven patterns + +### Complexity Triggers +Only add complexity with: +- Performance data showing current solution too slow +- Concrete scale requirements (>1000 users, >100MB data) +- Multiple proven use cases requiring abstraction + +### Clear References +- Use `file.ts:42` format for code locations +- Reference specs as `specs/auth/spec.md` +- Link related changes and PRs + +### Capability Naming +- Use verb-noun: `user-auth`, `payment-capture` +- Single purpose per capability +- 10-minute understandability rule +- Split if description needs "AND" + +### Change ID Naming +- Use kebab-case, short and descriptive: `add-two-factor-auth` +- Prefer verb-led prefixes: `add-`, `update-`, `remove-`, `refactor-` +- Ensure uniqueness; if taken, append `-2`, `-3`, etc. + +## Tool Selection Guide + +| Task | Tool | Why | +|------|------|-----| +| Find files by pattern | Glob | Fast pattern matching | +| Search code content | Grep | Optimized regex search | +| Read specific files | Read | Direct file access | +| Explore unknown scope | Task | Multi-step investigation | + +## Error Recovery + +### Change Conflicts +1. 
Run `openspec list` to see active changes +2. Check for overlapping specs +3. Coordinate with change owners +4. Consider combining proposals + +### Validation Failures +1. Run with `--strict` flag +2. Check JSON output for details +3. Verify spec file format +4. Ensure scenarios properly formatted + +### Missing Context +1. Read project.md first +2. Check related specs +3. Review recent archives +4. Ask for clarification + +## Quick Reference + +### Stage Indicators +- `changes/` - Proposed, not yet built +- `specs/` - Built and deployed +- `archive/` - Completed changes + +### File Purposes +- `proposal.md` - Why and what +- `tasks.md` - Implementation steps +- `design.md` - Technical decisions +- `spec.md` - Requirements and behavior + +### CLI Essentials +```bash +openspec list # What's in progress? +openspec show [item] # View details +openspec validate --strict # Is it correct? +openspec archive [--yes|-y] # Mark complete (add --yes for automation) +``` + +Remember: Specs are truth. Changes are proposals. Keep them in sync. diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md new file mode 100644 index 0000000..b01bdeb --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/proposal.md @@ -0,0 +1,17 @@ +# Change: Fix Kafka Partitioning and Schema Issues + +## Why +Production deployment revealed issues with data ingestion: +1. Kafka Topic name changed to include partition suffix. +2. Legacy data contains second-level timestamps (1970s) causing partition lookup failures in PostgreSQL (which expects ms). +3. Variable-length fields (reboot reason, status) exceeded VARCHAR(10) limits, causing crashes. + +## What Changes +- **Modified Requirement**: Update Kafka Topic to `blwlog4Nodejs-rcu-onoffline-topic-0`. 
+- **New Requirement**: Implement heuristic timestamp conversion (Sec -> MS) for values < 100B. +- **New Requirement**: Truncate specific fields to VARCHAR(255) to prevent DB rejection. +- **Modified Requirement**: Update DB Schema to VARCHAR(255) for robustness. + +## Impact +- Affected specs: `onoffline` +- Affected code: `src/processor/index.js`, `scripts/init_db.sql` diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md new file mode 100644 index 0000000..d5d68f5 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/specs/onoffline/spec.md @@ -0,0 +1,25 @@ +## MODIFIED Requirements +### Requirement: 消费并落库 +系统 SHALL 从 blwlog4Nodejs-rcu-onoffline-topic-0 消费消息,并写入 log_platform.onoffline.onoffline_record。 + +#### Scenario: 非重启数据写入 +- **GIVEN** RebootReason 为空或不存在 +- **WHEN** 消息被处理 +- **THEN** current_status 等于 CurrentStatus (截断至 255 字符) + +## ADDED Requirements +### Requirement: 字段长度限制与截断 +系统 SHALL 将部分变长字段截断至数据库允许的最大长度 (VARCHAR(255)),防止写入失败。 + +#### Scenario: 超长字段处理 +- **GIVEN** LauncherVersion, CurrentStatus 或 RebootReason 超过 255 字符 +- **WHEN** 消息被处理 +- **THEN** 字段被截断为前 255 个字符并入库 + +### Requirement: 时间戳单位自动识别 +系统 SHALL 自动识别 UnixTime 字段是秒还是毫秒,并统一转换为毫秒。 + +#### Scenario: 秒级时间戳转换 +- **GIVEN** UnixTime < 100000000000 (约 1973 年前) +- **WHEN** 解析时间戳 +- **THEN** 自动乘以 1000 转换为毫秒 diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md new file mode 100644 index 0000000..4cd00c5 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-fix-kafka-partition-schema/tasks.md @@ -0,0 +1,6 @@ +## 1. 
Implementation +- [x] Update Kafka Topic in .env and config +- [x] Implement timestamp unit detection and conversion in processor +- [x] Implement field truncation logic in processor +- [x] Update database schema definition (init_db.sql) to VARCHAR(255) +- [x] Verify data ingestion with production stream diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md new file mode 100644 index 0000000..1ef4611 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/proposal.md @@ -0,0 +1,18 @@ +# Change: Optimize Kafka Consumption Performance + +## Why +User reports extremely slow Kafka consumption. Current implementation processes and inserts messages one-by-one, which creates a bottleneck at the database network round-trip time (RTT). + +## What Changes +- **New Requirement**: Implement Batch Processing for Kafka messages. +- **Refactor**: Decouple message parsing from insertion in `processor`. +- **Logic**: + - Accumulate messages in a buffer (e.g., 500ms or 500 items). + - Perform Batch Insert into PostgreSQL. + - Implement Row-by-Row fallback for batch failures (to isolate bad data). + - Handle DB connection errors with retry loop at batch level. + +## Impact +- Affected specs: `onoffline` +- Affected code: `src/index.js`, `src/processor/index.js` +- Performance: Expected 10x-100x throughput increase. 
diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md new file mode 100644 index 0000000..6b0b52b --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/specs/onoffline/spec.md @@ -0,0 +1,13 @@ +## ADDED Requirements +### Requirement: 批量消费与写入 +系统 SHALL 对 Kafka 消息进行缓冲,并按批次写入数据库,以提高吞吐量。 + +#### Scenario: 批量写入 +- **GIVEN** 短时间内收到多条消息 (e.g., 500条) +- **WHEN** 缓冲区满或超时 (e.g., 200ms) +- **THEN** 执行一次批量数据库插入操作 + +#### Scenario: 写入失败降级 +- **GIVEN** 批量写入因数据错误失败 (非连接错误) +- **WHEN** 捕获异常 +- **THEN** 自动降级为逐条写入,以隔离错误数据并确保有效数据入库 diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md new file mode 100644 index 0000000..54e3a35 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-02-04-optimize-kafka-consumption/tasks.md @@ -0,0 +1,5 @@ +## 1. 
Implementation +- [ ] Refactor `src/processor/index.js` to export `parseMessageToRows` +- [ ] Implement `BatchProcessor` logic in `src/index.js` +- [ ] Update `handleMessage` to use `BatchProcessor` +- [ ] Verify performance improvement diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md new file mode 100644 index 0000000..67e0e57 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/proposal.md @@ -0,0 +1,11 @@ +# Proposal: Refactor Partition Indexes + +## Goal +利用 PostgreSQL 默认的支持,改变每日分区创立时的索引策略,不再在代码中对每个分区单独创建索引。 + +## Context +当前 `PartitionManager` 在动态创建子分区后,会隐式调用查询在子分区上创建六个单列索引。由于我们使用的是 PostgreSQL 11+,且我们在初始化脚本中的主分区表 `onoffline.onoffline_record` 上已经创建了所有的索引,此主表上的索引会自动应用于所有的子分区,不需要我们在创建分区时另外手动添加。 + +## Proposed Changes +1. 在 `src/db/partitionManager.js` 中移除子分区显式创建索引的方法 `ensurePartitionIndexes` 以及针对已有子分区的循环索引检查函数 `ensureIndexesForExistingPartitions`。 +2. 
在更新分区流程 `ensurePartitions` 以及 `ensurePartitionsForTimestamps` 中,移除对 `ensurePartitionIndexes` 的调用。 \ No newline at end of file diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md new file mode 100644 index 0000000..e42cc97 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/specs/onoffline/spec.md @@ -0,0 +1,11 @@ +# Spec Delta: onoffline-backend + +## MODIFIED Requirements + +### Requirement: 数据库分区策略 +系统 SHALL 使用 Range Partitioning 按天分区,并自动维护未来 30 天的分区表,子表依赖 PostgreSQL 原生机制继承主表索引。 + +#### Scenario: 分区预创建 +- **GIVEN** 系统启动或每日凌晨 +- **WHEN** 运行分区维护任务 +- **THEN** 确保数据库中存在未来 30 天的分区表,无需对子表显式创建单列索引 diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md new file mode 100644 index 0000000..b205407 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-2026-03-03-refactor-partition-indexes/tasks.md @@ -0,0 +1,6 @@ +# Tasks: Refactor Partition Indexes + +- [x] refactor `src/db/partitionManager.js`: remove `ensurePartitionIndexes` and `ensureIndexesForExistingPartitions`. +- [x] refactor `src/db/partitionManager.js`: update `ensurePartitions` and `ensurePartitionsForTimestamps` to remove calls to `ensurePartitionIndexes`. +- [x] refactor `src/db/initializer.js` (and any other occurrences) to reflect the removal. +- [x] update openspec requirements to clarify that index propagation relies on PostgreSQL parent-table indexes. 
\ No newline at end of file diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md new file mode 100644 index 0000000..31905ce --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/proposal.md @@ -0,0 +1,14 @@ +# Change: remove runtime db provisioning + +## Why +当前服务在运行时承担了建库、建表和分区维护职责,导致服务职责边界不清晰,也会引入启动阶段 DDL 风险。现已将该能力剥离到根目录 `SQL_Script/`,需要通过 OpenSpec 正式记录为规范变更。 + +## What Changes +- 移除服务启动阶段的数据库初始化与定时分区维护要求。 +- 移除服务在写入失败时自动创建缺失分区的要求。 +- 明确数据库结构与分区维护由外部脚本(`SQL_Script/`)负责。 +- 保留服务的核心职责:Kafka 消费、解析、写库、重试与监控。 + +## Impact +- Affected specs: `openspec/specs/onoffline/spec.md` +- Affected code: `src/index.js`, `src/config/config.js`, `src/db/initializer.js`, `src/db/partitionManager.js`, `scripts/init_db.sql`, `scripts/verify_partitions.js`, `../SQL_Script/*` diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md new file mode 100644 index 0000000..4fa7887 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/specs/onoffline/spec.md @@ -0,0 +1,32 @@ +## MODIFIED Requirements + +### Requirement: 数据库分区策略 +系统 SHALL 使用 Range Partitioning 按天分区;运行服务本身 SHALL NOT 执行建库、建表、分区创建或定时分区维护。 + +#### Scenario: 服务启动不执行 DDL +- **GIVEN** 服务进程启动 +- **WHEN** 进入 bootstrap 过程 +- **THEN** 仅初始化消费、处理、监控相关能力,不执行数据库创建、表结构初始化与分区创建 + +#### Scenario: 分区由外部脚本维护 +- **GIVEN** 需要创建数据库对象或新增未来分区 +- **WHEN** 执行外部 SQL/JS 工具 +- **THEN** 通过根目录 `SQL_Script/` 完成建库和分区维护,而不是由服务运行时自动执行 + +### Requirement: 批量消费与写入 +系统 SHALL 对 Kafka 消息进行缓冲,并按批次写入数据库,以提高吞吐量;当写入失败时,系统 SHALL 
执行连接恢复重试与降级策略,但不在运行时创建数据库分区。 + +#### Scenario: 批量写入 +- **GIVEN** 短时间内收到多条消息 (e.g., 500条) +- **WHEN** 缓冲区满或超时 (e.g., 200ms) +- **THEN** 执行一次批量数据库插入操作 + +#### Scenario: 写入失败降级 +- **GIVEN** 批量写入因数据错误失败 (非连接错误) +- **WHEN** 捕获异常 +- **THEN** 自动降级为逐条写入,以隔离错误数据并确保有效数据入库 + +#### Scenario: 分区缺失错误处理 +- **GIVEN** 写入时数据库返回分区缺失错误 +- **WHEN** 服务处理该错误 +- **THEN** 服务记录错误并按既有错误处理机制处理,不在运行时执行分区创建 diff --git a/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md new file mode 100644 index 0000000..bc99b8c --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/changes/archive/2026-03-04-remove-runtime-db-provisioning/tasks.md @@ -0,0 +1,12 @@ +## 1. Implementation +- [x] 1.1 Remove runtime DB initialization from bootstrap flow (`src/index.js`). +- [x] 1.2 Remove scheduled partition maintenance job from runtime service. +- [x] 1.3 Remove runtime missing-partition auto-fix behavior. +- [x] 1.4 Remove legacy DB provisioning modules and scripts from service project. +- [x] 1.5 Add external SQL/JS provisioning scripts under root `SQL_Script/` for DB/schema/partition management. +- [x] 1.6 Update project docs to point DB provisioning to `SQL_Script/`. + +## 2. Validation +- [x] 2.1 Run `npm run lint` in `bls-onoffline-backend`. +- [x] 2.2 Run `npm run build` in `bls-onoffline-backend`. +- [x] 2.3 Run `openspec validate remove-runtime-db-provisioning --strict`. 
diff --git a/docs/template/bls-onoffline-backend/openspec/project.md b/docs/template/bls-onoffline-backend/openspec/project.md new file mode 100644 index 0000000..3da5119 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/project.md @@ -0,0 +1,31 @@ +# Project Context + +## Purpose +[Describe your project's purpose and goals] + +## Tech Stack +- [List your primary technologies] +- [e.g., TypeScript, React, Node.js] + +## Project Conventions + +### Code Style +[Describe your code style preferences, formatting rules, and naming conventions] + +### Architecture Patterns +[Document your architectural decisions and patterns] + +### Testing Strategy +[Explain your testing approach and requirements] + +### Git Workflow +[Describe your branching strategy and commit conventions] + +## Domain Context +[Add domain-specific knowledge that AI assistants need to understand] + +## Important Constraints +[List any technical, business, or regulatory constraints] + +## External Dependencies +[Document key external services, APIs, or systems] diff --git a/docs/template/bls-onoffline-backend/openspec/specs/onoffline/spec.md b/docs/template/bls-onoffline-backend/openspec/specs/onoffline/spec.md new file mode 100644 index 0000000..a3bc8b0 --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/specs/onoffline/spec.md @@ -0,0 +1,103 @@ +# Spec: onoffline-backend + +## Purpose +从 Kafka 消费设备上下线事件并按规则写入 PostgreSQL 分区表,确保高可靠性、幂等写入和错误恢复能力。 +## Requirements +### Requirement: 消费并落库 +系统 SHALL 从 blwlog4Nodejs-rcu-onoffline-topic-0 消费消息,并写入 log_platform.onoffline.onoffline_record。 + +#### Scenario: 非重启数据写入 +- **GIVEN** RebootReason 为空或不存在 +- **WHEN** 消息被处理 +- **THEN** current_status 等于 CurrentStatus (截断至 255 字符) + +### Requirement: 重启数据处理 +系统 SHALL 在 RebootReason 非空时强制 current_status 为 on。 + +#### Scenario: 重启数据写入 +- **GIVEN** RebootReason 为非空值 +- **WHEN** 消息被处理 +- **THEN** current_status 等于 on + +### Requirement: 空值保留 +系统 SHALL 保留上游空值,不对字段进行补 0。 + +#### Scenario: 空值写入 +- 
**GIVEN** LauncherVersion 或 RebootReason 为空字符串 +- **WHEN** 消息被处理 +- **THEN** 数据库存储值为对应的空字符串 + +### Requirement: 数据库分区策略 +系统 SHALL 使用 Range Partitioning 按天分区,并自动维护未来 30 天的分区表,子表依赖 PostgreSQL 原生机制继承主表索引。 + +#### Scenario: 分区预创建 +- **GIVEN** 系统启动或每日凌晨 +- **WHEN** 运行分区维护任务 +- **THEN** 确保数据库中存在未来 30 天的分区表,无需对子表显式创建单列表索引 + +### Requirement: 消费可靠性 (At-Least-Once) +系统 SHALL 仅在数据成功写入数据库后,才向 Kafka 提交消费位点。 + +#### Scenario: 逐条确认与顺序提交 +- **GIVEN** 并发处理多条消息 (Offset 1, 2, 3) +- **WHEN** Offset 2 先完成,Offset 1 尚未完成 +- **THEN** 系统不提交 Offset 2,直到 Offset 1 也完成,才提交 Offset 3 (即 1, 2, 3 都完成) + +### Requirement: 数据库离线保护 +系统 SHALL 在数据库连接丢失时暂停消费,防止数据堆积或丢失。 + +#### Scenario: 数据库断连 +- **GIVEN** 数据库连接失败 (ECONNREFUSED 等) +- **WHEN** 消费者尝试写入 +- **THEN** 暂停 Kafka 消费 1 分钟,并进入轮询检测模式,直到数据库恢复 + +### Requirement: 幂等写入 +系统 SHALL 处理重复消费的数据,防止主键冲突。 + +#### Scenario: 重复数据处理 +- **GIVEN** Kafka 重新投递已处理过的消息 +- **WHEN** 尝试写入数据库 +- **THEN** 使用 `ON CONFLICT DO NOTHING` 忽略冲突,视为处理成功 + +### Requirement: 性能与日志 +系统 SHALL 最小化正常运行时的日志输出。 + +#### Scenario: 正常运行日志 +- **GIVEN** 数据正常处理 +- **WHEN** 写入成功 +- **THEN** 不输出单条日志,仅每分钟输出聚合统计 (Pulled/Inserted) + +### Requirement: 字段长度限制与截断 +系统 SHALL 将部分变长字段截断至数据库允许的最大长度 (VARCHAR(255)),防止写入失败。 + +#### Scenario: 超长字段处理 +- **GIVEN** LauncherVersion, CurrentStatus 或 RebootReason 超过 255 字符 +- **WHEN** 消息被处理 +- **THEN** 字段被截断为前 255 个字符并入库 + +### Requirement: 时间戳单位自动识别 +系统 SHALL 自动识别 UnixTime 字段是秒还是毫秒,并统一转换为毫秒。 + +#### Scenario: 秒级时间戳转换 +- **GIVEN** UnixTime < 100000000000 (约 1973 年前) +- **WHEN** 解析时间戳 +- **THEN** 自动乘以 1000 转换为毫秒 + +### Requirement: 批量消费与写入 +系统 SHALL 对 Kafka 消息进行缓冲,并按批次写入数据库,以提高吞吐量;当写入失败时,系统 SHALL 执行连接恢复重试与降级策略,但不在运行时创建数据库分区。 + +#### Scenario: 批量写入 +- **GIVEN** 短时间内收到多条消息 (e.g., 500条) +- **WHEN** 缓冲区满或超时 (e.g., 200ms) +- **THEN** 执行一次批量数据库插入操作 + +#### Scenario: 写入失败降级 +- **GIVEN** 批量写入因数据错误失败 (非连接错误) +- **WHEN** 捕获异常 +- **THEN** 自动降级为逐条写入,以隔离错误数据并确保有效数据入库 + +#### Scenario: 分区缺失错误处理 +- **GIVEN** 写入时数据库返回分区缺失错误 +- **WHEN** 服务处理该错误 +- **THEN** 服务记录错误并按既有错误处理机制处理,不在运行时执行分区创建 
+ diff --git a/docs/template/bls-onoffline-backend/openspec/specs/onoffline/status.md b/docs/template/bls-onoffline-backend/openspec/specs/onoffline/status.md new file mode 100644 index 0000000..a80909a --- /dev/null +++ b/docs/template/bls-onoffline-backend/openspec/specs/onoffline/status.md @@ -0,0 +1,11 @@ + +## Implementation Status +- **Date**: 2026-02-04 +- **Status**: Completed +- **Notes**: + - 已完成核心消费逻辑、分区管理、数据库幂等写入。 + - 已处理数据库连接泄露 (EADDRINUSE) 问题,增加了离线保护机制。 + - 已修复时间戳单位问题 (Seconds -> MS)。 + - 已将关键字段长度扩展至 VARCHAR(255) 并增加了代码层截断保护。 + - 验证了数据积压消费能力。 + - 本阶段开发任务已归档。 diff --git a/docs/template/bls-onoffline-backend/package-lock.json b/docs/template/bls-onoffline-backend/package-lock.json new file mode 100644 index 0000000..5288e54 --- /dev/null +++ b/docs/template/bls-onoffline-backend/package-lock.json @@ -0,0 +1,3526 @@ +{ + "name": "bls-onoffline-backend", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "bls-onoffline-backend", + "version": "1.0.0", + "dependencies": { + "dotenv": "^16.4.5", + "kafka-node": "^5.0.0", + "node-cron": "^4.2.1", + "pg": "^8.11.5", + "redis": "^4.6.13", + "zod": "^4.3.6" + }, + "devDependencies": { + "vite": "^5.4.0", + "vitest": "^4.0.18" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": 
"sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + 
], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + 
"cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + "integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": 
"sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmmirror.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.6.1", + "resolved": "https://registry.npmmirror.com/@redis/client/-/client-1.6.1.tgz", + "integrity": "sha512-/KCsg3xSlR+nCK8/8ZYSknYxvXHwubJrU82F3Lm1Fp6789VQ0/3RJKfsmRXjqfaTA++23CvC3hqmqe/2GEt6Kw==", + "license": "MIT", + "peer": true, + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/graph": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/@redis/graph/-/graph-1.1.1.tgz", + "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/@redis/json/-/json-1.0.7.tgz", + "integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + 
"node_modules/@redis/search": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/@redis/search/-/search-1.2.0.tgz", + "integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/time-series": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@redis/time-series/-/time-series-1.1.0.tgz", + "integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.0.tgz", + "integrity": "sha512-tPgXB6cDTndIe1ah7u6amCI1T0SsnlOuKgg10Xh3uizJk4e5M1JGaUMk7J4ciuAUcFpbOiNhm2XIjP9ON0dUqA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.0.tgz", + "integrity": "sha512-sa4LyseLLXr1onr97StkU1Nb7fWcg6niokTwEVNOO7awaKaoRObQ54+V/hrF/BP1noMEaaAW6Fg2d/CfLiq3Mg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.0.tgz", + "integrity": "sha512-/NNIj9A7yLjKdmkx5dC2XQ9DmjIECpGpwHoGmA5E1AhU0fuICSqSWScPhN1yLCkEdkCwJIDu2xIeLPs60MNIVg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.0", + "resolved": 
"https://registry.npmmirror.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.0.tgz", + "integrity": "sha512-xoh8abqgPrPYPr7pTYipqnUi1V3em56JzE/HgDgitTqZBZ3yKCWI+7KUkceM6tNweyUKYru1UMi7FC060RyKwA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.0.tgz", + "integrity": "sha512-PCkMh7fNahWSbA0OTUQ2OpYHpjZZr0hPr8lId8twD7a7SeWrvT3xJVyza+dQwXSSq4yEQTMoXgNOfMCsn8584g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.0.tgz", + "integrity": "sha512-1j3stGx+qbhXql4OCDZhnK7b01s6rBKNybfsX+TNrEe9JNq4DLi1yGiR1xW+nL+FNVvI4D02PUnl6gJ/2y6WJA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.0.tgz", + "integrity": "sha512-eyrr5W08Ms9uM0mLcKfM/Uzx7hjhz2bcjv8P2uynfj0yU8GGPdz8iYrBPhiLOZqahoAMB8ZiolRZPbbU2MAi6Q==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.0.tgz", + "integrity": "sha512-Xds90ITXJCNyX9pDhqf85MKWUI4lqjiPAipJ8OLp8xqI2Ehk+TCVhF9rvOoN8xTbcafow3QOThkNnrM33uCFQA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.0.tgz", + "integrity": "sha512-Xws2KA4CLvZmXjy46SQaXSejuKPhwVdaNinldoYfqruZBaJHqVo6hnRa8SDo9z7PBW5x84SH64+izmldCgbezw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.0.tgz", + "integrity": "sha512-hrKXKbX5FdaRJj7lTMusmvKbhMJSGWJ+w++4KmjiDhpTgNlhYobMvKfDoIWecy4O60K6yA4SnztGuNTQF+Lplw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.0.tgz", + "integrity": "sha512-6A+nccfSDGKsPm00d3xKcrsBcbqzCTAukjwWK6rbuAnB2bHaL3r9720HBVZ/no7+FhZLz/U3GwwZZEh6tOSI8Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.0.tgz", + "integrity": "sha512-4P1VyYUe6XAJtQH1Hh99THxr0GKMMwIXsRNOceLrJnaHTDgk1FTcTimDgneRJPvB3LqDQxUmroBclQ1S0cIJwQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.0.tgz", + "integrity": "sha512-8Vv6pLuIZCMcgXre6c3nOPhE0gjz1+nZP6T+hwWjr7sVH8k0jRkH+XnfjjOTglyMBdSKBPPz54/y1gToSKwrSQ==", + "cpu": [ + "ppc64" + 
], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.0.tgz", + "integrity": "sha512-r1te1M0Sm2TBVD/RxBPC6RZVwNqUTwJTA7w+C/IW5v9Ssu6xmxWEi+iJQlpBhtUiT1raJ5b48pI8tBvEjEFnFA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.0.tgz", + "integrity": "sha512-say0uMU/RaPm3CDQLxUUTF2oNWL8ysvHkAjcCzV2znxBr23kFfaxocS9qJm+NdkRhF8wtdEEAJuYcLPhSPbjuQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.0.tgz", + "integrity": "sha512-/MU7/HizQGsnBREtRpcSbSV1zfkoxSTR7wLsRmBPQ8FwUj5sykrP1MyJTvsxP5KBq9SyE6kH8UQQQwa0ASeoQQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.0.tgz", + "integrity": "sha512-Q9eh+gUGILIHEaJf66aF6a414jQbDnn29zeu0eX3dHMuysnhTvsUvZTCAyZ6tJhUjnvzBKE4FtuaYxutxRZpOg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.0.tgz", + "integrity": 
"sha512-OR5p5yG5OKSxHReWmwvM0P+VTPMwoBS45PXTMYaskKQqybkS3Kmugq1W+YbNWArF8/s7jQScgzXUhArzEQ7x0A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.0.tgz", + "integrity": "sha512-XeatKzo4lHDsVEbm1XDHZlhYZZSQYym6dg2X/Ko0kSFgio+KXLsxwJQprnR48GvdIKDOpqWqssC3iBCjoMcMpw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.0.tgz", + "integrity": "sha512-Lu71y78F5qOfYmubYLHPcJm74GZLU6UJ4THkf/a1K7Tz2ycwC2VUbsqbJAXaR6Bx70SRdlVrt2+n5l7F0agTUw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.0.tgz", + "integrity": "sha512-v5xwKDWcu7qhAEcsUubiav7r+48Uk/ENWdr82MBZZRIm7zThSxCIVDfb3ZeRRq9yqk+oIzMdDo6fCcA5DHfMyA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.0.tgz", + "integrity": "sha512-XnaaaSMGSI6Wk8F4KK3QP7GfuuhjGchElsVerCplUuxRIzdvZ7hRBpLR0omCmw+kI2RFJB80nenhOoGXlJ5TfQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.0", + "resolved": 
"https://registry.npmmirror.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.0.tgz", + "integrity": "sha512-3K1lP+3BXY4t4VihLw5MEg6IZD3ojSYzqzBG571W3kNQe4G4CcFpSUQVgurYgib5d+YaCjeFow8QivWp8vuSvA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.0.tgz", + "integrity": "sha512-MDk610P/vJGc5L5ImE4k5s+GZT3en0KoK1MKPXCRgzmksAMk79j4h3k1IerxTNqwDLxsGxStEZVBqG0gIqZqoA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.0.tgz", + "integrity": "sha512-Zv7v6q6aV+VslnpwzqKAmrk5JdVkLUzok2208ZXGipjb+msxBr/fJPZyeEXiFgH7k62Ak0SLIfxQRZQvTuf7rQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmmirror.com/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmmirror.com/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": 
"sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@vitest/expect": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/expect/-/expect-4.0.18.tgz", + "integrity": "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "chai": "^6.2.1", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/pretty-format": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/pretty-format/-/pretty-format-4.0.18.tgz", + "integrity": "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/runner/-/runner-4.0.18.tgz", + "integrity": "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "4.0.18", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/snapshot/-/snapshot-4.0.18.tgz", + 
"integrity": "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.18", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/spy/-/spy-4.0.18.tgz", + "integrity": "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/utils/-/utils-4.0.18.tgz", + "integrity": "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.18", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "license": "ISC", + "optional": true + }, + "node_modules/are-we-there-yet": { + "version": "1.1.7", + "resolved": "https://registry.npmmirror.com/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz", + "integrity": 
"sha512-nxwy40TuMiUGqMyRHgCSWZ9FM4VAoRP4xUYSTv5ImRog+h9yISPbVH7H8fASCIzYn9wlEv4zvFL7uKDMCFQm3g==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "optional": true, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + } + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/async": { + "version": "2.6.4", + "resolved": "https://registry.npmmirror.com/async/-/async-2.6.4.tgz", + "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", + "license": "MIT", + "dependencies": { + "lodash": "^4.17.14" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/binary": { + "version": "0.3.0", + "resolved": "https://registry.npmmirror.com/binary/-/binary-0.3.0.tgz", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", + "license": "MIT", + "dependencies": { + "buffers": 
"~0.1.1", + "chainsaw": "~0.1.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmmirror.com/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, + "node_modules/bl": { + "version": "2.2.1", + "resolved": "https://registry.npmmirror.com/bl/-/bl-2.2.1.tgz", + "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", + "license": "MIT", + "dependencies": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/buffer-alloc": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz", + "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", + "license": "MIT", + "optional": true, + "dependencies": { + "buffer-alloc-unsafe": "^1.1.0", + "buffer-fill": "^1.0.0" + } + }, + "node_modules/buffer-alloc-unsafe": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", + "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==", + "license": "MIT", + "optional": true + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmmirror.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": 
"sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/buffer-fill": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/buffer-fill/-/buffer-fill-1.0.0.tgz", + "integrity": "sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==", + "license": "MIT", + "optional": true + }, + "node_modules/buffermaker": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/buffermaker/-/buffermaker-1.2.1.tgz", + "integrity": "sha512-IdnyU2jDHU65U63JuVQNTHiWjPRH0CS3aYd/WPaEwyX84rFdukhOduAVb1jwUScmb5X0JWPw8NZOrhoLMiyAHQ==", + "license": "MIT", + "dependencies": { + "long": "1.1.2" + } + }, + "node_modules/buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmmirror.com/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", + "engines": { + "node": ">=0.2.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmmirror.com/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-errors": "^1.3.0", + 
"function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmmirror.com/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/chai": { + "version": "6.2.2", + "resolved": "https://registry.npmmirror.com/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmmirror.com/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", + "license": "MIT/X11", + "dependencies": { + "traverse": ">=0.3.0 <0.4" + }, + "engines": { + "node": "*" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC", + "optional": true + }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/code-point-at/-/code-point-at-1.1.0.tgz", + 
"integrity": "sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmmirror.com/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "license": "ISC", + "optional": true + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmmirror.com/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmmirror.com/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", + "license": "MIT", + "optional": true, + "dependencies": { + "mimic-response": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmmirror.com/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": 
"sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmmirror.com/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "license": "MIT", + "optional": true + }, + "node_modules/denque": { + "version": "1.5.1", + "resolved": "https://registry.npmmirror.com/denque/-/denque-1.5.1.tgz", + "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", + "license": "Apache-2.0", + "optional": true, + "bin": { + "detect-libc": "bin/detect-libc.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmmirror.com/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "license": "BSD-2-Clause", + "engines": { + "node": 
">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmmirror.com/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "optional": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmmirror.com/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + 
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmmirror.com/expand-template/-/expand-template-2.0.3.tgz", + 
"integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "license": "(MIT OR WTFPL)", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmmirror.com/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "license": "MIT", + "optional": true + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmmirror.com/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "license": "MIT", + "optional": true, + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + 
"license": "MIT", + "optional": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "optional": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmmirror.com/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha512-14x4kjc6lkD3ltw589k0NrPD6cCNTD6CWoVUNpB85+DrtONoZn+Rug6xZU5RvSC4+TZPxA5AnBibQYAvZn41Hg==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "optional": true, + "dependencies": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmmirror.com/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": 
"MIT", + "optional": true, + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "optional": true, + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmmirror.com/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", + "license": "MIT", + "optional": true + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", + "optional": true, + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "optional": true, + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "license": "ISC", + "optional": true + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmmirror.com/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmmirror.com/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmmirror.com/ini/-/ini-1.3.8.tgz", + "integrity": 
"sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "license": "ISC", + "optional": true + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmmirror.com/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw==", + "license": "MIT", + "optional": true, + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmmirror.com/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmmirror.com/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, + "node_modules/kafka-node": { + "version": "5.0.0", + "resolved": "https://registry.npmmirror.com/kafka-node/-/kafka-node-5.0.0.tgz", + "integrity": "sha512-dD2ga5gLcQhsq1yNoQdy1MU4x4z7YnXM5bcG9SdQuiNr5KKuAmXixH1Mggwdah5o7EfholFbcNDPSVA6BIfaug==", + "license": "MIT", + "dependencies": { + 
"async": "^2.6.2", + "binary": "~0.3.0", + "bl": "^2.2.0", + "buffer-crc32": "~0.2.5", + "buffermaker": "~1.2.0", + "debug": "^2.1.3", + "denque": "^1.3.0", + "lodash": "^4.17.4", + "minimatch": "^3.0.2", + "nested-error-stacks": "^2.0.0", + "optional": "^0.1.3", + "retry": "^0.10.1", + "uuid": "^3.0.0" + }, + "engines": { + "node": ">=8.5.1" + }, + "optionalDependencies": { + "snappy": "^6.0.1" + } + }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmmirror.com/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "license": "MIT" + }, + "node_modules/long": { + "version": "1.1.2", + "resolved": "https://registry.npmmirror.com/long/-/long-1.1.2.tgz", + "integrity": "sha512-pjR3OP1X2VVQhCQlrq3s8UxugQsuoucwMOn9Yj/kN/61HMc+lDFJS5bvpNEHneZ9NVaSm8gNWxZvtGS7lqHb3Q==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmmirror.com/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "license": "MIT", + "optional": true, + "engines": 
{ + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmmirror.com/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmmirror.com/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "optional": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmmirror.com/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "license": "MIT", + "optional": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/nan": { + "version": "2.25.0", + "resolved": "https://registry.npmmirror.com/nan/-/nan-2.25.0.tgz", + "integrity": "sha512-0M90Ag7Xn5KMLLZ7zliPWP3rT90P6PN+IzVFS0VqmnPktBk3700xUVv8Ikm9EUaUE5SDWdp/BIxdENzVznpm1g==", + "license": "MIT", + "optional": true + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + 
"bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/napi-build-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", + "license": "MIT", + "optional": true + }, + "node_modules/nested-error-stacks": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/nested-error-stacks/-/nested-error-stacks-2.1.1.tgz", + "integrity": "sha512-9iN1ka/9zmX1ZvLV9ewJYEk9h7RyRRtqdK0woXcqohu8EWIerfPUjYJPg0ULy0UqP7cslmdGc8xKDJcojlKiaw==", + "license": "MIT" + }, + "node_modules/node-abi": { + "version": "2.30.1", + "resolved": "https://registry.npmmirror.com/node-abi/-/node-abi-2.30.1.tgz", + "integrity": "sha512-/2D0wOQPgaUWzVSVgRMx+trKJRC2UG4SUc4oCJoXx9Uxjtp0Vy3/kt7zcbxHF8+Z/pK3UloLWzBISg72brfy1w==", + "license": "MIT", + "optional": true, + "dependencies": { + "semver": "^5.4.1" + } + }, + "node_modules/node-cron": { + "version": "4.2.1", + "resolved": "https://registry.npmmirror.com/node-cron/-/node-cron-4.2.1.tgz", + "integrity": "sha512-lgimEHPE/QDgFlywTd8yTR61ptugX3Qer29efeyWw2rv259HtGBNn1vZVmp8lB9uo9wC0t/AT4iGqXxia+CJFg==", + "license": "ISC", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/noop-logger": { + "version": "0.1.1", + "resolved": "https://registry.npmmirror.com/noop-logger/-/noop-logger-0.1.1.tgz", + "integrity": "sha512-6kM8CLXvuW5crTxsAtva2YLrRrDaiTIkIePWs9moLHqbFWT94WpNFjwS/5dfLfECg5i/lkmw3aoqVidxt23TEQ==", + "license": "MIT", + "optional": true + }, + "node_modules/npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmmirror.com/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "optional": 
true, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "node_modules/number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmmirror.com/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmmirror.com/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmmirror.com/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "optional": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optional": { + "version": "0.1.4", + "resolved": "https://registry.npmmirror.com/optional/-/optional-0.1.4.tgz", + "integrity": "sha512-gtvrrCfkE08wKcgXaVwQVgwEQ8vel2dc5DDBn9RLQZ3YtmtkBss6A2HY6BnJH4N/4Ku97Ri/SF8sNWE2225WJw==", + "license": "MIT" + }, + "node_modules/os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": 
"sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmmirror.com/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pg": { + "version": "8.17.2", + "resolved": "https://registry.npmmirror.com/pg/-/pg-8.17.2.tgz", + "integrity": "sha512-vjbKdiBJRqzcYw1fNU5KuHyYvdJ1qpcQg1CeBrHFqV1pWgHeVR6j/+kX0E1AAXfyuLUGY1ICrN2ELKA/z2HWzw==", + "license": "MIT", + "peer": true, + "dependencies": { + "pg-connection-string": "^2.10.1", + "pg-pool": "^3.11.0", + "pg-protocol": "^1.11.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.3.0", + "resolved": "https://registry.npmmirror.com/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", + "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.10.1", + "resolved": "https://registry.npmmirror.com/pg-connection-string/-/pg-connection-string-2.10.1.tgz", + "integrity": "sha512-iNzslsoeSH2/gmDDKiyMqF64DATUCWj3YJ0wP14kqcsf2TUklwimd+66yYojKwZCA7h2yRNLGug71hCBA2a4sw==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + 
"node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.11.0", + "resolved": "https://registry.npmmirror.com/pg-pool/-/pg-pool-3.11.0.tgz", + "integrity": "sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.11.0", + "resolved": "https://registry.npmmirror.com/pg-protocol/-/pg-protocol-1.11.0.tgz", + "integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmmirror.com/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmmirror.com/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmmirror.com/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + 
"node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmmirror.com/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmmirror.com/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": 
"sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmmirror.com/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prebuild-install": { + "version": "5.3.0", + "resolved": "https://registry.npmmirror.com/prebuild-install/-/prebuild-install-5.3.0.tgz", + "integrity": "sha512-aaLVANlj4HgZweKttFNUVNRxDukytuIuxeK2boIMHjagNJCiVKWFsKF4tCE3ql3GbrD2tExPQ7/pwtEJcHNZeg==", + "license": "MIT", + "optional": true, + "dependencies": { + "detect-libc": "^1.0.3", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "napi-build-utils": "^1.0.1", + "node-abi": "^2.7.0", + "noop-logger": "^0.1.1", + "npmlog": "^4.0.1", + "os-homedir": "^1.0.1", + "pump": "^2.0.1", + "rc": "^1.2.7", + "simple-get": "^2.7.0", + "tar-fs": "^1.13.0", + "tunnel-agent": "^0.6.0", + "which-pm-runs": "^1.0.0" + }, + "bin": { + "prebuild-install": "bin.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "license": "MIT" + }, + "node_modules/pump": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/pump/-/pump-2.0.1.tgz", + "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", + "license": "MIT", + "optional": true, + "dependencies": { + "end-of-stream": "^1.1.0", + 
"once": "^1.3.1" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmmirror.com/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "optional": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmmirror.com/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/redis": { + "version": "4.7.1", + "resolved": "https://registry.npmmirror.com/redis/-/redis-4.7.1.tgz", + "integrity": "sha512-S1bJDnqLftzHXHP8JsT5II/CtHWQrASX5K96REjWjlmWKrviSOLWmM7QnRLstAWsu1VBBV1ffV6DzCvxNP0UJQ==", + "license": "MIT", + "workspaces": [ + "./packages/*" + ], + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.1", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + "@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" + } + }, + "node_modules/retry": { + "version": "0.10.1", + "resolved": "https://registry.npmmirror.com/retry/-/retry-0.10.1.tgz", + "integrity": 
"sha512-ZXUSQYTHdl3uS7IuCehYfMzKyIDBNoAuUblvy5oGO5UJSUTmStUUVPXbA9Qxd173Bgre53yCQczQuHgRWAdvJQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/rollup": { + "version": "4.57.0", + "resolved": "https://registry.npmmirror.com/rollup/-/rollup-4.57.0.tgz", + "integrity": "sha512-e5lPJi/aui4TO1LpAXIRLySmwXSE8k3b9zoGfd42p67wzxog4WHjiZF3M2uheQih4DGyc25QEV4yRBbpueNiUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.57.0", + "@rollup/rollup-android-arm64": "4.57.0", + "@rollup/rollup-darwin-arm64": "4.57.0", + "@rollup/rollup-darwin-x64": "4.57.0", + "@rollup/rollup-freebsd-arm64": "4.57.0", + "@rollup/rollup-freebsd-x64": "4.57.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.0", + "@rollup/rollup-linux-arm-musleabihf": "4.57.0", + "@rollup/rollup-linux-arm64-gnu": "4.57.0", + "@rollup/rollup-linux-arm64-musl": "4.57.0", + "@rollup/rollup-linux-loong64-gnu": "4.57.0", + "@rollup/rollup-linux-loong64-musl": "4.57.0", + "@rollup/rollup-linux-ppc64-gnu": "4.57.0", + "@rollup/rollup-linux-ppc64-musl": "4.57.0", + "@rollup/rollup-linux-riscv64-gnu": "4.57.0", + "@rollup/rollup-linux-riscv64-musl": "4.57.0", + "@rollup/rollup-linux-s390x-gnu": "4.57.0", + "@rollup/rollup-linux-x64-gnu": "4.57.0", + "@rollup/rollup-linux-x64-musl": "4.57.0", + "@rollup/rollup-openbsd-x64": "4.57.0", + "@rollup/rollup-openharmony-arm64": "4.57.0", + "@rollup/rollup-win32-arm64-msvc": "4.57.0", + "@rollup/rollup-win32-ia32-msvc": "4.57.0", + "@rollup/rollup-win32-x64-gnu": "4.57.0", + "@rollup/rollup-win32-x64-msvc": "4.57.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmmirror.com/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "license": "ISC", + "optional": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "license": "ISC", + "optional": true + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmmirror.com/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "license": "MIT", + "optional": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmmirror.com/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmmirror.com/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": 
"sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC", + "optional": true + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "optional": true + }, + "node_modules/simple-get": { + "version": "2.8.2", + "resolved": "https://registry.npmmirror.com/simple-get/-/simple-get-2.8.2.tgz", + "integrity": "sha512-Ijd/rV5o+mSBBs4F/x9oDPtTx9Zb6X9brmnXvMW4J7IR15ngi9q5xxqWBKU744jTZiaXtxaPL7uHG6vtN8kUkw==", + "license": "MIT", + "optional": true, + "dependencies": { + "decompress-response": "^3.3.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/snappy": { + "version": "6.3.5", + "resolved": "https://registry.npmmirror.com/snappy/-/snappy-6.3.5.tgz", + "integrity": "sha512-lonrUtdp1b1uDn1dbwgQbBsb5BbaiLeKq+AGwOk2No+en+VvJThwmtztwulEQsLinRF681pBqib0NUZaizKLIA==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "dependencies": { + "bindings": "^1.3.1", + "nan": "^2.14.1", + "prebuild-install": "5.3.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmmirror.com/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmmirror.com/split2/-/split2-4.2.0.tgz", + "integrity": 
"sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmmirror.com/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "3.10.0", + "resolved": "https://registry.npmmirror.com/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "dev": true, + "license": "MIT" + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmmirror.com/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmmirror.com/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw==", + "license": "MIT", + "optional": true, + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmmirror.com/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": 
"sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", + "license": "MIT", + "optional": true, + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tar-fs": { + "version": "1.16.6", + "resolved": "https://registry.npmmirror.com/tar-fs/-/tar-fs-1.16.6.tgz", + "integrity": "sha512-JkOgFt3FxM/2v2CNpAVHqMW2QASjc/Hxo7IGfNd3MHaDYSW/sBFiS7YVmmhmr8x6vwN1VFQDQGdT2MWpmIuVKA==", + "license": "MIT", + "optional": true, + "dependencies": { + "chownr": "^1.0.1", + "mkdirp": "^0.5.1", + "pump": "^1.0.0", + "tar-stream": "^1.1.2" + } + }, + "node_modules/tar-fs/node_modules/pump": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/pump/-/pump-1.0.3.tgz", + "integrity": "sha512-8k0JupWme55+9tCVE+FS5ULT3K6AbgqrGa58lTT49RpyfwwcGedHqaC5LlQNdEAumn/wFsu6aPwkuPMioy8kqw==", + "license": "MIT", + "optional": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmmirror.com/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "license": "MIT", + "optional": true, + "dependencies": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/tar-stream/node_modules/bl": { + "version": "1.2.3", + "resolved": 
"https://registry.npmmirror.com/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "license": "MIT", + "optional": true, + "dependencies": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmmirror.com/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmmirror.com/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyrainbow": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/tinyrainbow/-/tinyrainbow-3.0.3.tgz", + "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/to-buffer": { + "version": "1.2.2", + "resolved": "https://registry.npmmirror.com/to-buffer/-/to-buffer-1.2.2.tgz", + "integrity": "sha512-db0E3UJjcFhpDhAF4tLo03oli3pwl3dbnzXOUIlRKrp+ldk/VUxzpWYZENsw2SZiuBjHAk7DfB0VU7NKdpb6sw==", + "license": "MIT", + "optional": true, + 
"dependencies": { + "isarray": "^2.0.5", + "safe-buffer": "^5.2.1", + "typed-array-buffer": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/to-buffer/node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmmirror.com/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "license": "MIT", + "optional": true + }, + "node_modules/traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmmirror.com/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", + "license": "MIT/X11", + "engines": { + "node": "*" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmmirror.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmmirror.com/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "license": "MIT", + "optional": true, + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/uuid": { + "version": "3.4.0", + "resolved": 
"https://registry.npmmirror.com/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "license": "MIT", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmmirror.com/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vitest": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/vitest/-/vitest-4.0.18.tgz", + "integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "4.0.18", + "@vitest/mocker": "4.0.18", + "@vitest/pretty-format": "4.0.18", + "@vitest/runner": 
"4.0.18", + "@vitest/snapshot": "4.0.18", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "es-module-lexer": "^1.7.0", + "expect-type": "^1.2.2", + "magic-string": "^0.30.21", + "obug": "^2.1.1", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "std-env": "^3.10.0", + "tinybench": "^2.9.0", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3", + "vite": "^6.0.0 || ^7.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.0.18", + "@vitest/browser-preview": "4.0.18", + "@vitest/browser-webdriverio": "4.0.18", + "@vitest/ui": "4.0.18", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/android-arm": { + "version": "0.27.2", + "resolved": 
"https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/android-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/android-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/darwin-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", + "cpu": [ + "x64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-ia32": { + "version": "0.27.2", + "resolved": 
"https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-loong64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", + "cpu": [ + "riscv64" + 
], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-s390x": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/linux-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/sunos-x64": { + "version": "0.27.2", + "resolved": 
"https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/win32-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/win32-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@esbuild/win32-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/@vitest/mocker": { + "version": "4.0.18", + "resolved": "https://registry.npmmirror.com/@vitest/mocker/-/mocker-4.0.18.tgz", + "integrity": "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "4.0.18", + 
"estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/esbuild": { + "version": "0.27.2", + "resolved": "https://registry.npmmirror.com/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": "0.27.2", + "@esbuild/linux-arm": "0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + "@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" + } + }, + "node_modules/vitest/node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmmirror.com/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dev": true, + "license": "MIT", + "peer": true, + 
"dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/which-pm-runs": { + "version": "1.1.0", + "resolved": "https://registry.npmmirror.com/which-pm-runs/-/which-pm-runs-1.1.0.tgz", + "integrity": "sha512-n1brCuqClxfFfq/Rb0ICg9giSZqCS+pLtccdag6C2HyufBrh3fBOiy9nb6ggRMvWOVH5GrdJskj5iGTZNxd7SA==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.20", + "resolved": "https://registry.npmmirror.com/which-typed-array/-/which-typed-array-1.1.20.tgz", + "integrity": "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==", + "license": "MIT", + "optional": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": 
"^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmmirror.com/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmmirror.com/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "license": "ISC", + "optional": true, + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmmirror.com/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC", + "optional": true + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmmirror.com/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmmirror.com/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" + }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmmirror.com/zod/-/zod-4.3.6.tgz", + "integrity": 
"sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + } + } +} diff --git a/docs/template/bls-onoffline-backend/package.json b/docs/template/bls-onoffline-backend/package.json new file mode 100644 index 0000000..f77629f --- /dev/null +++ b/docs/template/bls-onoffline-backend/package.json @@ -0,0 +1,27 @@ +{ + "name": "bls-onoffline-backend", + "version": "1.0.0", + "type": "module", + "private": true, + "scripts": { + "dev": "node src/index.js", + "build": "vite build --ssr src/index.js --outDir dist", + "test": "vitest run", + "lint": "node scripts/lint.js", + "spec:lint": "openspec validate --specs --strict --no-interactive", + "spec:validate": "openspec validate --specs --no-interactive", + "start": "node dist/index.js" + }, + "dependencies": { + "dotenv": "^16.4.5", + "kafka-node": "^5.0.0", + "node-cron": "^4.2.1", + "pg": "^8.11.5", + "redis": "^4.6.13", + "zod": "^4.3.6" + }, + "devDependencies": { + "vite": "^5.4.0", + "vitest": "^4.0.18" + } +} diff --git a/docs/template/bls-onoffline-backend/scripts/lint.js b/docs/template/bls-onoffline-backend/scripts/lint.js new file mode 100644 index 0000000..eaa9035 --- /dev/null +++ b/docs/template/bls-onoffline-backend/scripts/lint.js @@ -0,0 +1,41 @@ +import fs from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; +import { spawnSync } from 'child_process'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const projectRoot = path.resolve(__dirname, '..'); +const targets = ['src', 'tests']; + +const collectFiles = (dir) => { + if (!fs.existsSync(dir)) { + return []; + } + const entries = fs.readdirSync(dir, { withFileTypes: true }); + return entries.flatMap((entry) => { + const fullPath = path.join(dir, entry.name); + if (entry.isDirectory()) { + return collectFiles(fullPath); + } + if (entry.isFile() && 
fullPath.endsWith('.js')) { + return [fullPath]; + } + return []; + }); +}; + +const files = targets.flatMap((target) => collectFiles(path.join(projectRoot, target))); + +const failures = []; + +files.forEach((file) => { + const result = spawnSync(process.execPath, ['--check', file], { stdio: 'inherit' }); + if (result.status !== 0) { + failures.push(file); + } +}); + +if (failures.length > 0) { + process.exit(1); +} diff --git a/docs/template/bls-onoffline-backend/scripts/verify_data.js b/docs/template/bls-onoffline-backend/scripts/verify_data.js new file mode 100644 index 0000000..4440be0 --- /dev/null +++ b/docs/template/bls-onoffline-backend/scripts/verify_data.js @@ -0,0 +1,36 @@ + +import { config } from '../src/config/config.js'; +import dbManager from '../src/db/databaseManager.js'; +import { logger } from '../src/utils/logger.js'; + +const verifyData = async () => { + const client = await dbManager.pool.connect(); + try { + console.log('Verifying data in database...'); + + // Count total rows + const countSql = `SELECT count(*) FROM ${config.db.schema}.${config.db.table}`; + const countRes = await client.query(countSql); + console.log(`Total rows in ${config.db.schema}.${config.db.table}: ${countRes.rows[0].count}`); + + // Check recent rows + const recentSql = ` + SELECT * FROM ${config.db.schema}.${config.db.table} + ORDER BY ts_ms DESC + LIMIT 5 + `; + const recentRes = await client.query(recentSql); + console.log('Recent 5 rows:'); + recentRes.rows.forEach(row => { + console.log(JSON.stringify(row)); + }); + + } catch (err) { + console.error('Error verifying data:', err); + } finally { + client.release(); + await dbManager.pool.end(); + } +}; + +verifyData(); diff --git a/docs/template/bls-onoffline-backend/spec/onoffline-spec.md b/docs/template/bls-onoffline-backend/spec/onoffline-spec.md new file mode 100644 index 0000000..8115bef --- /dev/null +++ b/docs/template/bls-onoffline-backend/spec/onoffline-spec.md @@ -0,0 +1,50 @@ +bls-onoffline-backend 
规格说明 + +1. Kafka 数据结构 +{ + "HotelCode": "1085", + "MAC": "00:1A:2B:3C:4D:5E", + "HostNumber": "091123987456", + "RoomNumber": "8888房", + "EndPoint": "50.2.60.1:6543", + "CurrentStatus": "on", + "CurrentTime": "2026-02-02T10:30:00Z", + "UnixTime": 1770000235000, + "LauncherVersion": "1.0.0", + "RebootReason": "1" +} + +2. Kafka 主题 +Topic:blwlog4Nodejs-rcu-onoffline-topic + +3. 数据库结构 +数据库:log_platform +表:onoffline_record +字段: + guid varchar(32) + ts_ms int8 + write_ts_ms int8 + hotel_id int2 + mac varchar(21) + device_id varchar(64) + room_id varchar(64) + ip varchar(21) + current_status varchar(10) + launcher_version varchar(10) + reboot_reason varchar(10) +主键:(ts_ms, mac, device_id, room_id) +按 ts_ms 每日分区 + +G5库结构(双写,临时接入): +库同为:log_platform +表:onoffline_record_g5 +差异字段: + - guid 为 int4,由库自己生成。 + - record_source 固定为 CRICS。 + - current_status 为 int2,on映射为1,off映射为2,其余为0。 +支持通过环境变量开关双写。 + +4. 数据处理规则 +非重启数据:reboot_reason 为空或不存在,current_status 取 CurrentStatus +重启数据:reboot_reason 不为空,current_status 固定为 on +其余字段直接按 Kafka 原值落库,空值不补 0 diff --git a/docs/template/bls-onoffline-backend/src/config/config.js b/docs/template/bls-onoffline-backend/src/config/config.js new file mode 100644 index 0000000..1669d9f --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/config/config.js @@ -0,0 +1,72 @@ +import dotenv from 'dotenv'; + +dotenv.config(); + +const parseNumber = (value, defaultValue) => { + const parsed = Number(value); + return Number.isFinite(parsed) ? 
parsed : defaultValue; +}; + +const parseList = (value) => + (value || '') + .split(',') + .map((item) => item.trim()) + .filter(Boolean); + +export const config = { + env: process.env.NODE_ENV || 'development', + port: parseNumber(process.env.PORT, 3001), + kafka: { + brokers: parseList(process.env.KAFKA_BROKERS), + topic: process.env.KAFKA_TOPIC || process.env.KAFKA_TOPICS || 'blwlog4Nodejs-rcu-onoffline-topic', + groupId: process.env.KAFKA_GROUP_ID || 'bls-onoffline-group', + clientId: process.env.KAFKA_CLIENT_ID || 'bls-onoffline-client', + consumerInstances: parseNumber(process.env.KAFKA_CONSUMER_INSTANCES, 1), + maxInFlight: parseNumber(process.env.KAFKA_MAX_IN_FLIGHT, 20000), + fetchMaxBytes: parseNumber(process.env.KAFKA_FETCH_MAX_BYTES, 50 * 1024 * 1024), + fetchMinBytes: parseNumber(process.env.KAFKA_FETCH_MIN_BYTES, 256 * 1024), + fetchMaxWaitMs: parseNumber(process.env.KAFKA_FETCH_MAX_WAIT_MS, 100), + autoCommitIntervalMs: parseNumber(process.env.KAFKA_AUTO_COMMIT_INTERVAL_MS, 5000), + commitIntervalMs: parseNumber(process.env.KAFKA_COMMIT_INTERVAL_MS, 200), + commitOnAttempt: process.env.KAFKA_COMMIT_ON_ATTEMPT === 'true', + batchSize: parseNumber(process.env.KAFKA_BATCH_SIZE, 5000), + batchTimeoutMs: parseNumber(process.env.KAFKA_BATCH_TIMEOUT_MS, 50), + logMessages: process.env.KAFKA_LOG_MESSAGES === 'true', + sasl: process.env.KAFKA_SASL_USERNAME && process.env.KAFKA_SASL_PASSWORD ? 
{ + mechanism: process.env.KAFKA_SASL_MECHANISM || 'plain', + username: process.env.KAFKA_SASL_USERNAME, + password: process.env.KAFKA_SASL_PASSWORD + } : undefined + }, + db: { + host: process.env.DB_HOST || process.env.POSTGRES_HOST || 'localhost', + port: parseNumber(process.env.DB_PORT || process.env.POSTGRES_PORT, 5432), + user: process.env.DB_USER || process.env.POSTGRES_USER || 'postgres', + password: process.env.DB_PASSWORD || process.env.POSTGRES_PASSWORD || '', + database: process.env.DB_DATABASE || process.env.POSTGRES_DATABASE || 'log_platform', + max: parseNumber(process.env.DB_MAX_CONNECTIONS || process.env.POSTGRES_MAX_CONNECTIONS, 10), + ssl: process.env.DB_SSL === 'true' ? { rejectUnauthorized: false } : undefined, + schema: process.env.DB_SCHEMA || 'onoffline', + table: process.env.DB_TABLE || 'onoffline_record' + }, + g5db: { + enabled: !!process.env.POSTGRES_HOST_G5, + host: process.env.POSTGRES_HOST_G5, + port: parseNumber(process.env.POSTGRES_PORT_G5, 5434), + user: process.env.POSTGRES_USER_G5, + password: process.env.POSTGRES_PASSWORD_G5, + database: process.env.POSTGRES_DATABASE_G5, + max: parseNumber(process.env.POSTGRES_MAX_CONNECTIONS_G5, 3), + ssl: process.env.POSTGRES_SSL_G5 === 'true' ? 
{ rejectUnauthorized: false } : undefined, + schema: process.env.DB_SCHEMA_G5 || 'onoffline', + table: process.env.DB_TABLE_G5 || 'onoffline_record_g5' + }, + redis: { + host: process.env.REDIS_HOST || 'localhost', + port: parseNumber(process.env.REDIS_PORT, 6379), + password: process.env.REDIS_PASSWORD || undefined, + db: parseNumber(process.env.REDIS_DB, 0), + projectName: process.env.REDIS_PROJECT_NAME || 'bls-onoffline', + apiBaseUrl: process.env.REDIS_API_BASE_URL || `http://localhost:${parseNumber(process.env.PORT, 3001)}` + } +}; diff --git a/docs/template/bls-onoffline-backend/src/db/databaseManager.js b/docs/template/bls-onoffline-backend/src/db/databaseManager.js new file mode 100644 index 0000000..9c807de --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/db/databaseManager.js @@ -0,0 +1,108 @@ +import pg from 'pg'; +import { config } from '../config/config.js'; +import { logger } from '../utils/logger.js'; + +const { Pool } = pg; + +const columns = [ + 'guid', + 'ts_ms', + 'write_ts_ms', + 'hotel_id', + 'mac', + 'device_id', + 'room_id', + 'ip', + 'current_status', + 'launcher_version', + 'reboot_reason' +]; + +export class DatabaseManager { + constructor(dbConfig) { + this.pool = new Pool({ + host: dbConfig.host, + port: dbConfig.port, + user: dbConfig.user, + password: dbConfig.password, + database: dbConfig.database, + max: dbConfig.max, + ssl: dbConfig.ssl + }); + } + + async insertRows({ schema, table, rows }) { + if (!rows || rows.length === 0) { + return; + } + const statement = ` + INSERT INTO ${schema}.${table} (${columns.join(', ')}) + SELECT * + FROM UNNEST( + $1::text[], + $2::int8[], + $3::int8[], + $4::int2[], + $5::text[], + $6::text[], + $7::text[], + $8::text[], + $9::text[], + $10::text[], + $11::text[] + ) + ON CONFLICT DO NOTHING + `; + try { + const params = columns.map((column) => rows.map((row) => row[column] ?? 
null)); + await this.pool.query(statement, params); + } catch (error) { + logger.error('Database insert failed', { + error: error?.message, + schema, + table, + rowsLength: rows.length + }); + throw error; + } + } + + async checkConnection() { + let client; + try { + const connectPromise = this.pool.connect(); + + // Create a timeout promise that rejects after 5000ms + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error('Connection timeout')), 5000); + }); + + try { + // Race the connection attempt against the timeout + client = await Promise.race([connectPromise, timeoutPromise]); + } catch (raceError) { + // If we timed out, the connectPromise might still resolve later. + // We must ensure that if it does, the client is released back to the pool immediately. + connectPromise.then(c => c.release()).catch(() => {}); + throw raceError; + } + + await client.query('SELECT 1'); + return true; + } catch (err) { + logger.error('Database check connection failed', { error: err.message }); + return false; + } finally { + if (client) { + client.release(); + } + } + } + + async close() { + await this.pool.end(); + } +} + +const dbManager = new DatabaseManager(config.db); +export default dbManager; diff --git a/docs/template/bls-onoffline-backend/src/db/g5DatabaseManager.js b/docs/template/bls-onoffline-backend/src/db/g5DatabaseManager.js new file mode 100644 index 0000000..99c8a01 --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/db/g5DatabaseManager.js @@ -0,0 +1,121 @@ +import pg from 'pg'; +import { config } from '../config/config.js'; +import { logger } from '../utils/logger.js'; + +const { Pool } = pg; + +const g5Columns = [ + 'ts_ms', + 'write_ts_ms', + 'hotel_id', + 'mac', + 'device_id', + 'room_id', + 'ip', + 'current_status', + 'launcher_version', + 'reboot_reason', + 'record_source' +]; + +export class G5DatabaseManager { + constructor(dbConfig) { + if (!dbConfig.enabled) return; + this.pool = new Pool({ + host: 
dbConfig.host, + port: dbConfig.port, + user: dbConfig.user, + password: dbConfig.password, + database: dbConfig.database, + max: dbConfig.max, + ssl: dbConfig.ssl + }); + } + + async insertRows({ schema, table, rows }) { + if (!this.pool || !rows || rows.length === 0) { + return; + } + + const statement = ` + INSERT INTO ${schema}.${table} (${g5Columns.join(', ')}) + SELECT * + FROM UNNEST( + $1::int8[], + $2::int8[], + $3::int2[], + $4::text[], + $5::text[], + $6::text[], + $7::text[], + $8::int2[], + $9::text[], + $10::text[], + $11::text[] + ) + ON CONFLICT DO NOTHING + `; + + try { + const params = g5Columns.map((column) => { + return rows.map((row) => { + if (column === 'record_source') { + return 'CRICS'; + } + if (column === 'current_status') { + // current_status in G5 is int2 + if (row.current_status === 'on') return 1; + if (row.current_status === 'off') return 2; + return 0; + } + return row[column] ?? null; + }); + }); + + await this.pool.query(statement, params); + } catch (error) { + logger.error('G5 Database insert failed', { + error: error?.message, + schema, + table, + rowsLength: rows.length + }); + throw error; + } + } + + async checkConnection() { + if (!this.pool) return true; // Pretend it's ok if disabled + let client; + try { + const connectPromise = this.pool.connect(); + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error('Connection timeout')), 5000); + }); + try { + client = await Promise.race([connectPromise, timeoutPromise]); + } catch (raceError) { + connectPromise.then(c => c.release()).catch(() => { }); + throw raceError; + } + await client.query('SELECT 1'); + return true; + } catch (err) { + logger.error('G5 Database check connection failed', { error: err.message }); + return false; + } finally { + if (client) { + client.release(); + } + } + } + + async close() { + if (this.pool) { + await this.pool.end(); + } + } +} + +const g5DbManager = new G5DatabaseManager(config.g5db); +export default 
g5DbManager; diff --git a/docs/template/bls-onoffline-backend/src/index.js b/docs/template/bls-onoffline-backend/src/index.js new file mode 100644 index 0000000..e060710 --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/index.js @@ -0,0 +1,469 @@ +import cron from 'node-cron'; +import { config } from './config/config.js'; +import dbManager from './db/databaseManager.js'; +import g5DbManager from './db/g5DatabaseManager.js'; +import { createKafkaConsumers } from './kafka/consumer.js'; +import { parseMessageToRows } from './processor/index.js'; +import { createRedisClient } from './redis/redisClient.js'; +import { RedisIntegration } from './redis/redisIntegration.js'; +import { buildErrorQueueKey, enqueueError, startErrorRetryWorker } from './redis/errorQueue.js'; +import { MetricCollector } from './utils/metricCollector.js'; +import { logger } from './utils/logger.js'; + +const bootstrap = async () => { + // Log startup config (masked) + logger.info('Starting application with config', { + env: process.env.NODE_ENV, + db: { + host: config.db.host, + port: config.db.port, + user: config.db.user, + database: config.db.database, + schema: config.db.schema + }, + kafka: { + brokers: config.kafka.brokers, + topic: config.kafka.topic, + groupId: config.kafka.groupId + }, + redis: { + host: config.redis.host, + port: config.redis.port + } + }); + + // Metric Collector + const metricCollector = new MetricCollector(); + + // 1.1 Setup Metric Reporting Cron Job (Every minute) + // Moved after redisIntegration initialization + + const redisClient = await createRedisClient(config.redis); + const redisIntegration = new RedisIntegration( + redisClient, + config.redis.projectName, + config.redis.apiBaseUrl + ); + redisIntegration.startHeartbeat(); + + // 1.1 Setup Metric Reporting Cron Job (Every minute) + cron.schedule('* * * * *', async () => { + const metrics = metricCollector.getAndReset(); + const flushAvgMs = metrics.batch_flush_count > 0 ? 
(metrics.batch_flush_ms_sum / metrics.batch_flush_count).toFixed(1) : '0.0'; + const dbAvgMs = metrics.db_insert_count > 0 ? (metrics.db_insert_ms_sum / metrics.db_insert_count).toFixed(1) : '0.0'; + const report = `[Metrics] Pulled:${metrics.kafka_pulled} ParseErr:${metrics.parse_error} Inserted:${metrics.db_inserted} Failed:${metrics.db_failed} FlushAvg:${flushAvgMs}ms DbAvg:${dbAvgMs}ms`; + console.log(report); + logger.info(report); + + try { + await redisIntegration.info('Minute Metrics', metrics); + } catch (err) { + logger.error('Failed to report metrics to Redis', { error: err?.message }); + } + }); + + const errorQueueKey = buildErrorQueueKey(config.redis.projectName); + + const handleError = async (error, message) => { + logger.error('Kafka processing error', { + error: error?.message, + type: error?.type, + stack: error?.stack + }); + try { + await redisIntegration.error('Kafka processing error', { + module: 'kafka', + stack: error?.stack || error?.message + }); + } catch (redisError) { + logger.error('Redis error log failed', { error: redisError?.message }); + } + if (message) { + const messageValue = Buffer.isBuffer(message.value) + ? message.value.toString('utf8') + : message.value; + try { + await enqueueError(redisClient, errorQueueKey, { + attempts: 0, + value: messageValue, + meta: { + topic: message.topic, + partition: message.partition, + offset: message.offset, + key: message.key + }, + timestamp: Date.now() + }); + } catch (enqueueError) { + logger.error('Enqueue error payload failed', { error: enqueueError?.message }); + } + } + }; + + const configuredBatchSize = Number.isFinite(config.kafka.batchSize) ? config.kafka.batchSize : 1000; + const configuredBatchTimeoutMs = Number.isFinite(config.kafka.batchTimeoutMs) ? config.kafka.batchTimeoutMs : 20; + const configuredMaxInFlight = Number.isFinite(config.kafka.maxInFlight) ? 
config.kafka.maxInFlight : 5000; + + const BATCH_SIZE = Math.max(10, Math.min(configuredBatchSize, configuredMaxInFlight)); + const BATCH_TIMEOUT_MS = Math.max(1, configuredBatchTimeoutMs); + const commitOnAttempt = config.kafka.commitOnAttempt === true; + + const batchStates = new Map(); + + const partitionKeyFromMessage = (message) => { + if (message?.topic !== undefined && message?.partition !== undefined) { + return `${message.topic}-${message.partition}`; + } + return 'retry'; + }; + + const dayKeyFromTsMs = (tsMs) => { + const numeric = typeof tsMs === 'string' ? Number(tsMs) : tsMs; + if (!Number.isFinite(numeric)) return null; + const d = new Date(numeric); + if (Number.isNaN(d.getTime())) return null; + const yyyy = d.getFullYear(); + const mm = String(d.getMonth() + 1).padStart(2, '0'); + const dd = String(d.getDate()).padStart(2, '0'); + return `${yyyy}${mm}${dd}`; + }; + + const getBatchState = (key) => { + if (!batchStates.has(key)) { + batchStates.set(key, { items: [], timer: null, flushing: null }); + } + return batchStates.get(key); + }; + + const isDbConnectionError = (err) => { + const code = err?.code; + if (typeof code === 'string') { + const networkCodes = new Set([ + 'ECONNREFUSED', + 'ECONNRESET', + 'EPIPE', + 'ETIMEDOUT', + 'ENOTFOUND', + 'EHOSTUNREACH', + 'ENETUNREACH', + '57P03', + '08006', + '08001', + '08000', + '08003' + ]); + if (networkCodes.has(code)) return true; + } + + const message = typeof err?.message === 'string' ? 
err.message : '';
    if (!message) return false;
    const lower = message.toLowerCase();
    // Heuristic: these driver/server message fragments indicate connection-level
    // failures that are worth retrying (as opposed to data errors, which are not).
    return (
      lower.includes('connection timeout') ||
      lower.includes('connection terminated') ||
      lower.includes('connection refused') ||
      lower.includes('terminating connection') ||
      lower.includes('econnrefused') ||
      lower.includes('econnreset') ||
      lower.includes('etimedout') ||
      lower.includes('could not connect') ||
      lower.includes('the database system is starting up') ||
      lower.includes('no pg_hba.conf entry')
    );
  };

  // Insert `rows` into the primary DB, retrying forever on connection-level
  // errors (polls until the DB is reachable again, then retries the same batch).
  // Non-connection errors are rethrown. The optional G5 mirror insert is
  // fire-and-forget: its failure is logged but never blocks the primary path.
  const insertRowsWithRetry = async (rows) => {
    const startedAt = Date.now();
    while (true) {
      try {
        const promises = [
          dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows })
        ];
        if (config.g5db.enabled) {
          promises.push(g5DbManager.insertRows({ schema: config.g5db.schema, table: config.g5db.table, rows }).catch(e => {
            logger.error('G5 Database insert failed but non-blocking', { error: e.message });
          }));
        }
        await Promise.all(promises);

        metricCollector.increment('db_insert_count', 1);
        metricCollector.increment('db_insert_ms_sum', Date.now() - startedAt);
        return;
      } catch (err) {
        if (isDbConnectionError(err)) {
          logger.error('Database offline during batch insert. Retrying in 5s...', { error: err.message });
          await new Promise(r => setTimeout(r, 5000));
          // Poll until connectivity returns, then retry the whole batch.
          while (!(await dbManager.checkConnection())) {
            logger.warn('Database still offline. Waiting 5s...');
            await new Promise(r => setTimeout(r, 5000));
          }
          continue;
        }
        throw err;
      }
    }
  };

  // Single-attempt variant (used when commitOnAttempt is set): any error
  // propagates to the caller instead of retrying.
  const insertRowsOnce = async (rows) => {
    const startedAt = Date.now();
    const promises = [
      dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows })
    ];
    if (config.g5db.enabled) {
      promises.push(g5DbManager.insertRows({ schema: config.g5db.schema, table: config.g5db.table, rows }).catch(e => {
        logger.error('G5 Database insert failed in insertOnce', { error: e.message });
      }));
    }
    await Promise.all(promises);
    metricCollector.increment('db_insert_count', 1);
    metricCollector.increment('db_insert_ms_sum', Date.now() - startedAt);
  };

  // Mark every queued item as successfully inserted: bump overall /
  // per-partition / per-day counters and resolve each item's promise so the
  // Kafka offset can advance.
  const resolveInsertedItems = (partitionKey, items) => {
    let insertedRows = 0;
    for (const p of items) {
      insertedRows += p.rows.length;
      // Day bucket is derived from the first row only.
      const dayKey = dayKeyFromTsMs(p.rows?.[0]?.ts_ms);
      if (dayKey) {
        metricCollector.incrementKeyed('db_inserted_by_day', dayKey, p.rows.length);
      }
      p.item.resolve();
    }
    metricCollector.increment('db_inserted', insertedRows);
    metricCollector.incrementKeyed('db_inserted_by_partition', partitionKey, insertedRows);
  };

  // Record a permanently failed item: metrics, hand off to handleError
  // (Redis error queue), then resolve anyway so the consumer does not stall.
  const handleFailedItem = async (partitionKey, p, err) => {
    metricCollector.increment('db_failed');
    metricCollector.incrementKeyed('db_failed_by_partition', partitionKey, 1);
    const dayKey = dayKeyFromTsMs(p.rows?.[0]?.ts_ms);
    if (dayKey) {
      metricCollector.incrementKeyed('db_failed_by_day', dayKey, 1);
    }
    await handleError(err, p.item.message);
    p.item.resolve();
  };

  // Insert a batch of parsed items. In commitOnAttempt mode: one try, all-or-
  // nothing. Otherwise: retry-insert the whole batch; on a non-connection
  // failure, bisect recursively until the single offending item is isolated
  // and error-queued, so one bad row does not sink the whole batch.
  const insertItemsDegraded = async (partitionKey, items) => {
    if (items.length === 0) return;
    const rows = items.flatMap(p => p.rows);
    if (commitOnAttempt) {
      try {
        await insertRowsOnce(rows);
        resolveInsertedItems(partitionKey, items);
      } catch (err) {
        for (const item of items) {
          await handleFailedItem(partitionKey, item, err);
        }
      }
      return;
    }
    try {
      await insertRowsWithRetry(rows);
      resolveInsertedItems(partitionKey, items);
      return;
    } catch (err) {
      if (items.length === 1) {
        try {
          await insertRowsWithRetry(items[0].rows);
          resolveInsertedItems(partitionKey, items);
        } catch (innerErr) {
          await handleFailedItem(partitionKey, items[0], innerErr);
        }
        return;
      }
      const mid = Math.floor(items.length / 2);
      await insertItemsDegraded(partitionKey, items.slice(0, mid));
      await insertItemsDegraded(partitionKey, items.slice(mid));
    }
  };

  // Flush the pending batch for one partition key. `state.flushing` holds the
  // in-progress promise so concurrent triggers coalesce onto the same flush.
  const flushBatchForKey = async (partitionKey) => {
    const state = getBatchState(partitionKey);
    if (state.flushing) return state.flushing;

    state.flushing = (async () => {
      if (state.timer) {
        clearTimeout(state.timer);
        state.timer = null;
      }

      if (state.items.length === 0) return;

      const startedAt = Date.now();
      // Take ownership of the current batch; new arrivals go to a fresh list.
      const currentBatch = state.items;
      state.items = [];

      const pendingDbItems = [];
      const unresolvedItems = [];

      try {
        // Phase 1: parse/validate. Parse failures are terminal for the item
        // (error-queued + resolved); survivors proceed to the DB phase.
        for (const item of currentBatch) {
          try {
            const rows = parseMessageToRows(item.message);
            pendingDbItems.push({ item, rows });
            unresolvedItems.push(item);
          } catch (err) {
            metricCollector.increment('parse_error');
            metricCollector.incrementKeyed('parse_error_by_partition', partitionKey, 1);
            logger.error('Message processing failed (Parse/Validation)', { error: err.message });
            await handleError(err, item.message);
            item.resolve();
          }
        }

        // Phase 2: insert. Per-day latency is attributed to the day of the
        // first row in the batch.
        if (pendingDbItems.length > 0) {
          const firstTs = pendingDbItems[0]?.rows?.[0]?.ts_ms;
          const dayKey = dayKeyFromTsMs(firstTs);
          if (dayKey) {
            const dayStartMs = Date.now();
            await insertItemsDegraded(partitionKey, pendingDbItems);
            metricCollector.incrementKeyed('db_insert_ms_sum_by_day', dayKey, Date.now() - dayStartMs);
          } else {
            await insertItemsDegraded(partitionKey, pendingDbItems);
          }
        }

        metricCollector.increment('batch_flush_count', 1);
        metricCollector.increment('batch_flush_ms_sum', Date.now() - startedAt);
      } catch (err) {
        if (!commitOnAttempt && isDbConnectionError(err)) {
          // Connection-level failure: put the unprocessed items back at the
          // head of the queue and retry the flush in 5s.
          state.items = unresolvedItems.concat(state.items);
          if (!state.timer) {
            state.timer = setTimeout(() => {
              state.timer = null;
              flushBatchForKey(partitionKey);
            }, 5000);
          }
          return;
        }

        logger.error('Batch flush failed (non-network). Marking as consumed', {
          error: err?.message,
          partitionKey,
          batchSize: currentBatch.length
        });

        // Non-retryable: best-effort error-queue each item, then resolve so
        // the Kafka offsets can still advance.
        for (const item of unresolvedItems) {
          try {
            await handleError(err, item.message);
          } catch { }
          item.resolve();
        }
      }
    })().finally(() => {
      state.flushing = null;
      // Items may have accumulated while flushing: flush immediately if the
      // batch is already full, otherwise arm the timeout timer.
      if (state.items.length > 0) {
        if (state.items.length >= BATCH_SIZE) {
          flushBatchForKey(partitionKey);
        } else if (!state.timer) {
          state.timer = setTimeout(() => {
            state.timer = null;
            flushBatchForKey(partitionKey);
          }, BATCH_TIMEOUT_MS);
        }
      }
    });

    return state.flushing;
  };

  // Entry point for every Kafka message: enqueue into the per-partition batch
  // and return a promise that resolves once the message has been fully handled
  // (inserted or error-queued) — the consumer commits its offset on resolve.
  const handleMessage = (message) => {
    if (message.topic) {
      metricCollector.increment('kafka_pulled');
      metricCollector.incrementKeyed('kafka_pulled_by_partition', `${message.topic}-${message.partition}`, 1);
    }

    // Verbose per-message logging, intentionally disabled:
    // const messageValue = Buffer.isBuffer(message.value) ? message.value.toString('utf8') : message.value;
    // const messageKey = Buffer.isBuffer(message.key) ? message.key.toString('utf8') : message.key;
    // const logDetails = {
    //   topic: message.topic,
    //   partition: message.partition,
    //   offset: message.offset,
    //   key: messageKey,
    //   value: config.kafka.logMessages ? messageValue : undefined,
    //   valueLength: !config.kafka.logMessages && typeof messageValue === 'string' ? messageValue.length : null
    // };
    // logger.info('Kafka message received', logDetails);

    const partitionKey = partitionKeyFromMessage(message);
    const state = getBatchState(partitionKey);

    return new Promise((resolve, reject) => {
      state.items.push({ message, resolve, reject });
      if (state.items.length >= BATCH_SIZE) {
        flushBatchForKey(partitionKey);
      } else if (!state.timer) {
        state.timer = setTimeout(() => {
          state.timer = null;
          flushBatchForKey(partitionKey);
        }, BATCH_TIMEOUT_MS);
      }
    });
  };

  const consumers = createKafkaConsumers({
    kafkaConfig: config.kafka,
    onMessage: handleMessage,
    onError: handleError
  });

  // Start retry worker (non-blocking): replays items from the Redis error
  // queue through the same handleMessage pipeline.
  // NOTE(review): replayed items carry only { value }, no topic/partition, so
  // they bypass the kafka_pulled metrics above — presumably intentional.
  startErrorRetryWorker({
    client: redisClient,
    queueKey: errorQueueKey,
    redisIntegration,
    handler: async (item) => {
      if (!item?.value) {
        throw new Error('Missing value in retry payload');
      }
      await handleMessage({ value: item.value });
    }
  }).catch(err => {
    logger.error('Retry worker failed', { error: err?.message });
  });

  // Graceful Shutdown Logic
  const shutdown = async (signal) => {
    logger.info(`Received ${signal}, shutting down...`);

    try {
      // 1. Close Kafka Consumer (force = true, wait for each close callback)
      if (consumers && consumers.length > 0) {
        await Promise.all(consumers.map(c => new Promise((resolve) => c.close(true, resolve))));
        logger.info('Kafka consumer closed', { count: consumers.length });
      }

      // 2. Stop Redis Heartbeat (if method exists, otherwise just close client)
      // redisIntegration.stopHeartbeat(); // Assuming implementation or just rely on client close

      // 3. Close Redis Client
      await redisClient.quit();
      logger.info('Redis client closed');

      // 4. Close Database Pools
Close Database Pools + await dbManager.close(); + await g5DbManager.close(); + logger.info('Database connection closed'); + + process.exit(0); + } catch (err) { + logger.error('Error during shutdown', { error: err?.message }); + process.exit(1); + } + }; + + process.on('SIGTERM', () => shutdown('SIGTERM')); + process.on('SIGINT', () => shutdown('SIGINT')); +}; + +bootstrap().catch((error) => { + logger.error('Service bootstrap failed', { error: error?.message }); + process.exit(1); +}); diff --git a/docs/template/bls-onoffline-backend/src/kafka/consumer.js b/docs/template/bls-onoffline-backend/src/kafka/consumer.js new file mode 100644 index 0000000..4c14cc3 --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/kafka/consumer.js @@ -0,0 +1,175 @@ +import kafka from 'kafka-node'; +import { logger } from '../utils/logger.js'; + +const { ConsumerGroup } = kafka; + +import { OffsetTracker } from './offsetTracker.js'; + +const createOneConsumer = ({ kafkaConfig, onMessage, onError, instanceIndex }) => { + const kafkaHost = kafkaConfig.brokers.join(','); + const clientId = instanceIndex === 0 ? kafkaConfig.clientId : `${kafkaConfig.clientId}-${instanceIndex}`; + const id = `${clientId}-${process.pid}-${Date.now()}`; + const maxInFlight = Number.isFinite(kafkaConfig.maxInFlight) ? kafkaConfig.maxInFlight : 5000; + const commitIntervalMs = Number.isFinite(kafkaConfig.commitIntervalMs) ? 
kafkaConfig.commitIntervalMs : 200; + let inFlight = 0; + + const tracker = new OffsetTracker(); + let pendingCommits = new Map(); // key: `${topic}-${partition}` -> { topic, partition, offset } + let commitTimer = null; + + const flushCommits = () => { + if (pendingCommits.size === 0) return; + const batch = pendingCommits; + pendingCommits = new Map(); + + consumer.sendOffsetCommitRequest( + Array.from(batch.values()), + (err) => { + if (err) { + for (const [k, v] of batch.entries()) { + pendingCommits.set(k, v); + } + logger.error('Kafka commit failed', { error: err?.message, count: batch.size }); + } + } + ); + }; + + const scheduleCommitFlush = () => { + if (commitTimer) return; + commitTimer = setTimeout(() => { + commitTimer = null; + flushCommits(); + }, commitIntervalMs); + }; + + const consumer = new ConsumerGroup( + { + kafkaHost, + groupId: kafkaConfig.groupId, + clientId, + id, + fromOffset: 'earliest', + protocol: ['roundrobin'], + outOfRangeOffset: 'latest', + autoCommit: false, + autoCommitIntervalMs: kafkaConfig.autoCommitIntervalMs, + fetchMaxBytes: kafkaConfig.fetchMaxBytes, + fetchMinBytes: kafkaConfig.fetchMinBytes, + fetchMaxWaitMs: kafkaConfig.fetchMaxWaitMs, + sasl: kafkaConfig.sasl + }, + kafkaConfig.topic + ); + + const tryResume = () => { + if (inFlight < maxInFlight && consumer.paused) { + consumer.resume(); + } + }; + + consumer.on('message', (message) => { + inFlight += 1; + tracker.add(message.topic, message.partition, message.offset); + + if (inFlight >= maxInFlight) { + consumer.pause(); + } + Promise.resolve(onMessage(message)) + .then(() => {}) + .catch((error) => { + logger.error('Kafka message handling failed', { error: error?.message }); + if (onError) { + onError(error, message); + } + }) + .finally(() => { + const commitOffset = tracker.markDone(message.topic, message.partition, message.offset); + if (commitOffset !== null) { + const key = `${message.topic}-${message.partition}`; + pendingCommits.set(key, { + topic: 
message.topic, + partition: message.partition, + offset: commitOffset, + metadata: 'm' + }); + scheduleCommitFlush(); + } + inFlight -= 1; + tryResume(); + }); + }); + + consumer.on('error', (error) => { + logger.error('Kafka consumer error', { error: error?.message }); + if (onError) { + onError(error); + } + }); + + consumer.on('connect', () => { + logger.info(`Kafka Consumer connected`, { + groupId: kafkaConfig.groupId, + clientId: clientId + }); + }); + + consumer.on('rebalancing', () => { + logger.info(`Kafka Consumer rebalancing`, { + groupId: kafkaConfig.groupId, + clientId: clientId + }); + tracker.clear(); + pendingCommits.clear(); + if (commitTimer) { + clearTimeout(commitTimer); + commitTimer = null; + } + }); + + consumer.on('rebalanced', () => { + logger.info('Kafka Consumer rebalanced', { clientId, groupId: kafkaConfig.groupId }); + }); + + consumer.on('error', (err) => { + logger.error('Kafka Consumer Error', { error: err.message }); + }); + + consumer.on('offsetOutOfRange', (err) => { + logger.warn('Offset out of range', { error: err.message, topic: err.topic, partition: err.partition }); + }); + + + consumer.on('offsetOutOfRange', (error) => { + logger.warn(`Kafka Consumer offset out of range`, { + error: error?.message, + groupId: kafkaConfig.groupId, + clientId: clientId + }); + }); + + consumer.on('close', () => { + if (commitTimer) { + clearTimeout(commitTimer); + commitTimer = null; + } + flushCommits(); + logger.warn(`Kafka Consumer closed`, { + groupId: kafkaConfig.groupId, + clientId: clientId + }); + }); + + return consumer; +}; + +export const createKafkaConsumers = ({ kafkaConfig, onMessage, onError }) => { + const instances = Number.isFinite(kafkaConfig.consumerInstances) ? 
kafkaConfig.consumerInstances : 1; + const count = Math.max(1, instances); + return Array.from({ length: count }, (_, idx) => + createOneConsumer({ kafkaConfig, onMessage, onError, instanceIndex: idx }) + ); +}; + +export const createKafkaConsumer = ({ kafkaConfig, onMessage, onError }) => + createKafkaConsumers({ kafkaConfig, onMessage, onError })[0]; diff --git a/docs/template/bls-onoffline-backend/src/kafka/offsetTracker.js b/docs/template/bls-onoffline-backend/src/kafka/offsetTracker.js new file mode 100644 index 0000000..7ba557c --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/kafka/offsetTracker.js @@ -0,0 +1,53 @@ +export class OffsetTracker { + constructor() { + // Map }> + this.partitions = new Map(); + } + + // Called when a message is received (before processing) + add(topic, partition, offset) { + const key = `${topic}-${partition}`; + if (!this.partitions.has(key)) { + this.partitions.set(key, { nextCommitOffset: null, done: new Set() }); + } + const state = this.partitions.get(key); + const numericOffset = Number(offset); + if (!Number.isFinite(numericOffset)) return; + if (state.nextCommitOffset === null) { + state.nextCommitOffset = numericOffset; + } else if (numericOffset < state.nextCommitOffset) { + state.nextCommitOffset = numericOffset; + } + } + + // Called when a message is successfully processed + // Returns the next offset to commit (if any advancement is possible), or null + markDone(topic, partition, offset) { + const key = `${topic}-${partition}`; + const state = this.partitions.get(key); + if (!state) return null; + + const numericOffset = Number(offset); + if (!Number.isFinite(numericOffset)) return null; + + state.done.add(numericOffset); + + if (state.nextCommitOffset === null) { + state.nextCommitOffset = numericOffset; + } + + let advanced = false; + while (state.nextCommitOffset !== null && state.done.has(state.nextCommitOffset)) { + state.done.delete(state.nextCommitOffset); + state.nextCommitOffset += 1; + advanced = 
true; + } + + if (!advanced) return null; + return state.nextCommitOffset; + } + + clear() { + this.partitions.clear(); + } +} diff --git a/docs/template/bls-onoffline-backend/src/processor/index.js b/docs/template/bls-onoffline-backend/src/processor/index.js new file mode 100644 index 0000000..d8d4840 --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/processor/index.js @@ -0,0 +1,142 @@ +import { createGuid } from '../utils/uuid.js'; +import { kafkaPayloadSchema } from '../schema/kafkaPayload.js'; + +const parseKafkaPayload = (value) => { + const raw = Buffer.isBuffer(value) ? value.toString('utf8') : value; + if (typeof raw !== 'string') { + throw new Error('Invalid kafka message value'); + } + return JSON.parse(raw); +}; + +const normalizeText = (value, maxLength) => { + if (value === undefined || value === null) { + return null; + } + const str = String(value); + if (maxLength && str.length > maxLength) { + return str.substring(0, maxLength); + } + return str; +}; + +export const buildRowsFromMessageValue = (value) => { + const payload = parseKafkaPayload(value); + return buildRowsFromPayload(payload); +}; + +export const buildRowsFromPayload = (rawPayload) => { + const payload = kafkaPayloadSchema.parse(rawPayload); + + // Database limit is VARCHAR(255) + const rebootReason = normalizeText(payload.RebootReason, 255); + const currentStatusRaw = normalizeText(payload.CurrentStatus, 255); + const hasRebootReason = rebootReason !== null && rebootReason !== ''; + const currentStatus = hasRebootReason ? 
'on' : currentStatusRaw; + + // Derive timestamp: UnixTime -> CurrentTime -> Date.now() + let tsMs = payload.UnixTime; + + // Heuristic: If timestamp is small (e.g., < 100000000000), assume it's seconds and convert to ms + if (typeof tsMs === 'number' && tsMs < 100000000000) { + tsMs = tsMs * 1000; + } + + if (!tsMs && payload.CurrentTime) { + const parsed = Date.parse(payload.CurrentTime); + if (!isNaN(parsed)) { + tsMs = parsed; + } + } + if (!tsMs) { + tsMs = Date.now(); + } + + // Ensure PK fields are not null + const mac = normalizeText(payload.MAC) || ''; + const deviceId = normalizeText(payload.HostNumber) || ''; + const roomId = normalizeText(payload.RoomNumber) || ''; + + // Handle hotel_id boundary for PostgreSQL smallint (-32768 to 32767) + let hotelId = payload.HotelCode; + if (typeof hotelId !== 'number' || Number.isNaN(hotelId) || hotelId < -32768 || hotelId > 32767) { + hotelId = 0; + } + + const row = { + guid: createGuid(), + ts_ms: tsMs, + write_ts_ms: Date.now(), + hotel_id: hotelId, + mac: mac, + device_id: deviceId, + room_id: roomId, + ip: normalizeText(payload.EndPoint), + current_status: currentStatus, + launcher_version: normalizeText(payload.LauncherVersion, 255), + reboot_reason: rebootReason + }; + + return [row]; +}; + +export const parseMessageToRows = (message) => { + const rawValue = message.value.toString(); + // logger.info('Processing message', { offset: message.offset, rawValuePreview: rawValue.substring(0, 100) }); + + let payload; + try { + payload = JSON.parse(rawValue); + } catch (e) { + const error = new Error(`JSON Parse Error: ${e.message}`); + error.type = 'PARSE_ERROR'; + throw error; + } + + // logger.info('Payload parsed', { payload }); + + const validationResult = kafkaPayloadSchema.safeParse(payload); + + if (!validationResult.success) { + const error = new Error(`Schema Validation Failed: ${JSON.stringify(validationResult.error.errors)}`); + error.type = 'VALIDATION_ERROR'; + throw error; + } + + return 
buildRowsFromPayload(payload); };

// Parse a message and insert its rows directly; DB failures are tagged
// DB_ERROR with a small diagnostic context. Returns the row count inserted.
export const processKafkaMessage = async ({ message, dbManager, config }) => {
  let rows;
  try {
    rows = parseMessageToRows(message);
  } catch (error) {
    throw error;
  }

  try {
    await dbManager.insertRows({ schema: config.db.schema, table: config.db.table, rows });
  } catch (error) {
    error.type = 'DB_ERROR';
    const sample = rows?.[0];
    error.dbContext = {
      rowsLength: rows?.length || 0,
      sampleRow: sample
        ? {
            guid: sample.guid,
            ts_ms: sample.ts_ms,
            mac: sample.mac,
            device_id: sample.device_id,
            room_id: sample.room_id,
            current_status: sample.current_status
          }
        : null
    };
    throw error;
  }

  return rows.length;
};

// --- file: src/processor/udpParser.js ---

// Strip an optional 0x prefix and all whitespace from a hex string; left-pad
// to an even digit count so Buffer.from(…, 'hex') keeps every nibble.
const normalizeHex = (hex) => {
  if (typeof hex !== 'string') {
    return '';
  }
  let cleaned = hex.trim().replace(/^0x/i, '').replace(/\s+/g, '');
  if (cleaned.length % 2 === 1) {
    cleaned = `0${cleaned}`;
  }
  return cleaned;
};

// Format a byte as a 0x-prefixed two-digit hex string.
const toHex = (value) => `0x${value.toString(16).padStart(2, '0')}`;

// Big-endian 16-bit read.
const readUInt16 = (buffer, offset) => buffer.readUInt16BE(offset);

// Parse a 0x36 status report frame:
//   byte 0          system lock status
//   byte 7          device report count
//   per device (6B) type(1) addr(1) loop(2 BE) data(2 BE)
//   then fault count(1), per fault (6B): type(1) addr(1) loop(2 BE)
//                                        errType(1) errData(1)
// Truncated buffers yield only the complete records that fit; header fields
// missing from short buffers come back as null.
export const parse0x36 = (udpRaw) => {
  const cleaned = normalizeHex(udpRaw);
  const buffer = cleaned ? Buffer.from(cleaned, 'hex') : Buffer.alloc(0);
  const sysLockStatus = buffer.length > 0 ? buffer[0] : null;
  const reportCount = buffer.length > 7 ? buffer[7] : null;
  let offset = 8;
  const devices = [];
  for (let i = 0; i < (reportCount || 0) && offset + 5 < buffer.length; i += 1) {
    devices.push({
      dev_type: buffer[offset],
      dev_addr: buffer[offset + 1],
      dev_loop: readUInt16(buffer, offset + 2),
      dev_data: readUInt16(buffer, offset + 4)
    });
    offset += 6;
  }
  const faultCount = offset < buffer.length ? buffer[offset] : null;
  offset += 1;
  const faults = [];
  for (let i = 0; i < (faultCount || 0) && offset + 5 < buffer.length; i += 1) {
    faults.push({
      fault_dev_type: buffer[offset],
      fault_dev_addr: buffer[offset + 1],
      fault_dev_loop: readUInt16(buffer, offset + 2),
      error_type: buffer[offset + 4],
      error_data: buffer[offset + 5]
    });
    offset += 6;
  }
  return {
    sysLockStatus,
    reportCount,
    faultCount,
    devices,
    faults
  };
};

// Parse a 0x0f downlink control frame: count(1), then per entry (6B):
// type(1) addr(1) loop(2 BE) value(2).
export const parse0x0fDownlink = (udpRaw) => {
  const cleaned = normalizeHex(udpRaw);
  const buffer = cleaned ? Buffer.from(cleaned, 'hex') : Buffer.alloc(0);
  const controlCount = buffer.length > 0 ? buffer[0] : null;
  let offset = 1;
  const controlParams = [];
  for (let i = 0; i < (controlCount || 0) && offset + 5 < buffer.length; i += 1) {
    const typeValue = readUInt16(buffer, offset + 4);
    controlParams.push({
      dev_type: buffer[offset],
      dev_addr: buffer[offset + 1],
      loop: readUInt16(buffer, offset + 2),
      type: typeValue,
      // NOTE(review): `type` is read big-endian, which makes buffer[offset+4]
      // the HIGH byte — the _l/_h field names look swapped relative to that;
      // confirm against the wire protocol spec before relying on them.
      type_l: buffer[offset + 4],
      type_h: buffer[offset + 5]
    });
    offset += 6;
  }
  return {
    controlCount,
    controlParams
  };
};

// Parse a 0x0f ACK: the second byte is the ack code (as "0xNN"), if present.
export const parse0x0fAck = (udpRaw) => {
  const cleaned = normalizeHex(udpRaw);
  const buffer = cleaned ? Buffer.from(cleaned, 'hex') : Buffer.alloc(0);
  const ackCode = buffer.length > 1 ? toHex(buffer[1]) : null;
  return { ackCode };
};

// --- file: src/redis/errorQueue.js ---

import { logger } from '../utils/logger.js';

export const buildErrorQueueKey = (projectName) => `${projectName}_error_queue`;

// Push one failed item onto the Redis error list (JSON-serialized).
export const enqueueError = async (client, queueKey, payload) => {
  try {
    await client.rPush(queueKey, JSON.stringify(payload));
  } catch (error) {
    logger.error('Redis enqueue error failed', { error: error?.message });
    throw error;
  }
};

// Blocking retry loop: BLPOP one item at a time, run `handler`, and on failure
// re-queue it with attempts+1 until maxAttempts, after which the item is only
// reported via redisIntegration and dropped.
// NOTE(review): an item popped here is lost if the process dies mid-handling —
// retries are at-most-once.
export const startErrorRetryWorker = async ({
  client,
  queueKey,
  handler,
  redisIntegration,
  maxAttempts = 5
}) => {
  while (true) {
    const result = await client.blPop(queueKey, 0); // 0 = block indefinitely
    const raw = result?.element;
    if (!raw) {
      continue;
    }
    let item;
    try {
      item = JSON.parse(raw);
    } catch (error) {
      // Unparseable payloads are reported and dropped, never re-queued.
      logger.error('Invalid error payload', { error: error?.message });
      await redisIntegration.error('Invalid error payload', { module: 'redis', stack: error?.message });
      continue;
    }
    const attempts = item.attempts || 0;
    try {
      await handler(item);
    } catch (error) {
      logger.error('Retry handler failed', { error: error?.message, stack: error?.stack });
      const nextPayload = {
        ...item,
        attempts: attempts + 1,
        lastError: error?.message,
        lastAttemptAt: Date.now()
      };
      if (nextPayload.attempts >= maxAttempts) {
        await redisIntegration.error('Retry attempts exceeded', { module: 'retry', stack: JSON.stringify(nextPayload) });
      } else {
        await enqueueError(client, queueKey, nextPayload);
      }
    }
  }
};

// --- file: src/redis/redisClient.js (continues in next hunk) ---
b/docs/template/bls-onoffline-backend/src/redis/redisClient.js @@ -0,0 +1,14 @@ +import { createClient } from 'redis'; + +export const createRedisClient = async (config) => { + const client = createClient({ + socket: { + host: config.host, + port: config.port + }, + password: config.password, + database: config.db + }); + await client.connect(); + return client; +}; diff --git a/docs/template/bls-onoffline-backend/src/redis/redisIntegration.js b/docs/template/bls-onoffline-backend/src/redis/redisIntegration.js new file mode 100644 index 0000000..4502d16 --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/redis/redisIntegration.js @@ -0,0 +1,40 @@ +export class RedisIntegration { + constructor(client, projectName, apiBaseUrl) { + this.client = client; + this.projectName = projectName; + this.apiBaseUrl = apiBaseUrl; + this.heartbeatKey = '项目心跳'; + this.logKey = `${projectName}_项目控制台`; + } + + async info(message, context) { + const payload = { + timestamp: new Date().toISOString(), + level: 'info', + message, + metadata: context || undefined + }; + await this.client.rPush(this.logKey, JSON.stringify(payload)); + } + + async error(message, context) { + const payload = { + timestamp: new Date().toISOString(), + level: 'error', + message, + metadata: context || undefined + }; + await this.client.rPush(this.logKey, JSON.stringify(payload)); + } + + startHeartbeat() { + setInterval(() => { + const payload = { + projectName: this.projectName, + apiBaseUrl: this.apiBaseUrl, + lastActiveAt: Date.now() + }; + this.client.rPush(this.heartbeatKey, JSON.stringify(payload)); + }, 3000); + } +} diff --git a/docs/template/bls-onoffline-backend/src/schema/kafkaPayload.js b/docs/template/bls-onoffline-backend/src/schema/kafkaPayload.js new file mode 100644 index 0000000..210eba9 --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/schema/kafkaPayload.js @@ -0,0 +1,32 @@ +import { z } from 'zod'; + +const toNumber = (value) => { + if (value === undefined || value === 
null || value === '') { + return value; + } + if (typeof value === 'number') { + return value; + } + const parsed = Number(value); + return Number.isFinite(parsed) ? parsed : value; +}; + +const toStringAllowEmpty = (value) => { + if (value === undefined || value === null) { + return value; + } + return String(value); +}; + +export const kafkaPayloadSchema = z.object({ + HotelCode: z.preprocess(toNumber, z.number()), + MAC: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + HostNumber: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + RoomNumber: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + EndPoint: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + CurrentStatus: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + CurrentTime: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + UnixTime: z.preprocess(toNumber, z.number().nullable()).optional().nullable(), + LauncherVersion: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable(), + RebootReason: z.preprocess(toStringAllowEmpty, z.string().nullable()).optional().nullable() +}); diff --git a/docs/template/bls-onoffline-backend/src/utils/logger.js b/docs/template/bls-onoffline-backend/src/utils/logger.js new file mode 100644 index 0000000..a671e5a --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/utils/logger.js @@ -0,0 +1,21 @@ +const format = (level, message, context) => { + const payload = { + level, + message, + timestamp: Date.now(), + ...(context ? 
{ context } : {}) + }; + return JSON.stringify(payload); +}; + +export const logger = { + info(message, context) { + process.stdout.write(`${format('info', message, context)}\n`); + }, + error(message, context) { + process.stderr.write(`${format('error', message, context)}\n`); + }, + warn(message, context) { + process.stderr.write(`${format('warn', message, context)}\n`); + } +}; diff --git a/docs/template/bls-onoffline-backend/src/utils/metricCollector.js b/docs/template/bls-onoffline-backend/src/utils/metricCollector.js new file mode 100644 index 0000000..dc5b3af --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/utils/metricCollector.js @@ -0,0 +1,43 @@ +export class MetricCollector { + constructor() { + this.reset(); + } + + reset() { + this.metrics = { + kafka_pulled: 0, + parse_error: 0, + db_inserted: 0, + db_failed: 0, + db_insert_count: 0, + db_insert_ms_sum: 0, + batch_flush_count: 0, + batch_flush_ms_sum: 0 + }; + this.keyed = {}; + } + + increment(metric, count = 1) { + if (this.metrics.hasOwnProperty(metric)) { + this.metrics[metric] += count; + } + } + + incrementKeyed(metric, key, count = 1) { + if (!key) return; + if (!this.keyed[metric]) { + this.keyed[metric] = {}; + } + if (!Object.prototype.hasOwnProperty.call(this.keyed[metric], key)) { + this.keyed[metric][key] = 0; + } + this.keyed[metric][key] += count; + } + + getAndReset() { + const current = { ...this.metrics }; + const keyed = JSON.parse(JSON.stringify(this.keyed)); + this.reset(); + return { ...current, keyed }; + } +} diff --git a/docs/template/bls-onoffline-backend/src/utils/uuid.js b/docs/template/bls-onoffline-backend/src/utils/uuid.js new file mode 100644 index 0000000..e76a340 --- /dev/null +++ b/docs/template/bls-onoffline-backend/src/utils/uuid.js @@ -0,0 +1,3 @@ +import { randomUUID } from 'crypto'; + +export const createGuid = () => randomUUID().replace(/-/g, ''); diff --git a/docs/template/bls-onoffline-backend/tests/processor.test.js 
b/docs/template/bls-onoffline-backend/tests/processor.test.js new file mode 100644 index 0000000..a19fbfa --- /dev/null +++ b/docs/template/bls-onoffline-backend/tests/processor.test.js @@ -0,0 +1,45 @@ +import { describe, it, expect } from 'vitest'; +import { buildRowsFromPayload } from '../src/processor/index.js'; + +describe('Processor Logic', () => { + const basePayload = { + HotelCode: '1085', + MAC: '00:1A:2B:3C:4D:5E', + HostNumber: '091123987456', + RoomNumber: '8888房', + EndPoint: '50.2.60.1:6543', + CurrentStatus: 'off', + CurrentTime: '2026-02-02T10:30:00Z', + UnixTime: 1770000235000, + LauncherVersion: '1.0.0' + }; + + it('should validate required fields', () => { + expect(() => buildRowsFromPayload({})).toThrow(); + expect(() => buildRowsFromPayload({ ...basePayload, HotelCode: undefined })).toThrow(); + }); + + it('should use current_status from payload for non-reboot data', () => { + const rows = buildRowsFromPayload({ ...basePayload, RebootReason: null }); + expect(rows).toHaveLength(1); + expect(rows[0].current_status).toBe('off'); + expect(rows[0].reboot_reason).toBeNull(); + }); + + it('should override current_status to on for reboot data', () => { + const rows = buildRowsFromPayload({ ...basePayload, CurrentStatus: 'off', RebootReason: '0x01' }); + expect(rows).toHaveLength(1); + expect(rows[0].current_status).toBe('on'); + expect(rows[0].reboot_reason).toBe('0x01'); + }); + + it('should keep empty optional fields as empty strings', () => { + const rows = buildRowsFromPayload({ + ...basePayload, + LauncherVersion: '', + RebootReason: '' + }); + expect(rows[0].launcher_version).toBe(''); + expect(rows[0].reboot_reason).toBe(''); + }); +}); diff --git a/docs/template/bls-onoffline-backend/vite.config.js b/docs/template/bls-onoffline-backend/vite.config.js new file mode 100644 index 0000000..54d63c4 --- /dev/null +++ b/docs/template/bls-onoffline-backend/vite.config.js @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite'; + +export default 
defineConfig({ + build: { + ssr: 'src/index.js', + outDir: 'dist', + target: 'node18', + rollupOptions: { + external: ['dotenv', 'kafka-node', 'pg', 'redis'] + } + } +});