feat: upgrade heartbeat database to a high-throughput daily-partitioning model (v2)

- Add heartbeat database and table-structure docs describing the heartbeat detail table design and field constraints.
- Add an OpenSpec conformance doc noting points of agreement with, and deviations from, the spec.
- Add a Kafka heartbeat push doc defining the message format and delivery method.
- Update the database creation script to support UTF-8 encoding and Chinese collation.
- Update the heartbeat schema script to define the parent table and its indexes, range-partitioned by day on ts_ms (see the sketch below).
- Implement an automatic partitioning mechanism that creates per-day partitions to support high-throughput writes.
- Add a database apply script that runs the SQL files and verifies the table structure.
- Add a runtime smoke-test script that verifies database connectivity and basic operations.
- Add a full smoke-test script that verifies data insertion and partition creation.
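Neither SQL script appears in this commit view; only the apply script below does. As a rough, hypothetical sketch of what a ts_ms daily range-partitioned parent table could look like: the schema/table name heartbeat.heartbeat_events and the ts_ms partition key are taken from the apply script, while every other column (and the assumption that ts_ms is epoch milliseconds) is illustrative, not the actual contents of 010_heartbeat_schema.sql.

-- Hypothetical sketch only; the real DDL ships in 010_heartbeat_schema.sql.
CREATE SCHEMA IF NOT EXISTS heartbeat;

CREATE TABLE IF NOT EXISTS heartbeat.heartbeat_events (
  device_id text   NOT NULL,  -- illustrative column, not from the diff
  payload   jsonb,            -- illustrative column, not from the diff
  ts_ms     bigint NOT NULL   -- assumed epoch milliseconds; the partition key
) PARTITION BY RANGE (ts_ms);

-- One child table per day, bounded by [day_start_ms, next_day_start_ms).
CREATE TABLE IF NOT EXISTS heartbeat.heartbeat_events_20240101
  PARTITION OF heartbeat.heartbeat_events
  FOR VALUES FROM (1704067200000) TO (1704153600000);  -- 2024-01-01 UTC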
scripts/db/apply.js: 107 additions (new file)
@@ -0,0 +1,107 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { Client } from 'pg';

import config from '../../src/config/config.js';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Prefer an environment override (e.g. PGHOST) over the app config value.
function getEnv(name, fallback) {
  return process.env[name] ?? fallback;
}

function buildClientConfig(database) {
  const db = config.db;
  return {
    host: getEnv('PGHOST', db.host),
    port: Number(getEnv('PGPORT', db.port)),
    user: getEnv('PGUSER', db.user),
    password: getEnv('PGPASSWORD', db.password),
    database,
  };
}

// Read a SQL file and run it as a single multi-statement query.
async function runSqlFile(client, filePath) {
  const sql = await fs.readFile(filePath, 'utf8');
  const trimmed = sql.trim();
  if (!trimmed) return;
  await client.query(sql);
}

async function main() {
  const scriptsDir = __dirname;

  const schemaFile = path.join(scriptsDir, '010_heartbeat_schema.sql');
  const partitionFile = path.join(scriptsDir, '020_partitioning_auto_daily.sql');

  const targetDb = getEnv('PGTARGETDB', config.db.database);

  console.log(`[db] Connecting to target db: ${targetDb}`);
  const targetClient = new Client(buildClientConfig(targetDb));
  await targetClient.connect();

  try {
    // Report encoding/collation metadata and warn if the database does not
    // match the expected UTF-8 + Chinese-collation setup.
    const dbMeta = await targetClient.query(
      `SELECT
         current_database() AS db,
         pg_encoding_to_char(encoding) AS encoding,
         datcollate,
         datctype,
         datlocprovider
       FROM pg_database
       WHERE datname = current_database()`
    );
    if (dbMeta.rowCount === 1) {
      const m = dbMeta.rows[0];
      console.log(
        `[db] ${m.db} meta: encoding=${m.encoding} collate=${m.datcollate} ctype=${m.datctype} provider=${m.datlocprovider}`
      );
      if (String(m.encoding).toUpperCase() !== 'UTF8') {
        console.warn(`[db] WARN: ${m.db} encoding is not UTF8`);
      }
      const coll = String(m.datcollate ?? '').toLowerCase();
      if (coll && !coll.includes('zh') && !coll.includes('chinese')) {
        console.warn(
          `[db] WARN: ${m.db} collation is not obviously Chinese; if required, use ICU collation per-column or rebuild DB with zh locale`
        );
      }
    }

    console.log(`[db] Applying: ${path.basename(schemaFile)}`);
    await runSqlFile(targetClient, schemaFile);

    console.log(`[db] Applying: ${path.basename(partitionFile)}`);
    await runSqlFile(targetClient, partitionFile);

    // Verify the parent table exists; to_regclass() returns NULL if it does not.
    const tableCheck = await targetClient.query(
      "SELECT to_regclass('heartbeat.heartbeat_events') AS reg"
    );
    if (!tableCheck.rows?.[0]?.reg) {
      throw new Error('heartbeat.heartbeat_events was not created');
    }

    const indexCheck = await targetClient.query(
      `SELECT indexname
       FROM pg_indexes
       WHERE schemaname = 'heartbeat'
         AND tablename = 'heartbeat_events'
       ORDER BY indexname`
    );

    console.log('[db] Parent table indexes:');
    for (const row of indexCheck.rows) {
      console.log(` - ${row.indexname}`);
    }

    console.log('[db] Done');
  } finally {
    await targetClient.end();
  }
}

main().catch((err) => {
  console.error('[db] Failed:', err);
  process.exit(1);
});
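The automatic daily-partition mechanism lives in 020_partitioning_auto_daily.sql, which this view also omits. A common way to implement it is a PL/pgSQL helper that pre-creates the child table for a given day; the sketch below, including the assumed function name ensure_daily_partition and UTC day boundaries, is illustrative only and not the actual script.

-- Hypothetical sketch; the real logic ships in 020_partitioning_auto_daily.sql.
CREATE OR REPLACE FUNCTION heartbeat.ensure_daily_partition(p_day date)
RETURNS void
LANGUAGE plpgsql AS $$
DECLARE
  -- Assumes ts_ms is epoch milliseconds and day boundaries are UTC midnight.
  from_ms   bigint := (extract(epoch FROM (p_day::timestamp AT TIME ZONE 'UTC')) * 1000)::bigint;
  to_ms     bigint;
  part_name text   := 'heartbeat_events_' || to_char(p_day, 'YYYYMMDD');
BEGIN
  to_ms := from_ms + 86400000;  -- one day in milliseconds
  EXECUTE format(
    'CREATE TABLE IF NOT EXISTS heartbeat.%I PARTITION OF heartbeat.heartbeat_events FOR VALUES FROM (%s) TO (%s)',
    part_name, from_ms, to_ms
  );
END;
$$;

-- Pre-create today's and tomorrow's partitions, e.g. from cron or app startup,
-- so high-throughput writes never race the day rollover:
SELECT heartbeat.ensure_daily_partition(current_date);
SELECT heartbeat.ensure_daily_partition(current_date + 1);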