- Add a Redis integration module supporting heartbeat writes and a console-log queue (sketched below)
- Improve the Kafka consumer: multiple instances and automatic reconnection (sketched below)
- Improve the message handler: batch processing and multi-layer decoding
- Update the database schema: adjusted column types and constraints
- Add configuration options and environment-variable support for Redis and Kafka
- Add test cases and documentation
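The Redis and Kafka pieces mentioned above are not part of the file shown below. As a rough illustration only, here is a minimal sketch of the first item, assuming ioredis as the client library; every key name, TTL, and identifier is illustrative, not the module's actual contract.

// Sketch: heartbeat write + console-log queue on Redis (ioredis assumed; names illustrative).
import Redis from 'ioredis';

const redis = new Redis(process.env.REDIS_URL ?? 'redis://127.0.0.1:6379');

// Store the latest heartbeat under a per-device key with a TTL,
// so entries for silent devices expire on their own.
async function writeHeartbeat(deviceId) {
  await redis.set(`heartbeat:${deviceId}`, Date.now().toString(), 'EX', 90);
}

// Push a console log line onto a capped list (newest first).
async function enqueueConsoleLog(deviceId, line) {
  const key = `console:${deviceId}`;
  await redis.lpush(key, line);
  await redis.ltrim(key, 0, 999); // keep at most 1000 entries
}

For the second item, a similarly hedged sketch assuming kafkajs, whose client already retries broker connections with backoff; topic and group names are again made up for illustration:

// Sketch: Kafka consumer with reconnection handled by the client (kafkajs assumed).
import { Kafka } from 'kafkajs';

const kafka = new Kafka({
  clientId: 'heartbeat-consumer', // illustrative name
  brokers: (process.env.KAFKA_BROKERS ?? 'localhost:9092').split(','),
  retry: { retries: 10 }, // backoff and reconnection are built into the client
});

const consumer = kafka.consumer({ groupId: 'heartbeat-workers' });

async function run() {
  await consumer.connect();
  await consumer.subscribe({ topic: 'heartbeats', fromBeginning: false });
  await consumer.run({
    eachBatch: async ({ batch, resolveOffset, heartbeat }) => {
      for (const message of batch.messages) {
        // multi-layer decoding would happen here: raw bytes -> envelope -> payload
        resolveOffset(message.offset);
      }
      await heartbeat();
    },
  });
}

run().catch((err) => {
  console.error('[kafka] consumer failed:', err);
  process.exit(1);
});

Running several such processes with the same groupId is what gives the multi-instance behavior: the broker rebalances partitions across group members, and each client reconnects with backoff when a broker drops.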
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { Client } from 'pg';

import config from '../../src/config/config.js';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Read an environment variable, falling back to the provided default.
function getEnv(name, fallback) {
  return process.env[name] ?? fallback;
}

// Build a pg Client config. Precedence: POSTGRES_* variables first, then the
// standard PG* variables that libpq also honors, then config.db defaults.
function buildClientConfig(database) {
  const db = config.db;
  return {
    host: getEnv('POSTGRES_HOST', getEnv('PGHOST', db.host)),
    port: Number(getEnv('POSTGRES_PORT', getEnv('PGPORT', db.port))),
    user: getEnv('POSTGRES_USER', getEnv('PGUSER', db.user)),
    password: getEnv('POSTGRES_PASSWORD', getEnv('PGPASSWORD', db.password)),
    database,
  };
}

// Apply a SQL file in a single query call; empty files are skipped.
async function runSqlFile(client, filePath) {
  const sql = await fs.readFile(filePath, 'utf8');
  const trimmed = sql.trim();
  if (!trimmed) return;
  await client.query(sql);
}

async function main() {
  // Migration files are expected to sit next to this script.
  const scriptsDir = __dirname;

  const schemaFile = path.join(scriptsDir, '010_heartbeat_schema.sql');
  const partitionFile = path.join(scriptsDir, '020_partitioning_auto_daily.sql');

  const targetDb = getEnv('POSTGRES_DATABASE', getEnv('PGTARGETDB', config.db.database));

  console.log(`[db] Connecting to target db: ${targetDb}`);
  const targetClient = new Client(buildClientConfig(targetDb));
  await targetClient.connect();

  try {
    // Surface encoding/locale problems before any DDL runs.
    // Note: the datlocprovider column exists only on PostgreSQL 15+.
    const dbMeta = await targetClient.query(
      `SELECT
         current_database() AS db,
         pg_encoding_to_char(encoding) AS encoding,
         datcollate,
         datctype,
         datlocprovider
       FROM pg_database
       WHERE datname = current_database()`
    );
    if (dbMeta.rowCount === 1) {
      const m = dbMeta.rows[0];
      console.log(
        `[db] ${m.db} meta: encoding=${m.encoding} collate=${m.datcollate} ctype=${m.datctype} provider=${m.datlocprovider}`
      );
      if (String(m.encoding).toUpperCase() !== 'UTF8') {
        console.warn(`[db] WARN: ${m.db} encoding is not UTF8`);
      }
      const coll = String(m.datcollate ?? '').toLowerCase();
      if (coll && !coll.includes('zh') && !coll.includes('chinese')) {
        console.warn(
          `[db] WARN: ${m.db} collation is not obviously Chinese; if required, use ICU collation per-column or rebuild DB with zh locale`
        );
      }
    }

    console.log(`[db] Applying: ${path.basename(schemaFile)}`);
    await runSqlFile(targetClient, schemaFile);

    console.log(`[db] Applying: ${path.basename(partitionFile)}`);
    await runSqlFile(targetClient, partitionFile);

    // to_regclass() returns NULL when the relation does not exist.
    const tableCheck = await targetClient.query(
      "SELECT to_regclass('heartbeat.heartbeat_events') AS reg"
    );
    if (!tableCheck.rows?.[0]?.reg) {
      throw new Error('heartbeat.heartbeat_events was not created');
    }

    // List the parent table's indexes as a quick sanity check.
    const indexCheck = await targetClient.query(
      `SELECT indexname
       FROM pg_indexes
       WHERE schemaname = 'heartbeat'
         AND tablename = 'heartbeat_events'
       ORDER BY indexname`
    );

    console.log('[db] Parent table indexes:');
    for (const row of indexCheck.rows) {
      console.log(` - ${row.indexname}`);
    }

    console.log('[db] Done');
  } finally {
    await targetClient.end();
  }
}

main().catch((err) => {
  console.error('[db] Failed:', err);
  process.exit(1);
});
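Usage note: the runner is invoked directly with node. Connection settings resolve as POSTGRES_* first, then the standard PG* variables, then the config.db defaults, so exporting just POSTGRES_DATABASE (or PGTARGETDB) is enough to point it at another database. The two SQL files, 010_heartbeat_schema.sql and 020_partitioning_auto_daily.sql, must sit in the same directory as the script itself.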