feat(mocks): messages_all.json mock script
mocks/scripts/ensureTimespan.mjs (new file, 256 lines)
@@ -0,0 +1,256 @@
#!/usr/bin/env node
/**
 * Ensure a minimum time span for chart JSON files (DIA0..DIA2) under chartsData by backfilling
 * older entries up to N days (default 30) from today. Keeps original array order and structure.
 *
 * Rules:
 * - Only arrays of objects with a parsable 't' (YYYY-MM-DD HH:mm:ss) are processed.
 * - Determine array order (descending: newest->oldest vs ascending) from the first two items.
 * - Compute coverage: newestDate to oldestDate. If coverage < days, append older items until
 *   oldestDate <= today - days.
 * - For DIA2 (daily data), generate one item per missing day at 00:00:00, cloning oldest values.
 * - For DIA0/DIA1 (high-res), estimate the sample interval as the median of the 6 deltas
 *   nearest the oldest tail. If not computable, default to 15 minutes. Clone oldest item
 *   values for backfill.
 *
 * Usage:
 *   node ./mocks/scripts/ensureTimespan.mjs [baseDir] [days] [maxItems]
 * - baseDir: defaults to mocks/device-cgi-simulator/chartsData
 * - days: defaults to 30
 * - maxItems: defaults to 4000
 */
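
// Illustrative item shape (field names besides 't' are hypothetical; the script
// only reads and rewrites 't' and clones all other fields verbatim):
//   [{ "t": "2024-05-01 12:15:00", "value": 3.2 }, ...]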

import fs from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const workspaceRoot = path.resolve(__dirname, "../..");

const DIA_FILE_RE = /DIA[012]\.json$/i;

function pad(n) {
  return String(n).padStart(2, "0");
}
function formatDate(d) {
  return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())} ${pad(
    d.getHours()
  )}:${pad(d.getMinutes())}:${pad(d.getSeconds())}`;
}
function parseDateTime(str) {
  const m =
    typeof str === "string" &&
    str.match(/(\d{4})-(\d{2})-(\d{2})\s+(\d{2}):(\d{2}):(\d{2})/);
  if (!m) return null;
  const [, y, mo, d, h, mi, s] = m.map(Number);
  return new Date(y, mo - 1, d, h, mi, s, 0);
}
function startOfDay(d) {
  const x = new Date(d);
  x.setHours(0, 0, 0, 0);
  return x;
}
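
// Round-trip sanity check for the date helpers (local time, no timezone math):
//   parseDateTime("2024-05-01 12:15:00")        -> local Date, May 1 2024 12:15:00
//   formatDate(new Date(2024, 4, 1, 12, 15, 0)) -> "2024-05-01 12:15:00"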

async function readJson(filePath) {
  return JSON.parse(await fs.readFile(filePath, "utf-8"));
}
async function writeJson(filePath, data) {
  await fs.writeFile(filePath, JSON.stringify(data, null, 2) + "\n", "utf-8");
}

function detectOrder(dates) {
  if (dates.length < 2) return "desc";
  return dates[0] >= dates[1] ? "desc" : "asc";
}
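
// e.g. two dates with the newer one first yield "desc" (newest-first array);
// arrays with fewer than two parsable dates default to "desc".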

function fileTypeFromName(name) {
  if (/DIA2\.json$/i.test(name)) return "DIA2";
  if (/DIA1\.json$/i.test(name)) return "DIA1";
  return "DIA0";
}
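
// e.g. "fooDIA2.json" -> "DIA2", "fooDIA1.json" -> "DIA1"; everything else
// (in practice the DIA0 files selected by DIA_FILE_RE) falls through to "DIA0".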

function median(arr) {
  if (!arr.length) return 0;
  const s = [...arr].sort((a, b) => a - b);
  const m = Math.floor(s.length / 2);
  return s.length % 2 ? s[m] : (s[m - 1] + s[m]) / 2;
}
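
// e.g. median([3, 1, 2]) === 2; median([1, 2, 3, 4]) === 2.5; median([]) === 0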

function estimateIntervalMs(dates) {
  // Use the 6 deltas nearest the oldest end to avoid very recent irregularities
  if (dates.length < 2) return 15 * 60 * 1000;
  // Reorder chronologically and compute deltas as positive ms between neighbors
  const order = detectOrder(dates);
  const chron = order === "desc" ? [...dates].reverse() : [...dates]; // ascending by time
  // Oldest 7 samples -> up to 6 deltas
  const tail = chron.slice(0, Math.min(7, chron.length));
  const deltas = [];
  for (let i = 1; i < tail.length; i++) {
    deltas.push(tail[i] - tail[i - 1]);
  }
  const med = median(deltas.filter((x) => x > 0));
  return med || 15 * 60 * 1000;
}
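
// e.g. samples spaced 5 minutes apart near the oldest end yield 5 * 60 * 1000;
// a single parsable date or all non-positive deltas fall back to the 15-minute default.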

function cloneWithNewT(obj, tStr) {
  const copy = { ...obj };
  copy.t = tStr;
  return copy;
}

function capByOrder(arr, order, maxItems) {
  if (arr.length <= maxItems) return arr;
  return order === "desc"
    ? arr.slice(0, maxItems)
    : arr.slice(Math.max(0, arr.length - maxItems));
}
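
// e.g. capByOrder(arr, "desc", 2) keeps the first two entries (the newest in a
// newest-first array); with "asc" it keeps the last two, so in both cases the
// oldest backfill items are what get trimmed.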

async function ensureTimespanForFile(filePath, days, maxItems) {
  let data;
  try {
    data = await readJson(filePath);
  } catch (e) {
    return { status: "error", message: e.message };
  }
  if (!Array.isArray(data) || data.length === 0)
    return { status: "skip", reason: "not an array or empty" };
  const name = path.basename(filePath);

  // Extract dates for items that have t
  const dates = data.map((it) => parseDateTime(it?.t)).filter(Boolean);
  if (!dates.length) return { status: "skip", reason: "no parsable t fields" };

  const order = detectOrder(dates);
  const oldest = new Date(Math.min(...dates.map((d) => d.getTime())));
  const targetOldest = (() => {
    const x = new Date();
    x.setHours(0, 0, 0, 0);
    x.setDate(x.getDate() - days);
    return x;
  })();

  if (oldest <= targetOldest) return { status: "ok", changed: false };

  const type = fileTypeFromName(name);

  let updated = [...data];
  if (type === "DIA2") {
    // Daily backfill at 00:00:00
    // Identify oldest item template
    const idxOldest = dates.findIndex((d) => d.getTime() === oldest.getTime());
    const template = data[idxOldest] || data[data.length - 1];
    // For each day older than current oldest, append items until targetOldest
    let current = startOfDay(oldest);
    current.setDate(current.getDate() - 1);
    while (current > targetOldest) {
      const item = cloneWithNewT(template, formatDate(current));
      // Place at end if array is desc (newest first), else at start
      if (order === "desc") updated.push(item);
      else updated.unshift(item);
      current.setDate(current.getDate() - 1);
    }
    // Ensure we include exactly targetOldest
    const at = formatDate(targetOldest);
    const item = cloneWithNewT(template, at);
    if (order === "desc") updated.push(item);
    else updated.unshift(item);
  } else {
    // High-res backfill with estimated interval
    let interval = estimateIntervalMs(dates);
    // Clamp interval to fit desired days within maxItems if needed
    const daysMs = days * 24 * 60 * 60 * 1000;
    const minInterval = Math.ceil(daysMs / Math.max(1, maxItems));
    if (interval < minInterval) interval = minInterval;
    // Use oldest template
    const idxOldest = dates.findIndex((d) => d.getTime() === oldest.getTime());
    const template = data[idxOldest] || data[data.length - 1];
    let currentMs = oldest.getTime() - interval;
    while (currentMs > targetOldest.getTime()) {
      const item = cloneWithNewT(template, formatDate(new Date(currentMs)));
      if (order === "desc") updated.push(item);
      else updated.unshift(item);
      currentMs -= interval;
    }
    // Add boundary item exactly at targetOldest
    const boundary = cloneWithNewT(template, formatDate(targetOldest));
    if (order === "desc") updated.push(boundary);
    else updated.unshift(boundary);
  }

  // Enforce cap
  const capped = capByOrder(updated, order, maxItems);

  try {
    await writeJson(filePath, capped);
    return {
      status: "ok",
      changed: true,
      added: capped.length - data.length,
      truncated: Math.max(0, updated.length - capped.length),
    };
  } catch (e) {
    return { status: "error", message: e.message };
  }
}
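
// Result shapes returned above:
//   { status: "ok", changed: false }                  -- coverage already sufficient
//   { status: "ok", changed: true, added, truncated } -- file was backfilled
//   { status: "skip", reason } / { status: "error", message }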

async function walk(dir, visitor) {
  const entries = await fs.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const p = path.join(dir, entry.name);
    if (entry.isDirectory()) await walk(p, visitor);
    else if (entry.isFile() && DIA_FILE_RE.test(entry.name)) await visitor(p);
  }
}
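
// Depth-first traversal; the visitor only ever receives files whose names
// match DIA_FILE_RE (i.e. *DIA0.json, *DIA1.json, *DIA2.json).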

async function main() {
  const baseArg = process.argv[2];
  const daysArg = process.argv[3];
  const maxArg = process.argv[4];
  const baseDir = path.resolve(
    workspaceRoot,
    baseArg || "mocks/device-cgi-simulator/chartsData"
  );
  const days = Number.isFinite(Number(daysArg)) ? Number(daysArg) : 30;
  const maxItems = Number.isFinite(Number(maxArg)) ? Number(maxArg) : 4000;

  let ok = 0,
    changed = 0,
    skipped = 0,
    errors = 0,
    addedTotal = 0;
  const rel = (p) => path.relative(workspaceRoot, p) || p;

  try {
    await walk(baseDir, async (file) => {
      const res = await ensureTimespanForFile(file, days, maxItems);
      if (res.status === "ok") {
        ok++;
        if (res.changed) {
          changed++;
          addedTotal += res.added || 0;
          const truncStr = res.truncated ? `, truncated ${res.truncated}` : "";
          console.log(
            `[ok] Backfilled ${rel(file)} (+${res.added || 0}${truncStr})`
          );
        } else console.log(`[ok] OK (no change) ${rel(file)}`);
      } else if (res.status === "skip") {
        skipped++;
        console.log(`[skip] ${rel(file)}: ${res.reason}`);
      } else {
        errors++;
        console.log(`[error] ${rel(file)}: ${res.message}`);
      }
    });
  } catch (err) {
    console.error("Failed to ensure timespan:", err);
    process.exitCode = 1;
    return;
  }

  console.log(
    `\nDone. files=${
      ok + skipped + errors
    }, ok=${ok}, changed=${changed}, skipped=${skipped}, errors=${errors}, added=${addedTotal}`
  );
}

await main();
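
// Example invocation with the documented defaults written out explicitly:
//   node ./mocks/scripts/ensureTimespan.mjs mocks/device-cgi-simulator/chartsData 30 4000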