feat(mocks): messages_all.json mock script
256
mocks/scripts/ensureTimespan.mjs
Normal file
@@ -0,0 +1,256 @@
#!/usr/bin/env node
/**
 * Ensure a minimum time span for chart JSON files (DIA0..DIA2) under chartsData by backfilling
 * older entries up to N days (default 30) from today. Keeps the original array order and structure.
 *
 * Rules:
 * - Only arrays of objects with a parsable 't' (YYYY-MM-DD HH:mm:ss) are processed.
 * - Determine array order (descending: newest->oldest vs ascending) from the first two items.
 * - Compute coverage: newestDate to oldestDate. If coverage < days, append older items until
 *   oldestDate <= today - days.
 * - For DIA2 (daily data), generate one item per missing day at 00:00:00, cloning oldest values.
 * - For DIA0/DIA1 (high-res), estimate the interval as the median of up to six deltas near the
 *   oldest tail. If not computable, default to 15 minutes. Clone oldest item values for backfill.
 *
 * Usage:
 *   node ./mocks/scripts/ensureTimespan.mjs [baseDir] [days] [maxItems]
 * - baseDir: defaults to mocks/device-cgi-simulator/chartsData
 * - days: defaults to 30
 * - maxItems: defaults to 4000
 */
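// Example invocation with the defaults spelled out:
//   node ./mocks/scripts/ensureTimespan.mjs mocks/device-cgi-simulator/chartsData 30 4000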

import fs from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const workspaceRoot = path.resolve(__dirname, "../..");

const DIA_FILE_RE = /DIA[012]\.json$/i;

function pad(n) {
  return String(n).padStart(2, "0");
}
function formatDate(d) {
  return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())} ${pad(
    d.getHours()
  )}:${pad(d.getMinutes())}:${pad(d.getSeconds())}`;
}
function parseDateTime(str) {
  const m =
    typeof str === "string" &&
    str.match(/(\d{4})-(\d{2})-(\d{2})\s+(\d{2}):(\d{2}):(\d{2})/);
  if (!m) return null;
  const [, y, mo, d, h, mi, s] = m.map(Number);
  return new Date(y, mo - 1, d, h, mi, s, 0);
}
function startOfDay(d) {
  const x = new Date(d);
  x.setHours(0, 0, 0, 0);
  return x;
}

async function readJson(filePath) {
  return JSON.parse(await fs.readFile(filePath, "utf-8"));
}
async function writeJson(filePath, data) {
  await fs.writeFile(filePath, JSON.stringify(data, null, 2) + "\n", "utf-8");
}

function detectOrder(dates) {
  if (dates.length < 2) return "desc";
  return dates[0] >= dates[1] ? "desc" : "asc";
}
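// e.g. detectOrder([parseDateTime("2024-01-02 00:00:00"), parseDateTime("2024-01-01 00:00:00")])
// yields "desc" (newest first); single-item arrays default to "desc".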

function fileTypeFromName(name) {
  if (/DIA2\.json$/i.test(name)) return "DIA2";
  if (/DIA1\.json$/i.test(name)) return "DIA1";
  return "DIA0";
}

function median(arr) {
  if (!arr.length) return 0;
  const s = [...arr].sort((a, b) => a - b);
  const m = Math.floor(s.length / 2);
  return s.length % 2 ? s[m] : (s[m - 1] + s[m]) / 2;
}
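// e.g. median([10, 30, 20]) === 20; median([10, 20, 30, 40]) === 25 (mean of the middle pair).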

function estimateIntervalMs(dates) {
  // Use up to 6 deltas near the oldest end to avoid very recent irregularities
  if (dates.length < 2) return 15 * 60 * 1000;
  // Dates array is ordered by detectOrder; compute deltas as positive ms between neighbors in chronological direction
  const order = detectOrder(dates);
  const chron = order === "desc" ? [...dates].reverse() : [...dates]; // ascending by time
  // Take items from the start of the ascending array (the oldest end)
  const tail = chron.slice(0, Math.min(7, chron.length));
  const deltas = [];
  for (let i = 1; i < tail.length; i++) {
    deltas.push(tail[i] - tail[i - 1]);
  }
  const med = median(deltas.filter((x) => x > 0));
  return med || 15 * 60 * 1000;
}
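// e.g. timestamps spaced 15 min apart with one 30-min outlier still yield a 15-min
// median, so backfilled items keep the dominant cadence.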

function cloneWithNewT(obj, tStr) {
  const copy = { ...obj };
  copy.t = tStr;
  return copy;
}

function capByOrder(arr, order, maxItems) {
  if (arr.length <= maxItems) return arr;
  return order === "desc"
    ? arr.slice(0, maxItems)
    : arr.slice(Math.max(0, arr.length - maxItems));
}
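// e.g. capByOrder([a, b, c], "desc", 2) keeps [a, b] (the two newest in a newest-first
// array); with "asc" it keeps the last two entries instead, dropping the oldest either way.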

async function ensureTimespanForFile(filePath, days, maxItems) {
  let data;
  try {
    data = await readJson(filePath);
  } catch (e) {
    return { status: "error", message: e.message };
  }
  if (!Array.isArray(data) || data.length === 0)
    return { status: "skip", reason: "not an array or empty" };
  const name = path.basename(filePath);

  // Extract dates for items that have t
  const dates = data.map((it) => parseDateTime(it?.t)).filter(Boolean);
  if (!dates.length) return { status: "skip", reason: "no parsable t fields" };
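  // Items are expected to look like { "t": "2025-01-31 12:00:00", ...seriesValues };
  // only 't' is interpreted here, all other fields are cloned verbatim during backfill.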

  const order = detectOrder(dates);
  const oldest = new Date(Math.min(...dates.map((d) => d.getTime())));
  const targetOldest = (() => {
    const x = new Date();
    x.setHours(0, 0, 0, 0);
    x.setDate(x.getDate() - days);
    return x;
  })();

  if (oldest <= targetOldest) return { status: "ok", changed: false };

  const type = fileTypeFromName(name);

  let updated = [...data];
  if (type === "DIA2") {
    // Daily backfill at 00:00:00
    // Identify oldest item template
    const idxOldest = dates.findIndex((d) => d.getTime() === oldest.getTime());
    const template = data[idxOldest] || data[data.length - 1];
    // For each day older than current oldest, append items until targetOldest
    let current = startOfDay(oldest);
    current.setDate(current.getDate() - 1);
    while (current > targetOldest) {
      const item = cloneWithNewT(template, formatDate(current));
      // Place at end if array is desc (newest first), else at start
      if (order === "desc") updated.push(item);
      else updated.unshift(item);
      current.setDate(current.getDate() - 1);
    }
    // Ensure we include exactly targetOldest
    const at = formatDate(targetOldest);
    const item = cloneWithNewT(template, at);
    if (order === "desc") updated.push(item);
    else updated.unshift(item);
  } else {
    // High-res backfill with estimated interval
    let interval = estimateIntervalMs(dates);
    // Clamp interval to fit desired days within maxItems if needed
    const daysMs = days * 24 * 60 * 60 * 1000;
    const minInterval = Math.ceil(daysMs / Math.max(1, maxItems));
    if (interval < minInterval) interval = minInterval;
    // Use oldest template
    const idxOldest = dates.findIndex((d) => d.getTime() === oldest.getTime());
    const template = data[idxOldest] || data[data.length - 1];
    let currentMs = oldest.getTime() - interval;
    while (currentMs > targetOldest.getTime()) {
      const item = cloneWithNewT(template, formatDate(new Date(currentMs)));
      if (order === "desc") updated.push(item);
      else updated.unshift(item);
      currentMs -= interval;
    }
    // Add boundary item exactly at targetOldest
    const boundary = cloneWithNewT(template, formatDate(targetOldest));
    if (order === "desc") updated.push(boundary);
    else updated.unshift(boundary);
  }
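  // e.g. days=30 with data reaching back only 10 days: DIA2 appends ~20 daily clones,
  // DIA0/DIA1 append one clone per `interval` down to the 30-day boundary.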

  // Enforce cap
  const capped = capByOrder(updated, order, maxItems);

  try {
    await writeJson(filePath, capped);
    return {
      status: "ok",
      changed: true,
      added: capped.length - data.length,
      truncated: Math.max(0, updated.length - capped.length),
    };
  } catch (e) {
    return { status: "error", message: e.message };
  }
}

async function walk(dir, visitor) {
  const entries = await fs.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const p = path.join(dir, entry.name);
    if (entry.isDirectory()) await walk(p, visitor);
    else if (entry.isFile() && DIA_FILE_RE.test(entry.name)) await visitor(p);
  }
}
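// walk() recurses into subdirectories and only visits files matching DIA_FILE_RE,
// e.g. a (hypothetical) chartsData/device1/tempDIA1.json would be visited while
// chartsData/device1/meta.json would not.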

async function main() {
  const baseArg = process.argv[2];
  const daysArg = process.argv[3];
  const maxArg = process.argv[4];
  const baseDir = path.resolve(
    workspaceRoot,
    baseArg || "mocks/device-cgi-simulator/chartsData"
  );
  const days = Number.isFinite(Number(daysArg)) ? Number(daysArg) : 30;
  const maxItems = Number.isFinite(Number(maxArg)) ? Number(maxArg) : 4000;

  let ok = 0,
    changed = 0,
    skipped = 0,
    errors = 0,
    addedTotal = 0;
  const rel = (p) => path.relative(workspaceRoot, p) || p;

  try {
    await walk(baseDir, async (file) => {
      const res = await ensureTimespanForFile(file, days, maxItems);
      if (res.status === "ok") {
        ok++;
        if (res.changed) {
          changed++;
          addedTotal += res.added || 0;
          const truncStr = res.truncated ? `, truncated ${res.truncated}` : "";
          console.log(
            `[ok] Backfilled ${rel(file)} (+${res.added || 0}${truncStr})`
          );
        } else console.log(`[ok] No change for ${rel(file)}`);
      } else if (res.status === "skip") {
        skipped++;
        console.log(`[skip] ${rel(file)}: ${res.reason}`);
      } else {
        errors++;
        console.log(`[error] ${rel(file)}: ${res.message}`);
      }
    });
  } catch (err) {
    console.error("Failed to ensure timespan:", err);
    process.exitCode = 1;
    return;
  }

  console.log(
    `\nDone. files=${
      ok + skipped + errors
    }, ok=${ok}, changed=${changed}, skipped=${skipped}, errors=${errors}, added=${addedTotal}`
  );
}

await main();
231
mocks/scripts/retimeMessages.mjs
Normal file
@@ -0,0 +1,231 @@
#!/usr/bin/env node
/**
 * Retime messages JSON so the newest entry is today (same time-of-day),
 * then ensure at least N days of coverage and cap at M items by removing the oldest.
 *
 * Usage:
 *   node ./mocks/scripts/retimeMessages.mjs [filePath] [minDays=30] [maxItems=4000]
 * Default file: mocks/device-cgi-simulator/meldungen/messages_all.json
 */
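// Example invocation against the default fixture:
//   node ./mocks/scripts/retimeMessages.mjs mocks/device-cgi-simulator/meldungen/messages_all.json 30 4000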

import fs from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const workspaceRoot = path.resolve(__dirname, "../..");

function pad(n) {
  return String(n).padStart(2, "0");
}
function formatDate(d) {
  return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())} ${pad(
    d.getHours()
  )}:${pad(d.getMinutes())}:${pad(d.getSeconds())}`;
}
function parseDateTime(str) {
  const m =
    typeof str === "string" &&
    str.match(/(\d{4})-(\d{2})-(\d{2})\s+(\d{2}):(\d{2}):(\d{2})/);
  if (!m) return null;
  const [, y, mo, d, h, mi, s] = m.map(Number);
  return new Date(y, mo - 1, d, h, mi, s, 0);
}
function withTodayDateAndTimeOf(baseTime) {
  const today = new Date();
  return new Date(
    today.getFullYear(),
    today.getMonth(),
    today.getDate(),
    baseTime.getHours(),
    baseTime.getMinutes(),
    baseTime.getSeconds(),
    0
  );
}
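// e.g. a newest entry of "2024-03-01 14:05:09" retimed on 2025-06-10 becomes
// "2025-06-10 14:05:09": the date moves to today, the time-of-day is preserved.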

async function readJson(fp) {
  return JSON.parse(await fs.readFile(fp, "utf-8"));
}
async function writeJson(fp, data) {
  await fs.writeFile(fp, JSON.stringify(data, null, 2) + "\n", "utf-8");
}

function detectOrder(dates) {
  if (dates.length < 2) return "desc";
  return dates[0] >= dates[1] ? "desc" : "asc";
}
async function retimeMessages(filePath) {
  const arr = await readJson(filePath);
  if (!Array.isArray(arr) || arr.length === 0)
    return { status: "skip", reason: "not an array or empty" };

  const times = arr.map((x) => parseDateTime(x?.t));
  const validIdx = times.map((t, i) => (t ? i : -1)).filter((i) => i !== -1);
  if (!validIdx.length)
    return { status: "skip", reason: "no parsable t fields" };

  const order = detectOrder(validIdx.slice(0, 2).map((i) => times[i]));
  const baseIdx =
    order === "desc" ? validIdx[0] : validIdx[validIdx.length - 1];
  const baseOrig = times[baseIdx];
  const baseNew = withTodayDateAndTimeOf(baseOrig);

  const newTimes = new Array(arr.length).fill(null);
  newTimes[baseIdx] = baseNew;

  if (order === "desc") {
    for (let k = 1; k < validIdx.length; k++) {
      const prev = validIdx[k - 1];
      const i = validIdx[k];
      const delta = Math.max(0, times[prev].getTime() - times[i].getTime());
      const prevNew = newTimes[prev] || baseNew;
      newTimes[i] = new Date(prevNew.getTime() - delta);
    }
  } else {
    // asc
    for (let k = validIdx.length - 2; k >= 0; k--) {
      const next = validIdx[k + 1]; // newer
      const i = validIdx[k]; // older
      const delta = Math.max(0, times[next].getTime() - times[i].getTime());
      const nextNew = newTimes[next] || baseNew;
      newTimes[i] = new Date(nextNew.getTime() - delta);
    }
  }
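  // Relative spacing is preserved: two messages that were 90 s apart before the
  // retime are still 90 s apart afterwards, anchored to the shifted newest entry.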

  const updated = arr.map((item, idx) => {
    const nt = newTimes[idx];
    if (!nt) return item;
    return { ...item, t: formatDate(nt) };
  });

  return { status: "ok", updated, order };
}

async function main() {
  const arg = process.argv[2];
  const daysArg = process.argv[3];
  const maxArg = process.argv[4];
  const filePath = path.resolve(
    workspaceRoot,
    arg || "mocks/device-cgi-simulator/meldungen/messages_all.json"
  );
  const minDays = Number.isFinite(Number(daysArg)) ? Number(daysArg) : 30;
  const maxItems = Number.isFinite(Number(maxArg)) ? Number(maxArg) : 4000;

  try {
    const res = await retimeMessages(filePath);
    if (res.status !== "ok") {
      console.log(
        `[skip] ${path.relative(workspaceRoot, filePath)}: ${res.reason}`
      );
      return;
    }

    let { updated, order } = res;

    // Re-parse times after retime
    const times = updated.map((x) => parseDateTime(x?.t));
    const validIdx = times.map((t, i) => (t ? i : -1)).filter((i) => i !== -1);
    if (!validIdx.length) {
      await writeJson(filePath, updated);
      console.log(
        `[ok] Updated ${path.relative(workspaceRoot, filePath)} (${
          updated.length
        } items)`
      );
      return;
    }

    // Determine newest and oldest
    let newestIdx;
    if (order === "desc") {
      // first valid is newest
      newestIdx = validIdx[0];
    } else {
      newestIdx = validIdx[validIdx.length - 1];
    }
    const newestTime = times[newestIdx];
    let oldestIdx = validIdx[0];
    let oldestTime = times[oldestIdx];
    for (const i of validIdx) {
      if (times[i].getTime() < oldestTime.getTime()) {
        oldestTime = times[i];
        oldestIdx = i;
      }
    }

    const dayMs = 24 * 60 * 60 * 1000;
    // +1 s of slack so an entry exactly minDays old still counts as covered
    const targetOldestMs = newestTime.getTime() - minDays * dayMs + 1000;

    // Build intervals list (positive sequential deltas in array order)
    const deltas = [];
    for (let k = 1; k < validIdx.length; k++) {
      const a = times[validIdx[k - 1]];
      const b = times[validIdx[k]];
      const d = Math.abs(a.getTime() - b.getTime());
      if (d > 0) deltas.push(d);
    }
    deltas.sort((x, y) => x - y);
    const median = deltas.length
      ? deltas[Math.floor(deltas.length / 2)]
      : 15 * 60 * 1000; // fallback 15 minutes
    const interval = Math.max(1000, Math.min(median, 7 * 24 * 60 * 60 * 1000));
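    // e.g. deltas of [5 min, 15 min, 15 min, 2 h] give a median of 15 min (upper middle
    // for even counts); the clamp keeps the backfill step between 1 s and 7 days.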

    let added = 0;
    if (oldestTime.getTime() > targetOldestMs) {
      // Need backfill towards the older end
      const template = updated[oldestIdx];
      const clones = [];
      let t = new Date(oldestTime.getTime());
      while (
        t.getTime() > targetOldestMs &&
        updated.length + clones.length < maxItems
      ) {
        t = new Date(t.getTime() - interval);
        const clone = { ...template, t: formatDate(t) };
        clones.push(clone);
      }
      if (clones.length) {
        if (order === "desc") {
          // Older items at the end
          updated = updated.concat(clones);
        } else {
          // Older items at the beginning
          updated = clones.concat(updated);
        }
        added = clones.length;
      }
    }

    // Cap to maxItems by removing the oldest side
    let trimmed = 0;
    if (updated.length > maxItems) {
      const removeCount = updated.length - maxItems;
      if (order === "desc") {
        // remove from end (oldest)
        updated = updated.slice(0, updated.length - removeCount);
      } else {
        // remove from start (oldest)
        updated = updated.slice(removeCount);
      }
      trimmed = removeCount;
    }

    await writeJson(filePath, updated);
    const rel = path.relative(workspaceRoot, filePath);
    console.log(
      `[ok] Updated ${rel} (${updated.length} items, +${added} added, -${trimmed} trimmed, minDays=${minDays}, maxItems=${maxItems})`
    );
  } catch (e) {
    console.error(
      `[error] ${path.relative(workspaceRoot, filePath)}:`,
      e.message
    );
    process.exitCode = 1;
  }
}

await main();