feat: persistent run history logs for searcher and filter
- search_runs.json: append-only history of every searcher run (started_at, finished, added, seen, platforms, lookback_days)
- search_progress_last.json: snapshot of final progress state after each completed run — answers 'what keywords/tracks were searched?'
- filter_runs.json: append-only history of every filter batch (batch_id, submitted/collected timestamps, model, passed/filtered/errors)

Fixes the 'did the 90-day run complete?' ambiguity going forward
This commit is contained in:
@@ -133,6 +133,21 @@ async function collect(state, settings) {
|
||||
|
||||
clearState();
|
||||
|
||||
// Append to filter run history
|
||||
const runsPath = resolve(__dir, 'data/filter_runs.json');
|
||||
const runs = existsSync(runsPath) ? JSON.parse(readFileSync(runsPath, 'utf8')) : [];
|
||||
runs.push({
|
||||
batch_id: state.batch_id,
|
||||
submitted_at: state.submitted_at,
|
||||
collected_at: new Date().toISOString(),
|
||||
job_count: state.job_count,
|
||||
model: state.model,
|
||||
passed,
|
||||
filtered,
|
||||
errors,
|
||||
});
|
||||
writeFileSync(runsPath, JSON.stringify(runs, null, 2));
|
||||
|
||||
const summary = `✅ Filter complete — ${passed} passed, ${filtered} filtered, ${errors} errors`;
|
||||
console.log(`\n${summary}`);
|
||||
|
||||
@@ -182,11 +197,13 @@ async function submit(settings, searchConfig, candidateProfile) {
|
||||
|
||||
const batchId = await submitBatch(filterable, jobProfilesByTrack, searchConfig, candidateProfile, model, apiKey);
|
||||
|
||||
const submittedAt = new Date().toISOString();
|
||||
writeState({
|
||||
batch_id: batchId,
|
||||
submitted_at: new Date().toISOString(),
|
||||
submitted_at: submittedAt,
|
||||
job_count: filterable.length,
|
||||
model,
|
||||
tracks: Object.keys(jobProfilesByTrack),
|
||||
});
|
||||
|
||||
console.log(` Batch submitted: ${batchId}`);
|
||||
|
||||
@@ -34,7 +34,7 @@ async function main() {
|
||||
const startedAt = Date.now();
|
||||
|
||||
const writeLastRun = (finished = false) => {
|
||||
writeFileSync(resolve(__dir, 'data/searcher_last_run.json'), JSON.stringify({
|
||||
const entry = {
|
||||
started_at: startedAt,
|
||||
finished_at: finished ? Date.now() : null,
|
||||
finished,
|
||||
@@ -42,7 +42,19 @@ async function main() {
|
||||
seen: totalSeen,
|
||||
skipped_dupes: totalSeen - totalAdded,
|
||||
platforms: platformsRun,
|
||||
}, null, 2));
|
||||
};
|
||||
// Always update last-run snapshot
|
||||
writeFileSync(resolve(__dir, 'data/searcher_last_run.json'), JSON.stringify(entry, null, 2));
|
||||
// Append to run history log
|
||||
const runsPath = resolve(__dir, 'data/search_runs.json');
|
||||
const runs = existsSync(runsPath) ? JSON.parse(readFileSync(runsPath, 'utf8')) : [];
|
||||
// Update last entry if same run, otherwise append
|
||||
if (runs.length > 0 && runs[runs.length - 1].started_at === startedAt) {
|
||||
runs[runs.length - 1] = entry;
|
||||
} else {
|
||||
runs.push(entry);
|
||||
}
|
||||
writeFileSync(runsPath, JSON.stringify(runs, null, 2));
|
||||
};
|
||||
|
||||
lock.onShutdown(() => {
|
||||
@@ -195,6 +207,11 @@ async function main() {
|
||||
if (totalAdded > 0) await sendTelegram(settings, summary);
|
||||
|
||||
writeLastRun(true);
|
||||
// Archive final progress snapshot before clearing (for audit — answers "what was searched?")
|
||||
const progressPath = resolve(__dir, 'data/search_progress.json');
|
||||
if (existsSync(progressPath)) {
|
||||
writeFileSync(resolve(__dir, 'data/search_progress_last.json'), readFileSync(progressPath, 'utf8'));
|
||||
}
|
||||
clearProgress(); // run finished cleanly — next run starts fresh with new keywords
|
||||
|
||||
console.log('\n✅ Search complete');
|
||||
|
||||
Reference in New Issue
Block a user