Compare commits
13 Commits
main
...
standalone
| Author | SHA1 | Date | |
|---|---|---|---|
| c8357bb358 | |||
| 9b0aa7bf77 | |||
| a7d8aa84d3 | |||
| 7e1bce924e | |||
| ac05c54c06 | |||
| 441d52e084 | |||
| f0877932f6 | |||
| 1f95c5c70b | |||
| f14af48905 | |||
| 377437cf3c | |||
| 3ecabeea63 | |||
| ba5cbedcf4 | |||
| da95350733 |
@@ -26,6 +26,7 @@ import { createBrowser } from './lib/browser.mjs';
|
|||||||
import { ensureAuth } from './lib/session.mjs';
|
import { ensureAuth } from './lib/session.mjs';
|
||||||
import { FormFiller } from './lib/form_filler.mjs';
|
import { FormFiller } from './lib/form_filler.mjs';
|
||||||
import { applyToJob, supportedTypes } from './lib/apply/index.mjs';
|
import { applyToJob, supportedTypes } from './lib/apply/index.mjs';
|
||||||
|
import { buildTrackProfiles, getTrackProfile } from './lib/profile.mjs';
|
||||||
import { sendTelegram, formatApplySummary } from './lib/notify.mjs';
|
import { sendTelegram, formatApplySummary } from './lib/notify.mjs';
|
||||||
import { processTelegramReplies } from './lib/telegram_answers.mjs';
|
import { processTelegramReplies } from './lib/telegram_answers.mjs';
|
||||||
import { generateAnswer } from './lib/ai_answer.mjs';
|
import { generateAnswer } from './lib/ai_answer.mjs';
|
||||||
@@ -43,22 +44,28 @@ const isPreview = process.argv.includes('--preview');
|
|||||||
async function main() {
|
async function main() {
|
||||||
const lock = acquireLock('applier', resolve(__dir, 'data'));
|
const lock = acquireLock('applier', resolve(__dir, 'data'));
|
||||||
|
|
||||||
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
|
const settings = await loadConfig(resolve(__dir, 'config/settings.json'));
|
||||||
await initQueue(settings);
|
await initQueue(settings);
|
||||||
const profile = loadConfig(resolve(__dir, 'config/profile.json'));
|
const baseProfile = await loadConfig(resolve(__dir, 'config/profile.json'));
|
||||||
|
const searchConfig = await loadConfig(resolve(__dir, 'config/search_config.json'));
|
||||||
|
const profilesByTrack = await buildTrackProfiles(baseProfile, searchConfig.searches || []);
|
||||||
|
|
||||||
// Ensure resume is available locally (downloads from S3 if needed)
|
// Ensure resume is available locally for each track profile
|
||||||
if (profile.resume_path) {
|
for (const prof of Object.values(profilesByTrack)) {
|
||||||
profile.resume_path = await ensureLocalFile('config/Matthew_Jackson_Resume.pdf', profile.resume_path);
|
if (prof.resume_path) {
|
||||||
|
const s3Key = prof.resume_path.startsWith('/') ? prof.resume_path.split('/').pop() : prof.resume_path;
|
||||||
|
prof.resume_path = await ensureLocalFile(`config/${s3Key}`, prof.resume_path);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const answersPath = resolve(__dir, 'config/answers.json');
|
const answersPath = resolve(__dir, 'config/answers.json');
|
||||||
const answers = existsSync(answersPath) ? loadConfig(answersPath) : [];
|
const answers = await loadConfig(answersPath).catch(() => []);
|
||||||
const maxApps = settings.max_applications_per_run || Infinity;
|
const maxApps = settings.max_applications_per_run || Infinity;
|
||||||
const maxRetries = settings.max_retries ?? DEFAULT_MAX_RETRIES;
|
const maxRetries = settings.max_retries ?? DEFAULT_MAX_RETRIES;
|
||||||
const enabledTypes = settings.enabled_apply_types || DEFAULT_ENABLED_APPLY_TYPES;
|
const enabledTypes = settings.enabled_apply_types || DEFAULT_ENABLED_APPLY_TYPES;
|
||||||
const apiKey = process.env.ANTHROPIC_API_KEY || settings.anthropic_api_key;
|
const apiKey = process.env.ANTHROPIC_API_KEY || settings.anthropic_api_key;
|
||||||
const formFiller = new FormFiller(profile, answers, { apiKey, answersPath });
|
// Default FormFiller uses base profile — swapped per job below
|
||||||
|
const formFiller = new FormFiller(baseProfile, answers, { apiKey, answersPath });
|
||||||
|
|
||||||
const startedAt = Date.now();
|
const startedAt = Date.now();
|
||||||
const rateLimitPath = resolve(__dir, 'data/linkedin_rate_limited_at.json');
|
const rateLimitPath = resolve(__dir, 'data/linkedin_rate_limited_at.json');
|
||||||
@@ -213,12 +220,14 @@ async function main() {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set job context for AI answers
|
// Swap profile for this job's track (different resume/cover letter per track)
|
||||||
|
const jobProfile = getTrackProfile(profilesByTrack, job.track);
|
||||||
|
formFiller.profile = jobProfile;
|
||||||
formFiller.jobContext = { title: job.title, company: job.company };
|
formFiller.jobContext = { title: job.title, company: job.company };
|
||||||
|
|
||||||
// Reload answers.json before each job — picks up Telegram replies between jobs
|
// Reload answers.json before each job — picks up Telegram replies between jobs
|
||||||
try {
|
try {
|
||||||
const freshAnswers = existsSync(answersPath) ? loadConfig(answersPath) : [];
|
const freshAnswers = await loadConfig(answersPath).catch(() => []);
|
||||||
formFiller.answers = freshAnswers;
|
formFiller.answers = freshAnswers;
|
||||||
} catch { /* keep existing answers on read error */ }
|
} catch { /* keep existing answers on read error */ }
|
||||||
|
|
||||||
@@ -243,7 +252,7 @@ async function main() {
|
|||||||
new Promise((_, reject) => setTimeout(() => reject(new Error('Job apply timed out')), PER_JOB_TIMEOUT_MS)),
|
new Promise((_, reject) => setTimeout(() => reject(new Error('Job apply timed out')), PER_JOB_TIMEOUT_MS)),
|
||||||
]);
|
]);
|
||||||
result.applyStartedAt = applyStartedAt;
|
result.applyStartedAt = applyStartedAt;
|
||||||
await handleResult(job, result, results, settings, profile, apiKey);
|
await handleResult(job, result, results, settings, jobProfile, apiKey);
|
||||||
if (results.rate_limited) break;
|
if (results.rate_limited) break;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error(` ❌ Error: ${e.message}`);
|
console.error(` ❌ Error: ${e.message}`);
|
||||||
|
|||||||
@@ -31,7 +31,8 @@ process.stdout.write = (chunk, ...args) => { logStream.write(chunk); return orig
|
|||||||
process.stderr.write = (chunk, ...args) => { logStream.write(chunk); return origStderrWrite(chunk, ...args); };
|
process.stderr.write = (chunk, ...args) => { logStream.write(chunk); return origStderrWrite(chunk, ...args); };
|
||||||
|
|
||||||
import { getJobsByStatus, updateJobStatus, loadConfig, loadQueue, saveQueue, dedupeAfterFilter, initQueue } from './lib/queue.mjs';
|
import { getJobsByStatus, updateJobStatus, loadConfig, loadQueue, saveQueue, dedupeAfterFilter, initQueue } from './lib/queue.mjs';
|
||||||
import { loadProfile, submitBatches, checkBatch, downloadResults } from './lib/filter.mjs';
|
import { submitBatches, checkBatch, downloadResults } from './lib/filter.mjs';
|
||||||
|
import { buildTrackProfiles } from './lib/profile.mjs';
|
||||||
import { sendTelegram, formatFilterSummary } from './lib/notify.mjs';
|
import { sendTelegram, formatFilterSummary } from './lib/notify.mjs';
|
||||||
import { DEFAULT_FILTER_MODEL, DEFAULT_FILTER_MIN_SCORE } from './lib/constants.mjs';
|
import { DEFAULT_FILTER_MODEL, DEFAULT_FILTER_MIN_SCORE } from './lib/constants.mjs';
|
||||||
|
|
||||||
@@ -131,7 +132,7 @@ async function collect(state, settings) {
|
|||||||
for (const [k, v] of Object.entries(usage)) totalUsage[k] = (totalUsage[k] || 0) + v;
|
for (const [k, v] of Object.entries(usage)) totalUsage[k] = (totalUsage[k] || 0) + v;
|
||||||
}
|
}
|
||||||
|
|
||||||
const searchConfig = loadConfig(resolve(__dir, 'config/search_config.json'));
|
const searchConfig = await loadConfig(resolve(__dir, 'config/search_config.json'));
|
||||||
const globalMin = searchConfig.filter_min_score ?? DEFAULT_FILTER_MIN_SCORE;
|
const globalMin = searchConfig.filter_min_score ?? DEFAULT_FILTER_MIN_SCORE;
|
||||||
|
|
||||||
let passed = 0, filtered = 0, errors = 0;
|
let passed = 0, filtered = 0, errors = 0;
|
||||||
@@ -204,7 +205,7 @@ async function collect(state, settings) {
|
|||||||
// Phase 2 — Submit a new batch
|
// Phase 2 — Submit a new batch
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
async function submit(settings, searchConfig, candidateProfile) {
|
async function submit(settings, searchConfig, profilesByTrack) {
|
||||||
const apiKey = process.env.ANTHROPIC_API_KEY;
|
const apiKey = process.env.ANTHROPIC_API_KEY;
|
||||||
|
|
||||||
// Clear stale batch markers — jobs marked as submitted but no filter_state.json means
|
// Clear stale batch markers — jobs marked as submitted but no filter_state.json means
|
||||||
@@ -234,31 +235,28 @@ async function submit(settings, searchConfig, candidateProfile) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build job profiles map by track
|
// Build search tracks map from search config
|
||||||
const profilePaths = settings.filter?.job_profiles || {};
|
const searchesByTrack = {};
|
||||||
const jobProfilesByTrack = {};
|
for (const search of searchConfig.searches) {
|
||||||
for (const [track, path] of Object.entries(profilePaths)) {
|
searchesByTrack[search.track] = search;
|
||||||
const profile = loadProfile(path);
|
|
||||||
if (profile) jobProfilesByTrack[track] = profile;
|
|
||||||
else console.warn(`⚠️ Could not load job profile for track "${track}" at ${path}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Filter out jobs with no profile (will pass through unscored)
|
// Filter out jobs with no matching search track
|
||||||
const filterable = jobs.filter(j => jobProfilesByTrack[j.track || 'ae']);
|
const filterable = jobs.filter(j => searchesByTrack[j.track || 'ae']);
|
||||||
const noProfile = jobs.length - filterable.length;
|
const noTrack = jobs.length - filterable.length;
|
||||||
|
|
||||||
if (noProfile > 0) console.warn(`⚠️ ${noProfile} jobs skipped — no profile for their track`);
|
if (noTrack > 0) console.warn(`⚠️ ${noTrack} jobs skipped — no search track configured`);
|
||||||
|
|
||||||
if (filterable.length === 0) {
|
if (filterable.length === 0) {
|
||||||
console.log('Nothing filterable — no job profiles configured for any track.');
|
console.log('Nothing filterable — no matching search tracks.');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const model = settings.filter?.model || DEFAULT_FILTER_MODEL;
|
const model = settings.filter?.model || DEFAULT_FILTER_MODEL;
|
||||||
const submittedAt = new Date().toISOString();
|
const submittedAt = new Date().toISOString();
|
||||||
console.log(`🚀 Submitting batches — ${filterable.length} jobs across ${Object.keys(jobProfilesByTrack).length} tracks, model: ${model}`);
|
console.log(`🚀 Submitting batches — ${filterable.length} jobs across ${Object.keys(searchesByTrack).length} tracks, model: ${model}`);
|
||||||
|
|
||||||
const submitted = await submitBatches(filterable, jobProfilesByTrack, candidateProfile, model, apiKey);
|
const submitted = await submitBatches(filterable, searchesByTrack, profilesByTrack, model, apiKey);
|
||||||
|
|
||||||
writeState({
|
writeState({
|
||||||
batches: submitted,
|
batches: submitted,
|
||||||
@@ -304,10 +302,11 @@ async function main() {
|
|||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
|
const settings = await loadConfig(resolve(__dir, 'config/settings.json'));
|
||||||
await initQueue(settings);
|
await initQueue(settings);
|
||||||
const searchConfig = loadConfig(resolve(__dir, 'config/search_config.json'));
|
const searchConfig = await loadConfig(resolve(__dir, 'config/search_config.json'));
|
||||||
const candidateProfile = loadConfig(resolve(__dir, 'config/profile.json'));
|
const baseProfile = await loadConfig(resolve(__dir, 'config/profile.json'));
|
||||||
|
const profilesByTrack = await buildTrackProfiles(baseProfile, searchConfig.searches || []);
|
||||||
|
|
||||||
console.log('🔍 claw-apply: AI Job Filter\n');
|
console.log('🔍 claw-apply: AI Job Filter\n');
|
||||||
|
|
||||||
@@ -320,7 +319,7 @@ async function main() {
|
|||||||
|
|
||||||
// Phase 2: submit any remaining unscored jobs (runs after collect too)
|
// Phase 2: submit any remaining unscored jobs (runs after collect too)
|
||||||
if (!readState()) {
|
if (!readState()) {
|
||||||
await submit(settings, searchConfig, candidateProfile);
|
await submit(settings, searchConfig, profilesByTrack);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
102
job_searcher.mjs
102
job_searcher.mjs
@@ -21,7 +21,7 @@ process.stdout.write = (chunk, ...args) => { logStream.write(chunk); return orig
|
|||||||
process.stderr.write = (chunk, ...args) => { logStream.write(chunk); return origStderrWrite(chunk, ...args); };
|
process.stderr.write = (chunk, ...args) => { logStream.write(chunk); return origStderrWrite(chunk, ...args); };
|
||||||
|
|
||||||
import { addJobs, loadQueue, loadConfig, getJobsByStatus, updateJobStatus, initQueue } from './lib/queue.mjs';
|
import { addJobs, loadQueue, loadConfig, getJobsByStatus, updateJobStatus, initQueue } from './lib/queue.mjs';
|
||||||
import { writeFileSync, readFileSync, existsSync } from 'fs';
|
import { writeFileSync, readFileSync, existsSync, unlinkSync } from 'fs';
|
||||||
import { acquireLock } from './lib/lock.mjs';
|
import { acquireLock } from './lib/lock.mjs';
|
||||||
import { createBrowser } from './lib/browser.mjs';
|
import { createBrowser } from './lib/browser.mjs';
|
||||||
import { verifyLogin as liLogin, searchLinkedIn, classifyExternalJobs } from './lib/linkedin.mjs';
|
import { verifyLogin as liLogin, searchLinkedIn, classifyExternalJobs } from './lib/linkedin.mjs';
|
||||||
@@ -29,7 +29,7 @@ import { verifyLogin as wfLogin, searchWellfound } from './lib/wellfound.mjs';
|
|||||||
import { sendTelegram, formatSearchSummary } from './lib/notify.mjs';
|
import { sendTelegram, formatSearchSummary } from './lib/notify.mjs';
|
||||||
import { DEFAULT_FIRST_RUN_DAYS } from './lib/constants.mjs';
|
import { DEFAULT_FIRST_RUN_DAYS } from './lib/constants.mjs';
|
||||||
import { generateKeywords } from './lib/keywords.mjs';
|
import { generateKeywords } from './lib/keywords.mjs';
|
||||||
import { initProgress, isCompleted, markComplete, getKeywordStart, markKeywordComplete, saveKeywords, getSavedKeywords, clearProgress } from './lib/search_progress.mjs';
|
import { initProgress, isCompleted, markComplete, getKeywordStart, markKeywordComplete, saveKeywords, getSavedKeywords, clearProgress, saveTrackLookback } from './lib/search_progress.mjs';
|
||||||
import { ensureLoggedIn } from './lib/session.mjs';
|
import { ensureLoggedIn } from './lib/session.mjs';
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
@@ -59,7 +59,7 @@ async function main() {
|
|||||||
const trackCounts = {}; // { trackName: { found, added } }
|
const trackCounts = {}; // { trackName: { found, added } }
|
||||||
const startedAt = Date.now();
|
const startedAt = Date.now();
|
||||||
|
|
||||||
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
|
const settings = await loadConfig(resolve(__dir, 'config/settings.json'));
|
||||||
await initQueue(settings);
|
await initQueue(settings);
|
||||||
|
|
||||||
const writeLastRun = (finished = false) => {
|
const writeLastRun = (finished = false) => {
|
||||||
@@ -96,46 +96,66 @@ async function main() {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Load config
|
// Load config
|
||||||
const searchConfig = loadConfig(resolve(__dir, 'config/search_config.json'));
|
const searchConfig = await loadConfig(resolve(__dir, 'config/search_config.json'));
|
||||||
|
|
||||||
// First run detection: if queue is empty, use first_run_days lookback
|
// First run detection: if queue is empty, use first_run_days lookback
|
||||||
const profile = loadConfig(resolve(__dir, 'config/profile.json'));
|
const profile = await loadConfig(resolve(__dir, 'config/profile.json'));
|
||||||
const anthropicKey = process.env.ANTHROPIC_API_KEY || settings.anthropic_api_key;
|
const anthropicKey = process.env.ANTHROPIC_API_KEY || settings.anthropic_api_key;
|
||||||
|
|
||||||
// Determine lookback: check for an in-progress run first, then fall back to first-run/normal logic
|
// Per-track lookback: each track remembers when it was last searched.
|
||||||
|
// New tracks get first_run_days (default 90), existing tracks look back since last completion.
|
||||||
|
const trackHistoryPath = resolve(__dir, 'data/track_history.json');
|
||||||
|
const trackHistory = existsSync(trackHistoryPath)
|
||||||
|
? JSON.parse(readFileSync(trackHistoryPath, 'utf8'))
|
||||||
|
: {};
|
||||||
|
|
||||||
const savedProgress = existsSync(resolve(__dir, 'data/search_progress.json'))
|
const savedProgress = existsSync(resolve(__dir, 'data/search_progress.json'))
|
||||||
? JSON.parse(readFileSync(resolve(__dir, 'data/search_progress.json'), 'utf8'))
|
? JSON.parse(readFileSync(resolve(__dir, 'data/search_progress.json'), 'utf8'))
|
||||||
: null;
|
: null;
|
||||||
const isFirstRun = loadQueue().length === 0;
|
|
||||||
|
|
||||||
// Dynamic lookback: time since last run × 1.25 buffer (min 4h, max first_run_days)
|
const nextRunPath = resolve(__dir, 'data/next_run.json');
|
||||||
function dynamicLookbackDays() {
|
let nextRunOverride = null;
|
||||||
const lastRunPath = resolve(__dir, 'data/searcher_last_run.json');
|
if (existsSync(nextRunPath)) {
|
||||||
if (!existsSync(lastRunPath)) return searchConfig.posted_within_days || 2;
|
try {
|
||||||
const lastRun = JSON.parse(readFileSync(lastRunPath, 'utf8'));
|
nextRunOverride = JSON.parse(readFileSync(nextRunPath, 'utf8'));
|
||||||
const lastRanAt = lastRun.started_at || lastRun.finished_at;
|
} catch {}
|
||||||
if (!lastRanAt) return searchConfig.posted_within_days || 2;
|
}
|
||||||
const hoursSince = (Date.now() - new Date(lastRanAt).getTime()) / (1000 * 60 * 60);
|
|
||||||
|
const defaultFirstRunDays = searchConfig.first_run_days || DEFAULT_FIRST_RUN_DAYS;
|
||||||
|
|
||||||
|
function lookbackForTrack(trackName) {
|
||||||
|
// Override applies to all tracks
|
||||||
|
if (nextRunOverride?.lookback_days) return nextRunOverride.lookback_days;
|
||||||
|
// Resuming a crashed run — use the saved per-track lookback
|
||||||
|
if (savedProgress?.track_lookback?.[trackName]) return savedProgress.track_lookback[trackName];
|
||||||
|
// Per-track history: how long since this track last completed?
|
||||||
|
const lastSearched = trackHistory[trackName]?.last_searched_at;
|
||||||
|
if (!lastSearched) return defaultFirstRunDays; // new track — full lookback
|
||||||
|
const hoursSince = (Date.now() - new Date(lastSearched).getTime()) / (1000 * 60 * 60);
|
||||||
const buffered = hoursSince * 1.25;
|
const buffered = hoursSince * 1.25;
|
||||||
const minHours = 4;
|
const minHours = 4;
|
||||||
const maxDays = searchConfig.first_run_days || DEFAULT_FIRST_RUN_DAYS;
|
return Math.min(Math.max(buffered / 24, minHours / 24), defaultFirstRunDays);
|
||||||
return Math.min(Math.max(buffered / 24, minHours / 24), maxDays);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const lookbackDays = savedProgress?.lookback_days
|
function saveTrackCompletion(trackName) {
|
||||||
|| (isFirstRun ? (searchConfig.first_run_days || DEFAULT_FIRST_RUN_DAYS) : dynamicLookbackDays());
|
trackHistory[trackName] = { last_searched_at: new Date().toISOString() };
|
||||||
|
writeFileSync(trackHistoryPath, JSON.stringify(trackHistory, null, 2));
|
||||||
if (savedProgress?.lookback_days) {
|
|
||||||
console.log(`🔁 Resuming ${lookbackDays.toFixed(2)}-day search run\n`);
|
|
||||||
} else if (isFirstRun) {
|
|
||||||
console.log(`📅 First run — looking back ${lookbackDays} days\n`);
|
|
||||||
} else {
|
|
||||||
const hours = (lookbackDays * 24).toFixed(1);
|
|
||||||
console.log(`⏱️ Dynamic lookback: ${hours}h (time since last run × 1.25)\n`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Log per-track lookback
|
||||||
|
console.log('📅 Per-track lookback:');
|
||||||
|
for (const search of searchConfig.searches) {
|
||||||
|
const days = lookbackForTrack(search.name);
|
||||||
|
const label = trackHistory[search.name]?.last_searched_at ? `${(days * 24).toFixed(1)}h since last run` : `${days}d (new track)`;
|
||||||
|
console.log(` • ${search.name}: ${label}`);
|
||||||
|
}
|
||||||
|
if (nextRunOverride?.lookback_days) console.log(` (override: ${nextRunOverride.lookback_days}d for all tracks)`);
|
||||||
|
console.log('');
|
||||||
|
|
||||||
// Init progress tracking — enables resume on restart
|
// Init progress tracking — enables resume on restart
|
||||||
initProgress(resolve(__dir, 'data'), lookbackDays);
|
// Use max lookback across tracks for progress file identity
|
||||||
|
const maxLookback = Math.max(...searchConfig.searches.map(s => lookbackForTrack(s.name)));
|
||||||
|
initProgress(resolve(__dir, 'data'), maxLookback);
|
||||||
|
|
||||||
// Enhance keywords with AI — reuse saved keywords from progress if resuming, never regenerate mid-run
|
// Enhance keywords with AI — reuse saved keywords from progress if resuming, never regenerate mid-run
|
||||||
for (const search of searchConfig.searches) {
|
for (const search of searchConfig.searches) {
|
||||||
@@ -173,7 +193,7 @@ async function main() {
|
|||||||
console.log(' Creating browser...');
|
console.log(' Creating browser...');
|
||||||
liBrowser = await createBrowser(settings, 'linkedin');
|
liBrowser = await createBrowser(settings, 'linkedin');
|
||||||
console.log(' Browser connected, verifying login...');
|
console.log(' Browser connected, verifying login...');
|
||||||
const loggedIn = await ensureLoggedIn(liBrowser.page, liLogin, 'linkedin', settings.kernel_api_key || process.env.KERNEL_API_KEY, settings.kernel?.connection_ids || {});
|
const loggedIn = await ensureLoggedIn(liBrowser.page, liLogin, 'linkedin', settings.kernel_api_key || process.env.KERNEL_API_KEY);
|
||||||
if (!loggedIn) throw new Error('LinkedIn not logged in');
|
if (!loggedIn) throw new Error('LinkedIn not logged in');
|
||||||
console.log(' ✅ Logged in');
|
console.log(' ✅ Logged in');
|
||||||
|
|
||||||
@@ -184,7 +204,9 @@ async function main() {
|
|||||||
}
|
}
|
||||||
const keywordStart = getKeywordStart('linkedin', search.name);
|
const keywordStart = getKeywordStart('linkedin', search.name);
|
||||||
if (keywordStart > 0) console.log(` [${search.name}] resuming from keyword ${keywordStart + 1}/${search.keywords.length}`);
|
if (keywordStart > 0) console.log(` [${search.name}] resuming from keyword ${keywordStart + 1}/${search.keywords.length}`);
|
||||||
const effectiveSearch = { ...search, keywords: search.keywords.slice(keywordStart), keywordOffset: keywordStart, filters: { ...search.filters, posted_within_days: lookbackDays } };
|
const trackLookback = lookbackForTrack(search.name);
|
||||||
|
saveTrackLookback(search.name, trackLookback);
|
||||||
|
const effectiveSearch = { ...search, keywords: search.keywords.slice(keywordStart), keywordOffset: keywordStart, filters: { ...search.filters, posted_within_days: trackLookback } };
|
||||||
let queryFound = 0, queryAdded = 0;
|
let queryFound = 0, queryAdded = 0;
|
||||||
try {
|
try {
|
||||||
await searchLinkedIn(liBrowser.page, effectiveSearch, {
|
await searchLinkedIn(liBrowser.page, effectiveSearch, {
|
||||||
@@ -215,6 +237,7 @@ async function main() {
|
|||||||
}
|
}
|
||||||
console.log(`\r [${search.name}] ${queryFound} found, ${queryAdded} new`);
|
console.log(`\r [${search.name}] ${queryFound} found, ${queryAdded} new`);
|
||||||
markComplete('linkedin', search.name, { found: queryFound, added: queryAdded });
|
markComplete('linkedin', search.name, { found: queryFound, added: queryAdded });
|
||||||
|
saveTrackCompletion(search.name);
|
||||||
const tc = trackCounts[search.name] || (trackCounts[search.name] = { found: 0, added: 0 });
|
const tc = trackCounts[search.name] || (trackCounts[search.name] = { found: 0, added: 0 });
|
||||||
tc.found += queryFound; tc.added += queryAdded;
|
tc.found += queryFound; tc.added += queryAdded;
|
||||||
// Save progress after each search track
|
// Save progress after each search track
|
||||||
@@ -224,12 +247,9 @@ async function main() {
|
|||||||
platformsRun.push('LinkedIn');
|
platformsRun.push('LinkedIn');
|
||||||
|
|
||||||
// Classify unknown_external jobs using the existing LinkedIn browser session
|
// Classify unknown_external jobs using the existing LinkedIn browser session
|
||||||
// Cap at 50 per run — each one requires a click + redirect wait
|
const unclassified = getJobsByStatus('new').filter(j => j.apply_type === 'unknown_external' && !j.apply_url);
|
||||||
const MAX_CLASSIFY = 50;
|
|
||||||
const allUnclassified = getJobsByStatus('new').filter(j => j.apply_type === 'unknown_external' && !j.apply_url);
|
|
||||||
const unclassified = allUnclassified.slice(0, MAX_CLASSIFY);
|
|
||||||
if (unclassified.length > 0) {
|
if (unclassified.length > 0) {
|
||||||
console.log(`\n🔍 Classifying ${unclassified.length}/${allUnclassified.length} external jobs...`);
|
console.log(`\n🔍 Classifying ${unclassified.length} external jobs...`);
|
||||||
try {
|
try {
|
||||||
const { classified, remaining } = await classifyExternalJobs(liBrowser.page, unclassified, async (job, applyType, applyUrl) => {
|
const { classified, remaining } = await classifyExternalJobs(liBrowser.page, unclassified, async (job, applyType, applyUrl) => {
|
||||||
await updateJobStatus(job.id, 'new', { apply_type: applyType, apply_url: applyUrl });
|
await updateJobStatus(job.id, 'new', { apply_type: applyType, apply_url: applyUrl });
|
||||||
@@ -264,7 +284,7 @@ async function main() {
|
|||||||
console.log(' Creating browser...');
|
console.log(' Creating browser...');
|
||||||
wfBrowser = await createBrowser(settings, 'wellfound');
|
wfBrowser = await createBrowser(settings, 'wellfound');
|
||||||
console.log(' Browser connected, verifying login...');
|
console.log(' Browser connected, verifying login...');
|
||||||
const loggedIn = await ensureLoggedIn(wfBrowser.page, wfLogin, 'wellfound', settings.kernel_api_key || process.env.KERNEL_API_KEY, settings.kernel?.connection_ids || {});
|
const loggedIn = await ensureLoggedIn(wfBrowser.page, wfLogin, 'wellfound', settings.kernel_api_key || process.env.KERNEL_API_KEY);
|
||||||
if (!loggedIn) console.warn(' ⚠️ Wellfound login unconfirmed, proceeding');
|
if (!loggedIn) console.warn(' ⚠️ Wellfound login unconfirmed, proceeding');
|
||||||
else console.log(' ✅ Logged in');
|
else console.log(' ✅ Logged in');
|
||||||
|
|
||||||
@@ -273,7 +293,9 @@ async function main() {
|
|||||||
console.log(` [${search.name}] ✓ already done, skipping`);
|
console.log(` [${search.name}] ✓ already done, skipping`);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
const effectiveSearch = { ...search, filters: { ...search.filters, posted_within_days: lookbackDays } };
|
const trackLookback = lookbackForTrack(search.name);
|
||||||
|
saveTrackLookback(search.name, trackLookback);
|
||||||
|
const effectiveSearch = { ...search, filters: { ...search.filters, posted_within_days: trackLookback } };
|
||||||
let queryFound = 0, queryAdded = 0;
|
let queryFound = 0, queryAdded = 0;
|
||||||
try {
|
try {
|
||||||
await searchWellfound(wfBrowser.page, effectiveSearch, {
|
await searchWellfound(wfBrowser.page, effectiveSearch, {
|
||||||
@@ -300,6 +322,7 @@ async function main() {
|
|||||||
}
|
}
|
||||||
console.log(`\r [${search.name}] ${queryFound} found, ${queryAdded} new`);
|
console.log(`\r [${search.name}] ${queryFound} found, ${queryAdded} new`);
|
||||||
markComplete('wellfound', search.name, { found: queryFound, added: queryAdded });
|
markComplete('wellfound', search.name, { found: queryFound, added: queryAdded });
|
||||||
|
saveTrackCompletion(search.name);
|
||||||
const tc = trackCounts[search.name] || (trackCounts[search.name] = { found: 0, added: 0 });
|
const tc = trackCounts[search.name] || (trackCounts[search.name] = { found: 0, added: 0 });
|
||||||
tc.found += queryFound; tc.added += queryAdded;
|
tc.found += queryFound; tc.added += queryAdded;
|
||||||
writeLastRun(false);
|
writeLastRun(false);
|
||||||
@@ -335,6 +358,11 @@ async function main() {
|
|||||||
}
|
}
|
||||||
clearProgress(); // run finished cleanly — next run starts fresh with new keywords
|
clearProgress(); // run finished cleanly — next run starts fresh with new keywords
|
||||||
|
|
||||||
|
// Consume override file so next cron run uses dynamic lookback
|
||||||
|
if (existsSync(nextRunPath)) {
|
||||||
|
try { unlinkSync(nextRunPath); } catch {}
|
||||||
|
}
|
||||||
|
|
||||||
console.log(`\n✅ Search complete at ${new Date().toISOString()}`);
|
console.log(`\n✅ Search complete at ${new Date().toISOString()}`);
|
||||||
return { added: totalAdded, seen: totalSeen };
|
return { added: totalAdded, seen: totalSeen };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -31,8 +31,8 @@ export const LINKEDIN_MAX_MODAL_STEPS = 20;
|
|||||||
export const WELLFOUND_BASE = 'https://wellfound.com';
|
export const WELLFOUND_BASE = 'https://wellfound.com';
|
||||||
|
|
||||||
// --- Browser ---
|
// --- Browser ---
|
||||||
export const KERNEL_SDK_PATH = '/home/ubuntu/.openclaw/workspace/node_modules/@onkernel/sdk/index.js';
|
export const KERNEL_SDK_PATH = '@onkernel/sdk';
|
||||||
export const DEFAULT_PLAYWRIGHT_PATH = '/home/ubuntu/.npm-global/lib/node_modules/playwright/index.mjs';
|
export const DEFAULT_PLAYWRIGHT_PATH = 'playwright';
|
||||||
|
|
||||||
// --- Search ---
|
// --- Search ---
|
||||||
export const LINKEDIN_MAX_SEARCH_PAGES = 40;
|
export const LINKEDIN_MAX_SEARCH_PAGES = 40;
|
||||||
|
|||||||
@@ -4,7 +4,6 @@
|
|||||||
* Uses Batch API for 50% cost savings + prompt caching for shared context
|
* Uses Batch API for 50% cost savings + prompt caching for shared context
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { readFileSync, existsSync } from 'fs';
|
|
||||||
import {
|
import {
|
||||||
ANTHROPIC_BATCH_API_URL, FILTER_DESC_MAX_CHARS, FILTER_BATCH_MAX_TOKENS
|
ANTHROPIC_BATCH_API_URL, FILTER_DESC_MAX_CHARS, FILTER_BATCH_MAX_TOKENS
|
||||||
} from './constants.mjs';
|
} from './constants.mjs';
|
||||||
@@ -13,28 +12,29 @@ import {
|
|||||||
// Helpers
|
// Helpers
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
export function loadProfile(profilePath) {
|
function buildSystemPrompt(searchTrack, candidateProfile) {
|
||||||
if (!profilePath || !existsSync(profilePath)) return null;
|
const trackInfo = {
|
||||||
try { return JSON.parse(readFileSync(profilePath, 'utf8')); } catch { return null; }
|
keywords: searchTrack.keywords,
|
||||||
}
|
exclude_keywords: searchTrack.exclude_keywords,
|
||||||
|
salary_min: searchTrack.salary_min,
|
||||||
|
remote: searchTrack.filters?.remote,
|
||||||
|
};
|
||||||
|
|
||||||
function buildSystemPrompt(jobProfile, candidateProfile) {
|
return `You are a job relevance scorer. Score each job listing 0-10 based on how well it matches the candidate and search criteria below.
|
||||||
return `You are a job relevance scorer. Score each job listing 0-10 based on how well it matches the candidate profile below.
|
|
||||||
|
|
||||||
## Candidate Profile
|
## Candidate
|
||||||
${JSON.stringify(candidateProfile, null, 2)}
|
${JSON.stringify(candidateProfile, null, 2)}
|
||||||
|
|
||||||
## Target Job Profile
|
## Search Criteria (${searchTrack.name})
|
||||||
${JSON.stringify(jobProfile, null, 2)}
|
${JSON.stringify(trackInfo)}
|
||||||
|
|
||||||
## Instructions
|
## Instructions
|
||||||
- Use the candidate profile and target job profile above as your only criteria
|
- Score based on title fit, industry fit, experience match, salary range, location/remote, and exclude_keywords
|
||||||
- Score based on title fit, industry fit, experience match, salary range, location/remote requirements, and any exclude_keywords
|
|
||||||
- 10 = perfect match, 0 = completely irrelevant
|
- 10 = perfect match, 0 = completely irrelevant
|
||||||
- If salary is unknown, do not penalize
|
- If salary is unknown, do not penalize
|
||||||
- If a posting is from a staffing agency but the role itself matches, score the role — not the agency
|
- If a posting is from a staffing agency but the role itself matches, score the role — not the agency
|
||||||
|
|
||||||
Return ONLY a JSON object: {"score": <0-10>, "reason": "<one concise line>"}`;
|
Return ONLY: {"score": <0-10>, "reason": "<one concise line>"}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function sanitize(str) {
|
function sanitize(str) {
|
||||||
@@ -66,27 +66,28 @@ function apiHeaders(apiKey) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Submit one batch per track (one per job profile/search description).
|
* Submit one batch per track.
|
||||||
* Each batch uses the system prompt for that track only — maximizes prompt caching.
|
* Each batch uses the system prompt for that track only — maximizes prompt caching.
|
||||||
* Returns array of { track, batchId, idMap, jobCount }
|
* Returns array of { track, batchId, idMap, jobCount }
|
||||||
*/
|
*/
|
||||||
export async function submitBatches(jobs, jobProfilesByTrack, candidateProfile, model, apiKey) {
|
export async function submitBatches(jobs, searchesByTrack, profilesByTrack, model, apiKey) {
|
||||||
// Group jobs by track
|
// Group jobs by track
|
||||||
const byTrack = {};
|
const byTrack = {};
|
||||||
for (const job of jobs) {
|
for (const job of jobs) {
|
||||||
const track = job.track || 'ae';
|
const track = job.track || 'ae';
|
||||||
if (!jobProfilesByTrack[track]) continue; // no profile → skip
|
if (!searchesByTrack[track]) continue;
|
||||||
if (!byTrack[track]) byTrack[track] = [];
|
if (!byTrack[track]) byTrack[track] = [];
|
||||||
byTrack[track].push(job);
|
byTrack[track].push(job);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (Object.keys(byTrack).length === 0) throw new Error('No jobs to submit — check job profiles are configured');
|
if (Object.keys(byTrack).length === 0) throw new Error('No jobs to submit — check search config has tracks matching queued jobs');
|
||||||
|
|
||||||
const submitted = [];
|
const submitted = [];
|
||||||
|
|
||||||
for (const [track, trackJobs] of Object.entries(byTrack)) {
|
for (const [track, trackJobs] of Object.entries(byTrack)) {
|
||||||
const jobProfile = jobProfilesByTrack[track];
|
const searchTrack = searchesByTrack[track];
|
||||||
const systemPrompt = buildSystemPrompt(jobProfile, candidateProfile);
|
const trackProfile = profilesByTrack[track] || profilesByTrack._base;
|
||||||
|
const systemPrompt = buildSystemPrompt(searchTrack, trackProfile);
|
||||||
const idMap = {};
|
const idMap = {};
|
||||||
const requests = [];
|
const requests = [];
|
||||||
|
|
||||||
|
|||||||
@@ -128,16 +128,15 @@ export class FormFiller {
|
|||||||
this.jobContext = opts.jobContext || {};
|
this.jobContext = opts.jobContext || {};
|
||||||
}
|
}
|
||||||
|
|
||||||
saveAnswer(pattern, answer) {
|
async saveAnswer(pattern, answer) {
|
||||||
if (!pattern || !answer) return;
|
if (!pattern || !answer) return;
|
||||||
const existing = this.answers.findIndex(a => a.pattern === pattern);
|
const existing = this.answers.findIndex(a => a.pattern === pattern);
|
||||||
if (existing >= 0) return;
|
if (existing >= 0) return;
|
||||||
this.answers.push({ pattern, answer });
|
this.answers.push({ pattern, answer });
|
||||||
if (this.answersPath) {
|
if (this.answersPath) {
|
||||||
try {
|
try {
|
||||||
const tmp = this.answersPath + '.tmp';
|
const { saveJSON } = await import('./storage.mjs');
|
||||||
writeFileSync(tmp, JSON.stringify(this.answers, null, 2));
|
await saveJSON(this.answersPath, this.answers);
|
||||||
renameSync(tmp, this.answersPath);
|
|
||||||
} catch { /* best effort */ }
|
} catch { /* best effort */ }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -626,7 +625,7 @@ Answer:`;
|
|||||||
const needsAnswer = field.required || field.type === 'number';
|
const needsAnswer = field.required || field.type === 'number';
|
||||||
if (answer === null && needsAnswer) {
|
if (answer === null && needsAnswer) {
|
||||||
answer = await this.aiAnswerFor(formatHint ? `${lbl} ${formatHint}` : lbl);
|
answer = await this.aiAnswerFor(formatHint ? `${lbl} ${formatHint}` : lbl);
|
||||||
if (answer) this.saveAnswer(lbl, answer);
|
if (answer) await this.saveAnswer(lbl, answer);
|
||||||
}
|
}
|
||||||
// For number fields, extract just the numeric value
|
// For number fields, extract just the numeric value
|
||||||
if (answer && field.type === 'number') {
|
if (answer && field.type === 'number') {
|
||||||
@@ -656,7 +655,7 @@ Answer:`;
|
|||||||
let answer = this.answerFor(field.label);
|
let answer = this.answerFor(field.label);
|
||||||
if (!answer && field.required) {
|
if (!answer && field.required) {
|
||||||
answer = await this.aiAnswerFor(taFormatHint ? `${field.label} ${taFormatHint}` : field.label);
|
answer = await this.aiAnswerFor(taFormatHint ? `${field.label} ${taFormatHint}` : field.label);
|
||||||
if (answer) this.saveAnswer(field.label, answer);
|
if (answer) await this.saveAnswer(field.label, answer);
|
||||||
}
|
}
|
||||||
if (answer) {
|
if (answer) {
|
||||||
const el = await byTag(field.tag);
|
const el = await byTag(field.tag);
|
||||||
@@ -683,7 +682,7 @@ Answer:`;
|
|||||||
}
|
}
|
||||||
if (!answer) {
|
if (!answer) {
|
||||||
answer = await this.aiAnswerFor(field.legend, { options: field.options });
|
answer = await this.aiAnswerFor(field.legend, { options: field.options });
|
||||||
if (answer) this.saveAnswer(field.legend, answer);
|
if (answer) await this.saveAnswer(field.legend, answer);
|
||||||
}
|
}
|
||||||
if (answer) {
|
if (answer) {
|
||||||
const fs = await byTag(field.tag);
|
const fs = await byTag(field.tag);
|
||||||
@@ -763,7 +762,7 @@ Answer:`;
|
|||||||
if (field.required) {
|
if (field.required) {
|
||||||
answer = await this.aiAnswerFor(field.label, { options: field.options });
|
answer = await this.aiAnswerFor(field.label, { options: field.options });
|
||||||
if (answer) {
|
if (answer) {
|
||||||
this.saveAnswer(field.label, answer);
|
await this.saveAnswer(field.label, answer);
|
||||||
} else {
|
} else {
|
||||||
unknown.push({ label: field.label, type: 'select', options: field.options });
|
unknown.push({ label: field.label, type: 'select', options: field.options });
|
||||||
continue;
|
continue;
|
||||||
|
|||||||
@@ -26,7 +26,6 @@ export async function searchLinkedIn(page, search, { onPage, onKeyword } = {}) {
|
|||||||
console.log(` [${search.track_label || search.track}] keyword ${globalIdx + 1}/${globalTotal}: "${keyword}"`);
|
console.log(` [${search.track_label || search.track}] keyword ${globalIdx + 1}/${globalTotal}: "${keyword}"`);
|
||||||
const params = new URLSearchParams({ keywords: keyword, sortBy: 'DD' });
|
const params = new URLSearchParams({ keywords: keyword, sortBy: 'DD' });
|
||||||
if (search.filters?.remote) params.set('f_WT', '2');
|
if (search.filters?.remote) params.set('f_WT', '2');
|
||||||
if (search.filters?.easy_apply_only) params.set('f_LF', 'f_AL');
|
|
||||||
if (search.filters?.posted_within_days) {
|
if (search.filters?.posted_within_days) {
|
||||||
params.set('f_TPR', `r${search.filters.posted_within_days * LINKEDIN_SECONDS_PER_DAY}`);
|
params.set('f_TPR', `r${search.filters.posted_within_days * LINKEDIN_SECONDS_PER_DAY}`);
|
||||||
}
|
}
|
||||||
|
|||||||
65
lib/profile.mjs
Normal file
65
lib/profile.mjs
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
/**
|
||||||
|
* profile.mjs — Per-track profile management
|
||||||
|
*
|
||||||
|
* Each search track can override parts of the base profile.json:
|
||||||
|
* - profile_overrides: { ... } inline in search_config.json
|
||||||
|
* - profile_path: "config/profiles/se_profile.json" (loaded + merged)
|
||||||
|
*
|
||||||
|
* Base profile has shared info (name, phone, location, work auth).
|
||||||
|
* Track overrides customize resume, cover letter, experience highlights, etc.
|
||||||
|
*/
|
||||||
|
import { loadJSON } from './storage.mjs';
|
||||||
|
|
||||||
|
/**
 * Deep-merge `b` into `a` without mutating either input; `b` wins on
 * conflicts. Plain nested objects merge recursively; arrays and every
 * other value type are replaced wholesale, never concatenated.
 * @param {object} a - base object
 * @param {object} b - overrides
 * @returns {object} a new merged object
 */
function deepMerge(a, b) {
  const merged = { ...a };
  for (const key of Object.keys(b)) {
    const incoming = b[key];
    const current = merged[key];
    const mergeable =
      incoming && typeof incoming === 'object' && !Array.isArray(incoming) &&
      typeof current === 'object' && !Array.isArray(current);
    merged[key] = mergeable ? deepMerge(current, incoming) : incoming;
  }
  return merged;
}
|
||||||
|
|
||||||
|
/**
 * Build the per-track profile map from the base profile and search config.
 * Every track starts from the shared base profile, then merges an optional
 * external profile file (search.profile_path), then inline
 * search.profile_overrides — inline overrides take precedence.
 * @param {object} baseProfile - shared profile.json contents
 * @param {Array<object>} searches - search config track entries
 * @returns {Promise<object>} { _base: baseProfile, [track]: mergedProfile, ... }
 */
export async function buildTrackProfiles(baseProfile, searches) {
  const profiles = { _base: baseProfile };

  for (const search of searches) {
    const track = search.track;
    let merged = baseProfile;

    // External profile file, merged first (lower precedence)
    if (search.profile_path) {
      try {
        const fileOverrides = await loadJSON(search.profile_path, null);
        if (fileOverrides) merged = deepMerge(merged, fileOverrides);
      } catch (e) {
        console.warn(`  ⚠️ [${track}] Could not load profile ${search.profile_path}: ${e.message}`);
      }
    }

    // Inline overrides merged last, so they win over profile_path
    if (search.profile_overrides) merged = deepMerge(merged, search.profile_overrides);

    profiles[track] = merged;
  }

  return profiles;
}
|
||||||
|
|
||||||
|
/**
 * Resolve the profile for one track from a profiles map, falling back to
 * the shared base profile when the track has no entry of its own.
 * @param {object} profilesByTrack - map produced by buildTrackProfiles()
 * @param {string} track - track key
 * @returns {object} the track-specific profile, or the `_base` profile
 */
export function getTrackProfile(profilesByTrack, track) {
  const trackSpecific = profilesByTrack[track];
  if (trackSpecific) return trackSpecific;
  return profilesByTrack._base;
}
|
||||||
@@ -20,10 +20,23 @@ const LOG_PATH = `${__dir}/../data/applications_log.json`;
|
|||||||
const UPDATES_PATH = `${__dir}/../data/queue_updates.jsonl`;
|
const UPDATES_PATH = `${__dir}/../data/queue_updates.jsonl`;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Load and validate a JSON config file. Throws with a clear message on failure.
|
* Load and validate a JSON config file.
|
||||||
|
* Uses the storage layer (S3 or disk) when initialized.
|
||||||
|
* Falls back to direct disk read for bootstrap (settings.json loaded before initQueue).
|
||||||
*/
|
*/
|
||||||
export function loadConfig(filePath) {
|
export async function loadConfig(filePath) {
|
||||||
const resolved = resolve(filePath);
|
const resolved = resolve(filePath);
|
||||||
|
|
||||||
|
// If storage is initialized, use the storage layer
|
||||||
|
if (_initialized) {
|
||||||
|
const data = await loadJSON(resolved, null);
|
||||||
|
if (data === null) {
|
||||||
|
throw new Error(`Config file not found: ${resolved}\nCopy the matching .example.json and fill in your values.`);
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Bootstrap fallback (settings.json loaded before initQueue)
|
||||||
if (!existsSync(resolved)) {
|
if (!existsSync(resolved)) {
|
||||||
throw new Error(`Config file not found: ${resolved}\nCopy the matching .example.json and fill in your values.`);
|
throw new Error(`Config file not found: ${resolved}\nCopy the matching .example.json and fill in your values.`);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,12 +7,13 @@ import { readFileSync, writeFileSync, existsSync, unlinkSync } from 'fs';
|
|||||||
let progressPath = null;
|
let progressPath = null;
|
||||||
let progress = null;
|
let progress = null;
|
||||||
|
|
||||||
export function initProgress(dataDir, lookbackDays) {
|
export function initProgress(dataDir, maxLookbackDays) {
|
||||||
progressPath = `${dataDir}/search_progress.json`;
|
progressPath = `${dataDir}/search_progress.json`;
|
||||||
|
|
||||||
if (existsSync(progressPath)) {
|
if (existsSync(progressPath)) {
|
||||||
const saved = JSON.parse(readFileSync(progressPath, 'utf8'));
|
const saved = JSON.parse(readFileSync(progressPath, 'utf8'));
|
||||||
if (saved.lookback_days === lookbackDays) {
|
// Resume if an in-progress run exists (has uncompleted tracks)
|
||||||
|
if (saved.started_at && !saved.finished) {
|
||||||
progress = saved;
|
progress = saved;
|
||||||
const done = progress.completed?.length ?? 0;
|
const done = progress.completed?.length ?? 0;
|
||||||
if (done > 0) {
|
if (done > 0) {
|
||||||
@@ -20,19 +21,27 @@ export function initProgress(dataDir, lookbackDays) {
|
|||||||
}
|
}
|
||||||
return progress;
|
return progress;
|
||||||
}
|
}
|
||||||
console.log(`🆕 New lookback window (${lookbackDays}d), starting fresh\n`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
progress = {
|
progress = {
|
||||||
lookback_days: lookbackDays,
|
lookback_days: maxLookbackDays,
|
||||||
|
track_lookback: {}, // per-track lookback days, saved on init for crash resume
|
||||||
started_at: Date.now(),
|
started_at: Date.now(),
|
||||||
completed: [],
|
completed: [],
|
||||||
keyword_progress: {}, // key: "platform:track" → last completed keyword index (0-based)
|
keyword_progress: {},
|
||||||
};
|
};
|
||||||
save();
|
save();
|
||||||
return progress;
|
return progress;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
 * Record the lookback window used for one track so a crash-resumed run
 * reuses the same value. No-op when progress tracking isn't initialized.
 */
export function saveTrackLookback(trackName, days) {
  if (!progress) return;
  progress.track_lookback = progress.track_lookback || {};
  progress.track_lookback[trackName] = days;
  save();
}
|
||||||
|
|
||||||
/** Save generated keywords for a track — reused on resume, never regenerated mid-run */
|
/** Save generated keywords for a track — reused on resume, never regenerated mid-run */
|
||||||
export function saveKeywords(platform, track, keywords) {
|
export function saveKeywords(platform, track, keywords) {
|
||||||
if (!progress) return;
|
if (!progress) return;
|
||||||
|
|||||||
@@ -11,9 +11,8 @@
|
|||||||
* storage: { type: "local" } (default)
|
* storage: { type: "local" } (default)
|
||||||
*/
|
*/
|
||||||
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
import { basename, dirname, join, resolve as resolvePath } from 'path';
import { fileURLToPath } from 'url';
import { tmpdir } from 'os';
|
|
||||||
|
|
||||||
let _s3Client = null;
|
let _s3Client = null;
|
||||||
let _config = { type: 'local' };
|
let _config = { type: 'local' };
|
||||||
@@ -27,7 +26,16 @@ export function storageType() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
 * Map a local file path to its S3 object key: the path relative to the
 * project root (e.g. "data/jobs_queue.json"). Paths outside the project
 * tree fall back to their last two segments.
 * @param {string} filePath - absolute or project-relative file path
 * @returns {string} S3 key
 */
function getS3Key(filePath) {
  // fileURLToPath correctly decodes percent-encoding (e.g. %20 for spaces)
  // and handles platform quirks, unlike reading URL.pathname directly.
  const projectRoot = dirname(dirname(fileURLToPath(import.meta.url)));
  const abs = resolvePath(filePath.startsWith('/') ? filePath : join(projectRoot, filePath));
  if (abs.startsWith(projectRoot + '/')) {
    return abs.slice(projectRoot.length + 1);
  }
  // Fallback: use last two path segments
  const parts = abs.split('/');
  return parts.slice(-2).join('/');
}
|
||||||
|
|
||||||
async function getS3Client() {
|
async function getS3Client() {
|
||||||
@@ -60,6 +68,7 @@ export async function loadJSON(filePath, defaultValue = []) {
|
|||||||
return parsed;
|
return parsed;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (err.name === 'NoSuchKey') return defaultValue;
|
if (err.name === 'NoSuchKey') return defaultValue;
|
||||||
|
if (err.$metadata?.httpStatusCode === 404) return defaultValue;
|
||||||
console.warn(`⚠️ S3 load failed for ${basename(filePath)}: ${err.message}`);
|
console.warn(`⚠️ S3 load failed for ${basename(filePath)}: ${err.message}`);
|
||||||
return defaultValue;
|
return defaultValue;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,11 +10,12 @@
|
|||||||
* 4. Flip job back to "new" for retry
|
* 4. Flip job back to "new" for retry
|
||||||
* 5. Send confirmation reply
|
* 5. Send confirmation reply
|
||||||
*/
|
*/
|
||||||
import { existsSync, readFileSync, writeFileSync, renameSync } from 'fs';
|
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||||
import { dirname, resolve } from 'path';
|
import { dirname, resolve } from 'path';
|
||||||
import { fileURLToPath } from 'url';
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
import { loadQueue, saveQueue } from './queue.mjs';
|
import { loadQueue, saveQueue } from './queue.mjs';
|
||||||
|
import { loadJSON, saveJSON } from './storage.mjs';
|
||||||
import { getTelegramUpdates, replyTelegram } from './notify.mjs';
|
import { getTelegramUpdates, replyTelegram } from './notify.mjs';
|
||||||
|
|
||||||
const __dir = dirname(fileURLToPath(import.meta.url));
|
const __dir = dirname(fileURLToPath(import.meta.url));
|
||||||
@@ -31,27 +32,10 @@ function saveOffset(offset) {
|
|||||||
writeFileSync(OFFSET_PATH, JSON.stringify({ offset }));
|
writeFileSync(OFFSET_PATH, JSON.stringify({ offset }));
|
||||||
}
|
}
|
||||||
|
|
||||||
function loadAnswers(path) {
|
|
||||||
if (!existsSync(path)) return [];
|
|
||||||
const raw = JSON.parse(readFileSync(path, 'utf8'));
|
|
||||||
// Normalize: support both object {"q":"a"} and array [{pattern,answer}] formats
|
|
||||||
if (Array.isArray(raw)) return raw;
|
|
||||||
if (raw && typeof raw === 'object') {
|
|
||||||
return Object.entries(raw).map(([pattern, answer]) => ({ pattern, answer: String(answer) }));
|
|
||||||
}
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
function saveAnswers(path, answers) {
|
|
||||||
const tmp = path + '.tmp';
|
|
||||||
writeFileSync(tmp, JSON.stringify(answers, null, 2));
|
|
||||||
renameSync(tmp, path);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Process pending Telegram replies. Returns number of answers processed.
|
* Process pending Telegram replies. Returns number of answers processed.
|
||||||
* @param {object} settings - settings.json contents
|
* @param {object} settings - settings.json contents
|
||||||
* @param {string} answersPath - absolute path to answers.json
|
* @param {string} answersPath - path to answers.json (used as storage key)
|
||||||
* @returns {number} count of answers saved
|
* @returns {number} count of answers saved
|
||||||
*/
|
*/
|
||||||
export async function processTelegramReplies(settings, answersPath) {
|
export async function processTelegramReplies(settings, answersPath) {
|
||||||
@@ -63,7 +47,7 @@ export async function processTelegramReplies(settings, answersPath) {
|
|||||||
const updates = await getTelegramUpdates(botToken, offset, 1);
|
const updates = await getTelegramUpdates(botToken, offset, 1);
|
||||||
if (updates.length === 0) return 0;
|
if (updates.length === 0) return 0;
|
||||||
|
|
||||||
// Build lookup: telegram_message_id → job
|
// Build lookup: telegram_message_id -> job
|
||||||
const queue = loadQueue();
|
const queue = loadQueue();
|
||||||
const jobsByMsgId = new Map();
|
const jobsByMsgId = new Map();
|
||||||
for (const job of queue) {
|
for (const job of queue) {
|
||||||
@@ -74,7 +58,7 @@ export async function processTelegramReplies(settings, answersPath) {
|
|||||||
|
|
||||||
let queueDirty = false;
|
let queueDirty = false;
|
||||||
let answersDirty = false;
|
let answersDirty = false;
|
||||||
const answers = loadAnswers(answersPath);
|
const answers = await loadJSON(answersPath, []);
|
||||||
let maxUpdateId = offset;
|
let maxUpdateId = offset;
|
||||||
let processed = 0;
|
let processed = 0;
|
||||||
|
|
||||||
@@ -162,7 +146,7 @@ export async function processTelegramReplies(settings, answersPath) {
|
|||||||
processed++;
|
processed++;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (answersDirty) saveAnswers(answersPath, answers);
|
if (answersDirty) await saveJSON(answersPath, answers);
|
||||||
if (queueDirty) await saveQueue(queue);
|
if (queueDirty) await saveQueue(queue);
|
||||||
saveOffset(maxUpdateId + 1);
|
saveOffset(maxUpdateId + 1);
|
||||||
|
|
||||||
|
|||||||
@@ -18,9 +18,9 @@ async function main() {
|
|||||||
|
|
||||||
// Check configs
|
// Check configs
|
||||||
console.log('Checking config files...');
|
console.log('Checking config files...');
|
||||||
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
|
const settings = await loadConfig(resolve(__dir, 'config/settings.json'));
|
||||||
const profile = loadConfig(resolve(__dir, 'config/profile.json'));
|
const profile = await loadConfig(resolve(__dir, 'config/profile.json'));
|
||||||
const searchConfig = loadConfig(resolve(__dir, 'config/search_config.json'));
|
const searchConfig = await loadConfig(resolve(__dir, 'config/search_config.json'));
|
||||||
|
|
||||||
const checks = [
|
const checks = [
|
||||||
[profile.name?.first && profile.name?.last, 'profile.json: name'],
|
[profile.name?.first && profile.name?.last, 'profile.json: name'],
|
||||||
|
|||||||
69
status.mjs
69
status.mjs
@@ -9,6 +9,12 @@ import { readFileSync, existsSync } from 'fs';
|
|||||||
import { dirname, resolve } from 'path';
|
import { dirname, resolve } from 'path';
|
||||||
import { fileURLToPath } from 'url';
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
|
import { loadEnv } from './lib/env.mjs';
|
||||||
|
loadEnv();
|
||||||
|
|
||||||
|
import { loadConfig, initQueue, loadQueue } from './lib/queue.mjs';
|
||||||
|
import { sendTelegram } from './lib/notify.mjs';
|
||||||
|
|
||||||
const __dir = dirname(fileURLToPath(import.meta.url));
|
const __dir = dirname(fileURLToPath(import.meta.url));
|
||||||
const jsonMode = process.argv.includes('--json');
|
const jsonMode = process.argv.includes('--json');
|
||||||
|
|
||||||
@@ -32,7 +38,6 @@ function buildSearchProgress() {
|
|||||||
const sp = readJson(resolve(__dir, 'data/search_progress.json'));
|
const sp = readJson(resolve(__dir, 'data/search_progress.json'));
|
||||||
if (!sp) return null;
|
if (!sp) return null;
|
||||||
|
|
||||||
// Build unique track list from completed + keyword_progress, prefer platform-specific key
|
|
||||||
const seen = new Set();
|
const seen = new Set();
|
||||||
const tracks = [];
|
const tracks = [];
|
||||||
|
|
||||||
@@ -62,10 +67,7 @@ function buildSearchProgress() {
|
|||||||
return tracks;
|
return tracks;
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildStatus() {
|
function buildStatus(queue, log) {
|
||||||
const queue = readJson(resolve(__dir, 'data/jobs_queue.json')) || [];
|
|
||||||
const log = readJson(resolve(__dir, 'data/applications_log.json')) || [];
|
|
||||||
|
|
||||||
// Queue breakdown
|
// Queue breakdown
|
||||||
const byStatus = {};
|
const byStatus = {};
|
||||||
const byPlatform = {};
|
const byPlatform = {};
|
||||||
@@ -162,27 +164,27 @@ function formatReport(s) {
|
|||||||
// Searcher section
|
// Searcher section
|
||||||
const sr = s.searcher;
|
const sr = s.searcher;
|
||||||
const searcherLine = sr.running
|
const searcherLine = sr.running
|
||||||
? `🔄 Running now — ${q.total} jobs found so far`
|
? `Running now — ${q.total} jobs found so far`
|
||||||
: sr.last_run?.finished === false
|
: sr.last_run?.finished === false
|
||||||
? `⚠️ Last run interrupted ${timeAgo(sr.last_run?.started_at)} (partial results saved)`
|
? `Last run interrupted ${timeAgo(sr.last_run?.started_at)} (partial results saved)`
|
||||||
: `⏸️ Last ran ${timeAgo(sr.last_run?.finished_at)}`;
|
: `Last ran ${timeAgo(sr.last_run?.finished_at)}`;
|
||||||
const lastRunDetail = sr.last_run && !sr.running
|
const lastRunDetail = sr.last_run && !sr.running
|
||||||
? `→ Found ${sr.last_run.added} new jobs (${sr.last_run.seen} seen, ${sr.last_run.skipped_dupes || 0} dupes)`
|
? `Found ${sr.last_run.added} new jobs (${sr.last_run.seen} seen, ${sr.last_run.skipped_dupes || 0} dupes)`
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
// Applier section
|
// Applier section
|
||||||
const ar = s.applier;
|
const ar = s.applier;
|
||||||
const applierLine = ar.running
|
const applierLine = ar.running
|
||||||
? `🔄 Running now`
|
? `Running now`
|
||||||
: `⏸️ Last ran ${timeAgo(ar.last_run?.finished_at)}`;
|
: `Last ran ${timeAgo(ar.last_run?.finished_at)}`;
|
||||||
const lastApplierDetail = ar.last_run && !ar.running
|
const lastApplierDetail = ar.last_run && !ar.running
|
||||||
? `→ Applied ${ar.last_run.submitted} jobs in that run`
|
? `Applied ${ar.last_run.submitted} jobs in that run`
|
||||||
: null;
|
: null;
|
||||||
|
|
||||||
const lines = [
|
const lines = [
|
||||||
`📊 *claw-apply Status*`,
|
`*claw-apply Status*`,
|
||||||
``,
|
``,
|
||||||
`🔍 *Searcher:* ${searcherLine}`,
|
`*Searcher:* ${searcherLine}`,
|
||||||
];
|
];
|
||||||
if (lastRunDetail) lines.push(lastRunDetail);
|
if (lastRunDetail) lines.push(lastRunDetail);
|
||||||
|
|
||||||
@@ -198,8 +200,8 @@ function formatReport(s) {
|
|||||||
lines.push(`${platform.charAt(0).toUpperCase() + platform.slice(1)}:`);
|
lines.push(`${platform.charAt(0).toUpperCase() + platform.slice(1)}:`);
|
||||||
for (const t of tracks) {
|
for (const t of tracks) {
|
||||||
const pct = t.total > 0 ? Math.round((t.done / t.total) * 100) : 0;
|
const pct = t.total > 0 ? Math.round((t.done / t.total) * 100) : 0;
|
||||||
const bar = t.complete ? '✅ done' : `${t.done}/${t.total} keywords (${pct}%)`;
|
const bar = t.complete ? 'done' : `${t.done}/${t.total} keywords (${pct}%)`;
|
||||||
lines.push(`• ${t.track}: ${bar}`);
|
lines.push(` ${t.track}: ${bar}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -208,23 +210,23 @@ function formatReport(s) {
|
|||||||
const fr = s.filter;
|
const fr = s.filter;
|
||||||
let filterLine;
|
let filterLine;
|
||||||
if (fr.batch_pending) {
|
if (fr.batch_pending) {
|
||||||
filterLine = `⏳ Batch in flight — ${fr.batch_job_count} jobs, submitted ${timeAgo(new Date(fr.submitted_at).getTime())}`;
|
filterLine = `Batch in flight — ${fr.batch_job_count} jobs, submitted ${timeAgo(new Date(fr.submitted_at).getTime())}`;
|
||||||
} else if (fr.last_run) {
|
} else if (fr.last_run) {
|
||||||
const lr = fr.last_run;
|
const lr = fr.last_run;
|
||||||
filterLine = `⏸️ Last ran ${timeAgo(new Date(lr.collected_at).getTime())} — ✅ ${lr.passed} passed, 🚫 ${lr.filtered} filtered`;
|
filterLine = `Last ran ${timeAgo(new Date(lr.collected_at).getTime())} — ${lr.passed} passed, ${lr.filtered} filtered`;
|
||||||
} else {
|
} else {
|
||||||
filterLine = fr.unscored > 0 ? `🟡 ${fr.unscored} jobs awaiting filter` : `⏸️ Never run`;
|
filterLine = fr.unscored > 0 ? `${fr.unscored} jobs awaiting filter` : `Never run`;
|
||||||
}
|
}
|
||||||
if (fr.model) filterLine += ` (${fr.model.replace('claude-', '').replace(/-\d{8}$/, '')})`;
|
if (fr.model) filterLine += ` (${fr.model.replace('claude-', '').replace(/-\d{8}$/, '')})`;
|
||||||
if (fr.unscored > 0 && !fr.batch_pending) filterLine += ` · ${fr.unscored} unscored`;
|
if (fr.unscored > 0 && !fr.batch_pending) filterLine += ` | ${fr.unscored} unscored`;
|
||||||
|
|
||||||
lines.push(`🔬 *Filter:* ${filterLine}`);
|
lines.push(`*Filter:* ${filterLine}`);
|
||||||
lines.push(`🚀 *Applier:* ${applierLine}`);
|
lines.push(`*Applier:* ${applierLine}`);
|
||||||
if (lastApplierDetail) lines.push(lastApplierDetail);
|
if (lastApplierDetail) lines.push(lastApplierDetail);
|
||||||
|
|
||||||
// Queue summary — only show non-zero counts
|
// Queue summary
|
||||||
const unique = q.total - (q.duplicate || 0);
|
const unique = q.total - (q.duplicate || 0);
|
||||||
lines.push('', `📋 *Queue* — ${unique} unique jobs (${q.duplicate || 0} dupes)`);
|
lines.push('', `*Queue* — ${unique} unique jobs (${q.duplicate || 0} dupes)`);
|
||||||
|
|
||||||
const queueLines = [
|
const queueLines = [
|
||||||
[q.applied, 'Applied'],
|
[q.applied, 'Applied'],
|
||||||
@@ -254,17 +256,17 @@ function formatReport(s) {
|
|||||||
unknown: 'Unknown',
|
unknown: 'Unknown',
|
||||||
};
|
};
|
||||||
if (sorted.length > 0) {
|
if (sorted.length > 0) {
|
||||||
lines.push(`• Ready to apply: ${q.new}`);
|
lines.push(`Ready to apply: ${q.new}`);
|
||||||
for (const [type, count] of sorted) {
|
for (const [type, count] of sorted) {
|
||||||
lines.push(`→ ${typeNames[type] || type}: ${count}`);
|
lines.push(` ${typeNames[type] || type}: ${count}`);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
lines.push(`• Ready to apply: ${q.new}`);
|
lines.push(`Ready to apply: ${q.new}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const [count, label] of queueLines) {
|
for (const [count, label] of queueLines) {
|
||||||
if (count > 0) lines.push(`• ${label}: ${count}`);
|
if (count > 0) lines.push(`${label}: ${count}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Last applied
|
// Last applied
|
||||||
@@ -276,16 +278,19 @@ function formatReport(s) {
|
|||||||
return lines.join('\n');
|
return lines.join('\n');
|
||||||
}
|
}
|
||||||
|
|
||||||
import { loadConfig } from './lib/queue.mjs';
|
// Main
|
||||||
import { sendTelegram } from './lib/notify.mjs';
|
const settings = await loadConfig(resolve(__dir, 'config/settings.json'));
|
||||||
|
await initQueue(settings);
|
||||||
|
const queue = loadQueue();
|
||||||
|
const { loadJSON } = await import('./lib/storage.mjs');
|
||||||
|
const log = await loadJSON(resolve(__dir, 'data/applications_log.json'), []);
|
||||||
|
|
||||||
const status = buildStatus();
|
const status = buildStatus(queue, log);
|
||||||
|
|
||||||
if (jsonMode) {
|
if (jsonMode) {
|
||||||
console.log(JSON.stringify(status, null, 2));
|
console.log(JSON.stringify(status, null, 2));
|
||||||
} else {
|
} else {
|
||||||
const report = formatReport(status);
|
const report = formatReport(status);
|
||||||
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
|
|
||||||
if (settings.notifications?.bot_token && settings.notifications?.telegram_user_id) {
|
if (settings.notifications?.bot_token && settings.notifications?.telegram_user_id) {
|
||||||
await sendTelegram(settings, report);
|
await sendTelegram(settings, report);
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ loadEnv();
|
|||||||
import { resolve, dirname } from 'path';
|
import { resolve, dirname } from 'path';
|
||||||
import { fileURLToPath } from 'url';
|
import { fileURLToPath } from 'url';
|
||||||
import { createWriteStream } from 'fs';
|
import { createWriteStream } from 'fs';
|
||||||
import { loadConfig } from './lib/queue.mjs';
|
import { loadConfig, initQueue } from './lib/queue.mjs';
|
||||||
import { processTelegramReplies } from './lib/telegram_answers.mjs';
|
import { processTelegramReplies } from './lib/telegram_answers.mjs';
|
||||||
|
|
||||||
const __dir = dirname(fileURLToPath(import.meta.url));
|
const __dir = dirname(fileURLToPath(import.meta.url));
|
||||||
@@ -22,7 +22,8 @@ const origStdoutWrite = process.stdout.write.bind(process.stdout);
|
|||||||
const origStderrWrite = process.stderr.write.bind(process.stderr);
|
const origStderrWrite = process.stderr.write.bind(process.stderr);
|
||||||
process.stdout.write = (chunk, ...args) => { logStream.write(chunk); return origStdoutWrite(chunk, ...args); };
|
process.stdout.write = (chunk, ...args) => { logStream.write(chunk); return origStdoutWrite(chunk, ...args); };
|
||||||
process.stderr.write = (chunk, ...args) => { logStream.write(chunk); return origStderrWrite(chunk, ...args); };
|
process.stderr.write = (chunk, ...args) => { logStream.write(chunk); return origStderrWrite(chunk, ...args); };
|
||||||
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
|
const settings = await loadConfig(resolve(__dir, 'config/settings.json'));
|
||||||
|
await initQueue(settings);
|
||||||
const answersPath = resolve(__dir, 'config/answers.json');
|
const answersPath = resolve(__dir, 'config/answers.json');
|
||||||
|
|
||||||
const processed = await processTelegramReplies(settings, answersPath);
|
const processed = await processTelegramReplies(settings, answersPath);
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import { dirname, resolve } from 'path';
|
|||||||
import { fileURLToPath } from 'url';
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
const __dir = dirname(fileURLToPath(import.meta.url));
|
const __dir = dirname(fileURLToPath(import.meta.url));
|
||||||
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
|
const settings = await loadConfig(resolve(__dir, 'config/settings.json'));
|
||||||
|
|
||||||
let browser;
|
let browser;
|
||||||
try {
|
try {
|
||||||
|
|||||||
Reference in New Issue
Block a user