feat: rewrite filter to use Anthropic Batch API

- Batch API = 50% cost savings vs synchronous calls
- Prompt caching on system prompt (profile + criteria shared across all jobs)
- One request per job with custom_id = job ID for result matching
- Two-phase state machine: submit → poll/collect (hourly cron safe)
- filter_state.json tracks pending batch ID between runs
- Model configurable via settings.filter.model (default: claude-sonnet-4-6)
- Telegram notifications on submit + collect
- Errors pass through — never block applications due to filter failure
- --stats flag for queue overview
This commit is contained in:
2026-03-06 10:12:47 +00:00
parent 9bf904dada
commit dbe9967713
2 changed files with 314 additions and 179 deletions

View File

@@ -3,121 +3,231 @@ import { loadEnv } from './lib/env.mjs';
loadEnv();
/**
* job_filter.mjs — claw-apply AI Job Filter
* Scores all queued 'new' jobs 0-10 against candidate profile using Claude Haiku
* Jobs below filter_min_score (default 5, configurable per-search in search_config.json)
* are marked 'filtered' and skipped by the applier
* job_filter.mjs — claw-apply AI Job Filter (Anthropic Batch API)
*
* Runs in two phases on each invocation:
*
* Phase 1 — COLLECT: if a batch is in flight, check status + download results
* Phase 2 — SUBMIT: if no batch pending, find unscored jobs + submit a new batch
*
* Designed to run hourly via cron. Safe to run anytime — idempotent.
*
* Usage:
* node job_filter.mjs — filter all new jobs
* node job_filter.mjs --dry-run — score without writing status changes
* node job_filter.mjs --stats — show filter stats only (no re-filter)
* node job_filter.mjs — normal run (collect if pending, else submit)
* node job_filter.mjs --stats — show filter stats only
*/
import { dirname, resolve } from 'path';
import { fileURLToPath } from 'url';
import { readFileSync, writeFileSync, existsSync, unlinkSync } from 'fs';
const __dir = dirname(fileURLToPath(import.meta.url));
import { getJobsByStatus, updateJobStatus, loadConfig } from './lib/queue.mjs';
import { acquireLock } from './lib/lock.mjs';
import { runFilter } from './lib/filter.mjs';
import { getJobsByStatus, updateJobStatus, loadConfig, loadQueue } from './lib/queue.mjs';
import { loadProfile, submitBatch, checkBatch, downloadResults } from './lib/filter.mjs';
import { sendTelegram } from './lib/notify.mjs';
const isDryRun = process.argv.includes('--dry-run');
const isStats = process.argv.includes('--stats');
async function showStats() {
const all = getJobsByStatus(['new', 'filtered']);
const filtered = all.filter(j => j.status === 'filtered');
const scored = all.filter(j => j.filter_score != null);
const STATE_PATH = resolve(__dir, 'data/filter_state.json');
const DEFAULT_MODEL = 'claude-sonnet-4-6-20251101';
console.log(`📊 Filter Stats\n`);
console.log(` Filtered (blocked): ${filtered.length}`);
console.log(` New (passed/unscored): ${all.length - filtered.length}`);
console.log(` Total scored: ${scored.length}\n`);
// ---------------------------------------------------------------------------
// State helpers
// ---------------------------------------------------------------------------
function readState() {
if (!existsSync(STATE_PATH)) return null;
try { return JSON.parse(readFileSync(STATE_PATH, 'utf8')); } catch { return null; }
}
function writeState(state) {
writeFileSync(STATE_PATH, JSON.stringify(state, null, 2));
}
function clearState() {
if (existsSync(STATE_PATH)) unlinkSync(STATE_PATH);
}
// ---------------------------------------------------------------------------
// Stats
// ---------------------------------------------------------------------------
function showStats() {
const queue = loadQueue();
const byStatus = {};
for (const j of queue) byStatus[j.status] = (byStatus[j.status] || 0) + 1;
const filtered = queue.filter(j => j.status === 'filtered');
const scored = queue.filter(j => j.filter_score != null);
console.log('📊 Filter Stats\n');
console.log(` New (unfiltered): ${byStatus['new'] || 0}`);
console.log(` Filtered (blocked): ${byStatus['filtered'] || 0}`);
console.log(` Total scored: ${scored.length}`);
console.log(` Pass rate: ${scored.length > 0 ? Math.round((scored.filter(j => j.status !== 'filtered').length / scored.length) * 100) : 0}%\n`);
const state = readState();
if (state) {
console.log(` Pending batch: ${state.batch_id}`);
console.log(` Submitted: ${state.submitted_at}`);
console.log(` Job count: ${state.job_count}\n`);
}
if (filtered.length > 0) {
console.log(`Sample filtered jobs:`);
filtered.slice(0, 10).forEach(j => {
console.log(` [${j.filter_score}/10] ${j.title} @ ${j.company}${j.filter_reason}`);
});
console.log('Sample filtered:');
filtered.slice(0, 10).forEach(j =>
console.log(` [${j.filter_score}/10] ${j.title} @ ${j.company}${j.filter_reason}`)
);
}
}
// ---------------------------------------------------------------------------
// Phase 1 — Collect results from a pending batch
// ---------------------------------------------------------------------------
async function collect(state, settings) {
const apiKey = process.env.ANTHROPIC_API_KEY;
console.log(`🔍 Checking batch ${state.batch_id}...`);
const { status, counts } = await checkBatch(state.batch_id, apiKey);
if (status === 'in_progress') {
const total = Object.values(counts).reduce((a, b) => a + b, 0);
const done = (counts.succeeded || 0) + (counts.errored || 0) + (counts.canceled || 0) + (counts.expired || 0);
console.log(` Still processing — ${done}/${total} complete. Check back later.`);
return;
}
console.log(` Batch ended. Downloading results...`);
const results = await downloadResults(state.batch_id, apiKey);
const searchConfig = loadConfig(resolve(__dir, 'config/search_config.json'));
const globalMin = searchConfig.filter_min_score ?? 5;
let passed = 0, filtered = 0, errors = 0;
for (const { jobId, score, reason, error } of results) {
if (error || score === null) {
errors++;
// Pass through on error — never block applications due to filter failure
updateJobStatus(jobId, 'new', { filter_score: null, filter_reason: reason || 'filter_error' });
continue;
}
// Find per-track threshold
const queue = loadQueue();
const job = queue.find(j => j.id === jobId);
const track = job?.track || 'ae';
const searchEntry = (searchConfig.searches || []).find(s => s.track === track);
const minScore = searchEntry?.filter_min_score ?? globalMin;
if (score >= minScore) {
passed++;
updateJobStatus(jobId, 'new', { filter_score: score, filter_reason: reason });
} else {
filtered++;
updateJobStatus(jobId, 'filtered', { filter_score: score, filter_reason: reason });
}
}
clearState();
const summary = `✅ Filter complete — ${passed} passed, ${filtered} filtered, ${errors} errors`;
console.log(`\n${summary}`);
// Notify via Telegram
await sendTelegram(settings,
`🔍 *AI Filter complete*\n✅ Passed: ${passed}\n🚫 Filtered: ${filtered}\n⚠️ Errors: ${errors}`
).catch(() => {}); // non-fatal
}
// ---------------------------------------------------------------------------
// Phase 2 — Submit a new batch
// ---------------------------------------------------------------------------
async function submit(settings, searchConfig, candidateProfile) {
const apiKey = process.env.ANTHROPIC_API_KEY;
// Get all new jobs that haven't been scored yet
const jobs = getJobsByStatus('new').filter(j => j.filter_score == null);
if (jobs.length === 0) {
console.log('✅ Nothing to filter — all new jobs already scored.');
return;
}
// Build job profiles map by track
const profilePaths = settings.filter?.job_profiles || {};
const jobProfilesByTrack = {};
for (const [track, path] of Object.entries(profilePaths)) {
const profile = loadProfile(path);
if (profile) jobProfilesByTrack[track] = profile;
else console.warn(`⚠️ Could not load job profile for track "${track}" at ${path}`);
}
// Filter out jobs with no profile (will pass through unscored)
const filterable = jobs.filter(j => jobProfilesByTrack[j.track || 'ae']);
const noProfile = jobs.length - filterable.length;
if (noProfile > 0) console.warn(`⚠️ ${noProfile} jobs skipped — no profile for their track`);
if (filterable.length === 0) {
console.log('Nothing filterable — no job profiles configured for any track.');
return;
}
const model = settings.filter?.model || DEFAULT_MODEL;
console.log(`🚀 Submitting batch — ${filterable.length} jobs, model: ${model}`);
const batchId = await submitBatch(filterable, jobProfilesByTrack, searchConfig, candidateProfile, model, apiKey);
writeState({
batch_id: batchId,
submitted_at: new Date().toISOString(),
job_count: filterable.length,
model,
});
console.log(` Batch submitted: ${batchId}`);
console.log(` Results typically ready in < 1 hour. Next run will collect.`);
// Notify
await sendTelegram(settings,
`🔍 *AI Filter submitted*\n${filterable.length} jobs queued for scoring\nBatch: \`${batchId}\``
).catch(() => {});
}
// ---------------------------------------------------------------------------
// Main
// ---------------------------------------------------------------------------
async function main() {
if (isStats) {
await showStats();
showStats();
return;
}
const apiKey = process.env.ANTHROPIC_API_KEY;
if (!apiKey) {
console.error('❌ ANTHROPIC_API_KEY not set — filter requires Anthropic API');
console.error('❌ ANTHROPIC_API_KEY not set');
process.exit(1);
}
const lock = acquireLock('filter', resolve(__dir, 'data'));
const settings = loadConfig(resolve(__dir, 'config/settings.json'));
const searchConfig = loadConfig(resolve(__dir, 'config/search_config.json'));
const candidateProfile = loadConfig(resolve(__dir, 'config/profile.json'));
const jobs = getJobsByStatus('new');
const globalMin = searchConfig.filter_min_score ?? 5;
console.log('🔍 claw-apply: AI Job Filter\n');
console.log(`🔍 claw-apply: AI Job Filter${isDryRun ? ' (DRY RUN)' : ''}\n`);
console.log(` Jobs to score: ${jobs.length}`);
console.log(` Default threshold: ${globalMin}/10\n`);
const state = readState();
if (jobs.length === 0) {
console.log('Nothing to filter.');
return;
}
let passed = 0, filtered = 0, errors = 0;
const filterLog = [];
const results = await runFilter(jobs, searchConfig, settings, candidateProfile, apiKey, {
onProgress: (done, total, track) => {
process.stdout.write(`\r [${track}] ${done}/${total} scored...`);
}
});
console.log('\n');
for (const { job, score, reason, pass, minScore } of results) {
if (score === null) {
errors++;
continue;
}
filterLog.push({ id: job.id, title: job.title, company: job.company, score, reason, pass, minScore });
if (pass) {
passed++;
if (!isDryRun) {
updateJobStatus(job.id, 'new', { filter_score: score, filter_reason: reason });
}
} else {
filtered++;
if (!isDryRun) {
updateJobStatus(job.id, 'filtered', { filter_score: score, filter_reason: reason });
}
}
}
console.log(`✅ Filter complete${isDryRun ? ' (no changes written)' : ''}`);
console.log(` ✅ Passed: ${passed}`);
console.log(` 🚫 Filtered: ${filtered}`);
console.log(` ⚠️ Errors: ${errors} (passed through)`);
console.log(` 📊 Pass rate: ${jobs.length > 0 ? Math.round((passed / jobs.length) * 100) : 0}%\n`);
if (isDryRun && filterLog.length > 0) {
console.log(`Sample scores:`);
filterLog.slice(0, 20).forEach(j => {
const icon = j.pass ? '✅' : '🚫';
console.log(` ${icon} [${j.score}/10] ${j.title} @ ${j.company}${j.reason}`);
});
if (state?.batch_id) {
// Phase 1: collect results from pending batch
await collect(state, settings);
} else {
// Phase 2: submit new batch
await submit(settings, searchConfig, candidateProfile);
}
}

View File

@@ -1,15 +1,19 @@
/**
* filter.mjs — AI job relevance filter
* Scores queued jobs 0-10 against candidate profile + job profiles using Claude Haiku
* Jobs below filter_min_score are marked 'filtered' and skipped by the applier
* filter.mjs — AI job relevance filter (Anthropic Batch API)
* Scores queued jobs 0-10 against candidate profile using Claude (Sonnet by default)
* Uses Batch API for 50% cost savings + prompt caching for shared context
*/
import { readFileSync, existsSync } from 'fs';
const BATCH_SIZE = 10;
const DESC_MAX_CHARS = 800;
const BATCH_API = 'https://api.anthropic.com/v1/messages/batches';
function loadProfile(profilePath) {
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
export function loadProfile(profilePath) {
if (!profilePath || !existsSync(profilePath)) return null;
try { return JSON.parse(readFileSync(profilePath, 'utf8')); } catch { return null; }
}
@@ -43,7 +47,7 @@ ${highlights}
## Scoring Guide
10 = Perfect match (exact title, right company stage, right industry, right salary range)
7-9 = Strong match (right role type, maybe slightly off industry or stage)
5-6 = Borderline (relevant but some mismatches — wrong industry, wrong seniority, or vague posting)
5-6 = Borderline (relevant but some mismatches — wrong industry, seniority, or vague posting)
3-4 = Weak match (mostly off target but some overlap)
0-2 = Not relevant (wrong role type, wrong industry, recruiter spam, part-time, etc.)
@@ -51,119 +55,140 @@ Penalize heavily for:
- Part-time roles
- Wrong industry (insurance, healthcare PR, construction, retail, K-12 education, utilities)
- Wrong role type (SDR/BDR, customer success, partnerships, marketing, coordinator)
- Junior or entry-level
- Staffing agency spam where no real company is named
- Salary clearly below minimum`;
- Junior or entry-level positions
- Staffing agency spam with no real company named
- Salary clearly below minimum
Return ONLY a JSON object: {"score": <0-10>, "reason": "<one line>"}`;
}
function buildUserPrompt(jobs) {
const jobList = jobs.map((j, i) => {
const desc = (j.description || '').substring(0, DESC_MAX_CHARS).replace(/\s+/g, ' ').trim();
return `JOB ${i + 1}
Title: ${j.title}
Company: ${j.company || 'Unknown'}
Location: ${j.location || 'Unknown'}
Description: ${desc}`;
}).join('\n\n---\n\n');
function buildJobMessage(job) {
const desc = (job.description || '').substring(0, DESC_MAX_CHARS).replace(/\s+/g, ' ').trim();
return `Title: ${job.title}
Company: ${job.company || 'Unknown'}
Location: ${job.location || 'Unknown'}
Description: ${desc}
return `Score each of the following ${jobs.length} jobs. Return ONLY a JSON array with one object per job in order:
[{"score": 7, "reason": "one line explaining score"}, ...]
${jobList}`;
Return ONLY: {"score": <0-10>, "reason": "<one line>"}`;
}
async function filterBatch(jobs, jobProfile, candidateProfile, apiKey) {
const res = await fetch('https://api.anthropic.com/v1/messages', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'x-api-key': apiKey,
'anthropic-version': '2023-06-01'
},
body: JSON.stringify({
model: 'claude-3-haiku-20240307',
max_tokens: 1024,
system: buildSystemPrompt(jobProfile, candidateProfile),
messages: [{ role: 'user', content: buildUserPrompt(jobs) }]
})
});
// ---------------------------------------------------------------------------
// Batch API
// ---------------------------------------------------------------------------
if (!res.ok) throw new Error(`Anthropic API error: ${res.status} ${res.statusText}`);
const data = await res.json();
if (data.error) throw new Error(data.error.message);
const text = data.content[0].text.trim();
const clean = text.replace(/```json\n?|\n?```/g, '').trim();
return JSON.parse(clean);
function apiHeaders(apiKey) {
return {
'Content-Type': 'application/json',
'x-api-key': apiKey,
'anthropic-version': '2023-06-01',
'anthropic-beta': 'prompt-caching-2024-07-31',
};
}
/**
* runFilter — score all new jobs and return results
* @param {Array} jobs - jobs with status 'new'
* @param {Object} searchConfig - search_config.json
* @param {Object} settings - settings.json (needs settings.filter.job_profiles)
* @param {Object} candidateProfile - profile.json
* @param {string} apiKey - Anthropic API key
* @param {Object} opts - { onProgress }
* @returns {Array} [{ job, score, reason, pass, minScore }]
* Submit all jobs as a single Anthropic batch.
* System prompt is marked cache_control=ephemeral so it's cached across requests.
* Returns the batch ID.
*/
export async function runFilter(jobs, searchConfig, settings, candidateProfile, apiKey, { onProgress } = {}) {
export async function submitBatch(jobs, jobProfilesByTrack, searchConfig, candidateProfile, model, apiKey) {
const globalMin = searchConfig.filter_min_score ?? 5;
// Group jobs by track
const byTrack = {};
const requests = [];
for (const job of jobs) {
const track = job.track || 'ae';
if (!byTrack[track]) byTrack[track] = [];
byTrack[track].push(job);
const jobProfile = jobProfilesByTrack[track];
if (!jobProfile) continue; // no profile → skip (caller handles this)
const systemPrompt = buildSystemPrompt(jobProfile, candidateProfile);
requests.push({
custom_id: job.id,
params: {
model,
max_tokens: 128,
system: [
{
type: 'text',
text: systemPrompt,
cache_control: { type: 'ephemeral' }, // cache the shared context
}
],
messages: [
{ role: 'user', content: buildJobMessage(job) }
],
}
});
}
if (requests.length === 0) throw new Error('No requests to submit — check job profiles are configured');
const res = await fetch(BATCH_API, {
method: 'POST',
headers: apiHeaders(apiKey),
body: JSON.stringify({ requests }),
});
if (!res.ok) {
const err = await res.text();
throw new Error(`Batch submit failed ${res.status}: ${err}`);
}
const data = await res.json();
return data.id; // msgbatch_...
}
/**
* Check batch status. Returns { status: 'in_progress'|'ended', counts }
*/
export async function checkBatch(batchId, apiKey) {
const res = await fetch(`${BATCH_API}/${batchId}`, {
headers: apiHeaders(apiKey),
});
if (!res.ok) throw new Error(`Batch status check failed ${res.status}`);
const data = await res.json();
return {
status: data.processing_status, // 'in_progress' | 'ended'
counts: data.request_counts, // { processing, succeeded, errored, canceled, expired }
resultsUrl: data.results_url,
};
}
/**
* Download and parse batch results. Returns array of { jobId, score, reason, error }
*/
export async function downloadResults(batchId, apiKey) {
const res = await fetch(`${BATCH_API}/${batchId}/results`, {
headers: apiHeaders(apiKey),
});
if (!res.ok) throw new Error(`Results download failed ${res.status}`);
const text = await res.text();
const lines = text.trim().split('\n').filter(Boolean);
const results = [];
for (const [track, trackJobs] of Object.entries(byTrack)) {
const searchEntry = (searchConfig.searches || []).find(s => s.track === track);
const minScore = searchEntry?.filter_min_score ?? globalMin;
for (const line of lines) {
try {
const entry = JSON.parse(line);
const jobId = entry.custom_id;
const profilePath = settings.filter?.job_profiles?.[track];
const jobProfile = loadProfile(profilePath);
if (!jobProfile) {
console.warn(`⚠️ No job profile configured for track "${track}" — passing ${trackJobs.length} jobs through unfiltered`);
for (const job of trackJobs) {
results.push({ job, score: null, reason: 'no_profile', pass: true, minScore });
}
continue;
}
let done = 0;
for (let i = 0; i < trackJobs.length; i += BATCH_SIZE) {
const batch = trackJobs.slice(i, i + BATCH_SIZE);
try {
const scores = await filterBatch(batch, jobProfile, candidateProfile, apiKey);
for (let j = 0; j < batch.length; j++) {
const job = batch[j];
const result = scores[j] || { score: 5, reason: 'parse_error' };
results.push({
job,
score: result.score,
reason: result.reason,
pass: result.score >= minScore,
minScore
});
}
} catch (err) {
console.error(`\n Filter batch error (track: ${track}, batch ${i}${i + batch.length}): ${err.message}`);
// On error, pass jobs through — don't block applications
for (const job of batch) {
results.push({ job, score: null, reason: 'filter_error', pass: true, minScore });
if (entry.result?.type === 'succeeded') {
const content = entry.result.message?.content?.[0]?.text || '';
try {
const clean = content.replace(/```json\n?|\n?```/g, '').trim();
const parsed = JSON.parse(clean);
results.push({ jobId, score: parsed.score, reason: parsed.reason });
} catch {
results.push({ jobId, score: null, reason: 'parse_error', error: true });
}
} else {
results.push({ jobId, score: null, reason: entry.result?.type || 'unknown_error', error: true });
}
done += batch.length;
if (onProgress) onProgress(done, trackJobs.length, track);
} catch {
// malformed line — skip
}
}