feat: rewrite filter to use Anthropic Batch API
- Batch API = 50% cost savings vs synchronous calls
- Prompt caching on system prompt (profile + criteria shared across all jobs)
- One request per job with custom_id = job ID for result matching
- Two-phase state machine: submit → poll/collect (hourly cron safe)
- filter_state.json tracks pending batch ID between runs
- Model configurable via settings.filter.model (default: claude-sonnet-4-6)
- Telegram notifications on submit + collect
- Errors pass through — never block applications due to filter failure
- --stats flag for queue overview
This commit is contained in:
221
lib/filter.mjs
221
lib/filter.mjs
@@ -1,15 +1,19 @@
|
||||
/**
|
||||
* filter.mjs — AI job relevance filter
|
||||
* Scores queued jobs 0-10 against candidate profile + job profiles using Claude Haiku
|
||||
* Jobs below filter_min_score are marked 'filtered' and skipped by the applier
|
||||
* filter.mjs — AI job relevance filter (Anthropic Batch API)
|
||||
* Scores queued jobs 0-10 against candidate profile using Claude (Sonnet by default)
|
||||
* Uses Batch API for 50% cost savings + prompt caching for shared context
|
||||
*/
|
||||
|
||||
import { readFileSync, existsSync } from 'fs';
|
||||
|
||||
// Jobs per synchronous scoring call in the pre-Batch-API implementation.
// NOTE(review): the batch path submits one request per job, so this looks
// unused after the rewrite — confirm before removing.
const BATCH_SIZE = 10;
|
||||
// Job descriptions are truncated to this many characters before prompting,
// to keep per-request token usage small.
const DESC_MAX_CHARS = 800;
|
||||
// Anthropic Message Batches endpoint (submit, status, and results URLs
// are all derived from this base).
const BATCH_API = 'https://api.anthropic.com/v1/messages/batches';
|
||||
|
||||
function loadProfile(profilePath) {
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Load a JSON profile from disk.
 *
 * @param {string} profilePath - path to a JSON file (may be falsy)
 * @returns {Object|null} parsed profile, or null when the path is falsy,
 *   the file does not exist, or its contents are not valid JSON
 */
export function loadProfile(profilePath) {
  if (!profilePath) return null;
  if (!existsSync(profilePath)) return null;
  try {
    const raw = readFileSync(profilePath, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Unreadable or malformed file — treated the same as "no profile".
    return null;
  }
}
|
||||
@@ -43,7 +47,7 @@ ${highlights}
|
||||
## Scoring Guide
|
||||
10 = Perfect match (exact title, right company stage, right industry, right salary range)
|
||||
7-9 = Strong match (right role type, maybe slightly off industry or stage)
|
||||
5-6 = Borderline (relevant but some mismatches — wrong industry, wrong seniority, or vague posting)
|
||||
5-6 = Borderline (relevant but some mismatches — wrong industry, seniority, or vague posting)
|
||||
3-4 = Weak match (mostly off target but some overlap)
|
||||
0-2 = Not relevant (wrong role type, wrong industry, recruiter spam, part-time, etc.)
|
||||
|
||||
@@ -51,119 +55,140 @@ Penalize heavily for:
|
||||
- Part-time roles
|
||||
- Wrong industry (insurance, healthcare PR, construction, retail, K-12 education, utilities)
|
||||
- Wrong role type (SDR/BDR, customer success, partnerships, marketing, coordinator)
|
||||
- Junior or entry-level
|
||||
- Staffing agency spam where no real company is named
|
||||
- Salary clearly below minimum`;
|
||||
- Junior or entry-level positions
|
||||
- Staffing agency spam with no real company named
|
||||
- Salary clearly below minimum
|
||||
|
||||
Return ONLY a JSON object: {"score": <0-10>, "reason": "<one line>"}`;
|
||||
}
|
||||
|
||||
/**
 * Build the per-job user message for one batch request.
 * All shared context (candidate profile, scoring guide) lives in the cached
 * system prompt; this message carries only the job-specific fields, so each
 * batch request stays small.
 *
 * @param {Object} job - queued job; reads title, company, location, description
 * @returns {string} prompt asking the model for a single {"score","reason"} JSON object
 */
function buildJobMessage(job) {
  // Collapse whitespace and cap the description to bound token usage.
  const desc = (job.description || '').substring(0, DESC_MAX_CHARS).replace(/\s+/g, ' ').trim();
  return `Title: ${job.title}
Company: ${job.company || 'Unknown'}
Location: ${job.location || 'Unknown'}
Description: ${desc}

Return ONLY: {"score": <0-10>, "reason": "<one line>"}`;
}
|
||||
|
||||
async function filterBatch(jobs, jobProfile, candidateProfile, apiKey) {
|
||||
const res = await fetch('https://api.anthropic.com/v1/messages', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': apiKey,
|
||||
'anthropic-version': '2023-06-01'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: 'claude-3-haiku-20240307',
|
||||
max_tokens: 1024,
|
||||
system: buildSystemPrompt(jobProfile, candidateProfile),
|
||||
messages: [{ role: 'user', content: buildUserPrompt(jobs) }]
|
||||
})
|
||||
});
|
||||
// ---------------------------------------------------------------------------
|
||||
// Batch API
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
if (!res.ok) throw new Error(`Anthropic API error: ${res.status} ${res.statusText}`);
|
||||
|
||||
const data = await res.json();
|
||||
if (data.error) throw new Error(data.error.message);
|
||||
|
||||
const text = data.content[0].text.trim();
|
||||
const clean = text.replace(/```json\n?|\n?```/g, '').trim();
|
||||
return JSON.parse(clean);
|
||||
/**
 * HTTP headers shared by every Anthropic API call.
 * The beta flag opts in to prompt caching so the shared system prompt is
 * billed once rather than once per request.
 *
 * @param {string} apiKey - Anthropic API key
 * @returns {Object} header map for fetch()
 */
function apiHeaders(apiKey) {
  const headers = {
    'Content-Type': 'application/json',
    'anthropic-version': '2023-06-01',
    'anthropic-beta': 'prompt-caching-2024-07-31',
  };
  headers['x-api-key'] = apiKey;
  return headers;
}
|
||||
|
||||
/**
|
||||
* runFilter — score all new jobs and return results
|
||||
* @param {Array} jobs - jobs with status 'new'
|
||||
* @param {Object} searchConfig - search_config.json
|
||||
* @param {Object} settings - settings.json (needs settings.filter.job_profiles)
|
||||
* @param {Object} candidateProfile - profile.json
|
||||
* @param {string} apiKey - Anthropic API key
|
||||
* @param {Object} opts - { onProgress }
|
||||
* @returns {Array} [{ job, score, reason, pass, minScore }]
|
||||
* Submit all jobs as a single Anthropic batch.
|
||||
* System prompt is marked cache_control=ephemeral so it's cached across requests.
|
||||
* Returns the batch ID.
|
||||
*/
|
||||
export async function runFilter(jobs, searchConfig, settings, candidateProfile, apiKey, { onProgress } = {}) {
|
||||
export async function submitBatch(jobs, jobProfilesByTrack, searchConfig, candidateProfile, model, apiKey) {
|
||||
const globalMin = searchConfig.filter_min_score ?? 5;
|
||||
|
||||
// Group jobs by track
|
||||
const byTrack = {};
|
||||
const requests = [];
|
||||
|
||||
for (const job of jobs) {
|
||||
const track = job.track || 'ae';
|
||||
if (!byTrack[track]) byTrack[track] = [];
|
||||
byTrack[track].push(job);
|
||||
const jobProfile = jobProfilesByTrack[track];
|
||||
if (!jobProfile) continue; // no profile → skip (caller handles this)
|
||||
|
||||
const systemPrompt = buildSystemPrompt(jobProfile, candidateProfile);
|
||||
|
||||
requests.push({
|
||||
custom_id: job.id,
|
||||
params: {
|
||||
model,
|
||||
max_tokens: 128,
|
||||
system: [
|
||||
{
|
||||
type: 'text',
|
||||
text: systemPrompt,
|
||||
cache_control: { type: 'ephemeral' }, // cache the shared context
|
||||
}
|
||||
],
|
||||
messages: [
|
||||
{ role: 'user', content: buildJobMessage(job) }
|
||||
],
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (requests.length === 0) throw new Error('No requests to submit — check job profiles are configured');
|
||||
|
||||
const res = await fetch(BATCH_API, {
|
||||
method: 'POST',
|
||||
headers: apiHeaders(apiKey),
|
||||
body: JSON.stringify({ requests }),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
const err = await res.text();
|
||||
throw new Error(`Batch submit failed ${res.status}: ${err}`);
|
||||
}
|
||||
|
||||
const data = await res.json();
|
||||
return data.id; // msgbatch_...
|
||||
}
|
||||
|
||||
/**
|
||||
* Check batch status. Returns { status: 'in_progress'|'ended', counts }
|
||||
*/
|
||||
/**
 * Poll a batch's processing status.
 *
 * @param {string} batchId - ID returned by submitBatch (msgbatch_...)
 * @param {string} apiKey - Anthropic API key
 * @returns {Promise<{status: string, counts: Object, resultsUrl: string}>}
 *   status is 'in_progress' or 'ended'; counts holds per-request tallies
 * @throws {Error} when the status request returns a non-2xx response
 */
export async function checkBatch(batchId, apiKey) {
  const response = await fetch(`${BATCH_API}/${batchId}`, { headers: apiHeaders(apiKey) });
  if (!response.ok) {
    throw new Error(`Batch status check failed ${response.status}`);
  }
  const body = await response.json();
  return {
    status: body.processing_status,  // 'in_progress' | 'ended'
    counts: body.request_counts,     // { processing, succeeded, errored, canceled, expired }
    resultsUrl: body.results_url,
  };
}
|
||||
|
||||
/**
|
||||
* Download and parse batch results. Returns array of { jobId, score, reason, error }
|
||||
*/
|
||||
export async function downloadResults(batchId, apiKey) {
|
||||
const res = await fetch(`${BATCH_API}/${batchId}/results`, {
|
||||
headers: apiHeaders(apiKey),
|
||||
});
|
||||
|
||||
if (!res.ok) throw new Error(`Results download failed ${res.status}`);
|
||||
|
||||
const text = await res.text();
|
||||
const lines = text.trim().split('\n').filter(Boolean);
|
||||
const results = [];
|
||||
|
||||
for (const [track, trackJobs] of Object.entries(byTrack)) {
|
||||
const searchEntry = (searchConfig.searches || []).find(s => s.track === track);
|
||||
const minScore = searchEntry?.filter_min_score ?? globalMin;
|
||||
for (const line of lines) {
|
||||
try {
|
||||
const entry = JSON.parse(line);
|
||||
const jobId = entry.custom_id;
|
||||
|
||||
const profilePath = settings.filter?.job_profiles?.[track];
|
||||
const jobProfile = loadProfile(profilePath);
|
||||
|
||||
if (!jobProfile) {
|
||||
console.warn(`⚠️ No job profile configured for track "${track}" — passing ${trackJobs.length} jobs through unfiltered`);
|
||||
for (const job of trackJobs) {
|
||||
results.push({ job, score: null, reason: 'no_profile', pass: true, minScore });
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
let done = 0;
|
||||
for (let i = 0; i < trackJobs.length; i += BATCH_SIZE) {
|
||||
const batch = trackJobs.slice(i, i + BATCH_SIZE);
|
||||
|
||||
try {
|
||||
const scores = await filterBatch(batch, jobProfile, candidateProfile, apiKey);
|
||||
|
||||
for (let j = 0; j < batch.length; j++) {
|
||||
const job = batch[j];
|
||||
const result = scores[j] || { score: 5, reason: 'parse_error' };
|
||||
results.push({
|
||||
job,
|
||||
score: result.score,
|
||||
reason: result.reason,
|
||||
pass: result.score >= minScore,
|
||||
minScore
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`\n Filter batch error (track: ${track}, batch ${i}–${i + batch.length}): ${err.message}`);
|
||||
// On error, pass jobs through — don't block applications
|
||||
for (const job of batch) {
|
||||
results.push({ job, score: null, reason: 'filter_error', pass: true, minScore });
|
||||
if (entry.result?.type === 'succeeded') {
|
||||
const content = entry.result.message?.content?.[0]?.text || '';
|
||||
try {
|
||||
const clean = content.replace(/```json\n?|\n?```/g, '').trim();
|
||||
const parsed = JSON.parse(clean);
|
||||
results.push({ jobId, score: parsed.score, reason: parsed.reason });
|
||||
} catch {
|
||||
results.push({ jobId, score: null, reason: 'parse_error', error: true });
|
||||
}
|
||||
} else {
|
||||
results.push({ jobId, score: null, reason: entry.result?.type || 'unknown_error', error: true });
|
||||
}
|
||||
|
||||
done += batch.length;
|
||||
if (onProgress) onProgress(done, trackJobs.length, track);
|
||||
} catch {
|
||||
// malformed line — skip
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user