feat: track token usage and estimated cost per filter run in filter_runs.json

This commit is contained in:
2026-03-06 16:22:14 +00:00
parent 3575f06018
commit 37b95b6b85
2 changed files with 35 additions and 4 deletions

View File

@@ -114,9 +114,13 @@ async function collect(state, settings) {
// All done — download and merge all results
console.log('\n All batches complete. Downloading results...');
const resultMap = {};
let totalCost = 0;
const totalUsage = { input_tokens: 0, output_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0 };
for (const b of batches) {
const results = await downloadResults(b.batchId, apiKey, b.idMap || {});
const { results, usage, cost } = await downloadResults(b.batchId, apiKey, b.idMap || {});
for (const r of results) resultMap[r.jobId] = r;
totalCost += cost;
for (const [k, v] of Object.entries(usage)) totalUsage[k] = (totalUsage[k] || 0) + v;
}
const searchConfig = loadConfig(resolve(__dir, 'config/search_config.json'));
@@ -170,10 +174,12 @@ async function collect(state, settings) {
passed,
filtered,
errors,
cost_usd: Math.round(totalCost * 100) / 100,
usage: totalUsage,
});
writeFileSync(runsPath, JSON.stringify(runs, null, 2));
const summary = `✅ Filter complete — ${passed} passed, ${filtered} filtered, ${errors} errors`;
const summary = `✅ Filter complete — ${passed} passed, ${filtered} filtered, ${errors} errors (est. cost: $${totalCost.toFixed(2)})`;
console.log(`\n${summary}`);
await sendTelegram(settings,

View File

@@ -146,6 +146,14 @@ export async function checkBatch(batchId, apiKey) {
/**
 * Download and parse batch results.
 * Returns { results, usage, cost } where results is an array of
 * { jobId, score, reason, error }, usage is the summed token counts
 * across all entries, and cost is the estimated USD cost (rounded to cents).
 */
// Sonnet batch API pricing in USD per million tokens (batch tier = 50% of
// standard rates). cache_write covers cache_creation_input_tokens; cache_read
// covers cache_read_input_tokens. Frozen so no caller can mutate shared rates.
const PRICING = Object.freeze({
  input: 1.50,
  output: 7.50,
  cache_write: 1.875,
  cache_read: 0.15,
});
export async function downloadResults(batchId, apiKey, idMap = {}) {
const res = await fetch(`${BATCH_API}/${batchId}/results`, {
headers: apiHeaders(apiKey),
@@ -156,13 +164,22 @@ export async function downloadResults(batchId, apiKey, idMap = {}) {
const text = await res.text();
const lines = text.trim().split('\n').filter(Boolean);
const results = [];
const usage = { input_tokens: 0, output_tokens: 0, cache_creation_input_tokens: 0, cache_read_input_tokens: 0 };
for (const line of lines) {
try {
const entry = JSON.parse(line);
// Resolve truncated custom_id back to original job ID
const jobId = idMap[entry.custom_id] || entry.custom_id;
// Accumulate token usage
const u = entry.result?.message?.usage;
if (u) {
usage.input_tokens += u.input_tokens || 0;
usage.output_tokens += u.output_tokens || 0;
usage.cache_creation_input_tokens += u.cache_creation_input_tokens || 0;
usage.cache_read_input_tokens += u.cache_read_input_tokens || 0;
}
if (entry.result?.type === 'succeeded') {
const content = entry.result.message?.content?.[0]?.text || '';
try {
@@ -180,5 +197,13 @@ export async function downloadResults(batchId, apiKey, idMap = {}) {
}
}
return results;
// Calculate estimated cost
const cost = (
(usage.input_tokens * PRICING.input) +
(usage.output_tokens * PRICING.output) +
(usage.cache_creation_input_tokens * PRICING.cache_write) +
(usage.cache_read_input_tokens * PRICING.cache_read)
) / 1_000_000;
return { results, usage, cost: Math.round(cost * 100) / 100 };
}