Migrate to Docker: containerize for docker-server deployment

- Add Dockerfile + cron.js (daily 4pm UTC loop replacing EC2 cron)
- Add infra/docker-compose.yml and deploy-stack.sh for Portainer
- Support DATA_DIR env var in bot.js for persistent history volume
- Support PROMPTS_JSON env var in cron.js (no SSH needed for config)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-17 17:15:18 -07:00
parent ab32ef4cc2
commit dacc7604cc
8 changed files with 277 additions and 1 deletions

1
.gitignore vendored
View File

@@ -1,5 +1,6 @@
node_modules/ node_modules/
.env .env
infra/.env
.state.json .state.json
history.json history.json
history-*.json history-*.json

50
CLAUDE.md Normal file
View File

@@ -0,0 +1,50 @@
# xBot
AI-powered Twitter/X bot that generates tweets via the Anthropic Claude API and posts them using stealth browser automation through Kernel.sh (residential proxies + CAPTCHA solving).
## Key Features
- AI tweet generation (claude-sonnet-4-6, max 280 tokens)
- Multi-account support (credentials keyed by account name in .env)
- Configurable scheduling (daily or random interval posting)
- Tweet history tracking to avoid repetition
- Link injection (URLs added post-generation, never sent to AI)
- Anti-detection: keyboard.type() with delays, adaptive login flow
## Tech Stack
Node.js 18+ (ES modules), @onkernel/sdk, dotenv
## Project Structure
- `bot.js` — Core logic: login to X, generate tweet via Claude, post it
- `scheduler.js` — Picks random times within a window, invokes bot.js via child_process
- `package.json` — Dependencies & scripts
- `.env` — Credentials (not tracked)
- `prompts.json` — Prompt configs with account mapping, schedule, and link placeholders (not tracked)
- `history-<prompt-name>.json` — Per-prompt tweet history (auto-generated)
## Entry Points
- `node bot.js <prompt-name>` — Post a single tweet immediately
- `node scheduler.js <prompt-name>` — Schedule random posts for the day
## External Services
| Service | Purpose | Config Key |
|---------|---------|------------|
| **Kernel.sh** | Stealth browser automation (residential proxies, CAPTCHA solving, Playwright) | `KERNEL` env var |
| **Anthropic Claude API** | Tweet generation | `ANTHROPIC` env var |
| **X/Twitter** | Target platform for posting | Per-account: `{PREFIX}_USER`, `{PREFIX}_PW`, `{PREFIX}_EMAIL`, `{PREFIX}_PHONE` |
Multi-account credentials use uppercase prefix pattern in .env (e.g., MYACCOUNT_USER, MYACCOUNT_PW).
## Deployment
Deployed on AWS EC2 in **us-west-2**:
- Instance: `i-088b711cac69e6ac9`
- Name: `xbot`
- Type: `t4g.nano` (ARM-based)
- Public IP: `44.249.48.3`
- AWS account: `457667483187`
- Cron: `0 16 * * *` (4pm UTC = 8am PST, the start of the posting window), one job per prompt
## Guidelines
- All changes must preserve stealth/anti-detection behavior and multi-account architecture
- bot.js is the core; scheduler.js orchestrates timing
- Config lives in .env + prompts.json
- All project memory/context stays in this file (or a `CLAUDE-memory/` folder in the repo) — never save to the external `~/.claude/projects/.../memory/` system

7
Dockerfile Normal file
View File

@@ -0,0 +1,7 @@
# Runtime image for xBot's daily cron loop (entry point: cron.js).
FROM node:18-alpine
WORKDIR /app
# Copy manifests and install production deps first so the npm layer is
# cached across source-only changes.
COPY package*.json ./
RUN npm ci --omit=dev
COPY bot.js scheduler.js cron.js ./
# bot.js writes per-prompt history files under DATA_DIR; mount a volume at
# /data to persist them across container restarts.
ENV DATA_DIR=/data
CMD ["node", "cron.js"]

3
bot.js
View File

@@ -6,13 +6,14 @@ import { dirname, join } from "path";
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
const PROMPTS_FILE = join(__dirname, "prompts.json"); const PROMPTS_FILE = join(__dirname, "prompts.json");
const DATA_DIR = process.env.DATA_DIR || __dirname;
function loadPrompts() { function loadPrompts() {
return JSON.parse(readFileSync(PROMPTS_FILE, "utf-8")); return JSON.parse(readFileSync(PROMPTS_FILE, "utf-8"));
} }
function historyPath(name) { function historyPath(name) {
return join(__dirname, `history-${name}.json`); return join(DATA_DIR, `history-${name}.json`);
} }
function loadHistory(name) { function loadHistory(name) {

49
cron.js Normal file
View File

@@ -0,0 +1,49 @@
import { execFileSync } from "child_process";
import { writeFileSync } from "fs";
import { fileURLToPath } from "url";
import { dirname, join } from "path";
const __dirname = dirname(fileURLToPath(import.meta.url));

// Which prompt this container handles — one long-running container per prompt.
const PROMPT_NAME = process.env.PROMPT_NAME;
if (!PROMPT_NAME) {
  console.error("PROMPT_NAME env var required");
  process.exit(1);
}

// Materialize prompts.json next to the code from the PROMPTS_JSON env var.
// Future: if PROMPTS_URL is set, fetch from S3 or similar instead.
if (!process.env.PROMPTS_JSON) {
  console.error("PROMPTS_JSON env var required");
  process.exit(1);
}
writeFileSync(join(__dirname, "prompts.json"), process.env.PROMPTS_JSON);
console.log(`[${PROMPT_NAME}] Wrote prompts.json from PROMPTS_JSON env var`);
// Milliseconds until the next occurrence of the given UTC hour (default 16,
// i.e. 4pm UTC — the daily trigger). If that time today has already passed,
// targets the same hour tomorrow. Result is always in (0, 24h].
function msUntilNext4pmUTC(hourUTC = 16) {
  const now = new Date();
  const next = new Date(now);
  next.setUTCHours(hourUTC, 0, 0, 0);
  // At or past today's target -> roll over to tomorrow.
  if (next <= now) next.setUTCDate(next.getUTCDate() + 1);
  return next - now;
}
// Run scheduler.js once for this prompt, then arm the next daily run.
// A scheduler failure is logged but never kills the long-running loop.
function runScheduler() {
  const startedAt = new Date().toISOString();
  console.log(`[${PROMPT_NAME}] Running scheduler at ${startedAt}`);
  const schedulerArgs = [join(__dirname, "scheduler.js"), PROMPT_NAME];
  try {
    execFileSync("node", schedulerArgs, { stdio: "inherit" });
  } catch {
    console.error(`[${PROMPT_NAME}] scheduler.js exited with error`);
  }
  scheduleNext();
}
// Compute the delay to the next 4pm UTC and arm a one-shot timer for it;
// runScheduler re-invokes this after each run, forming the daily loop.
function scheduleNext() {
  const delayMs = msUntilNext4pmUTC();
  const nextRun = new Date(Date.now() + delayMs);
  console.log(`[${PROMPT_NAME}] Next run at ${nextRun.toISOString()}`);
  setTimeout(runScheduler, delayMs);
}
scheduleNext();

23
infra/.env.example Normal file
View File

@@ -0,0 +1,23 @@
# Copy to infra/.env and fill in values. Never commit infra/.env.
# Docker registry (from docker-server setup)
REGISTRY=registry.yourdomain.com
# API keys
ANTHROPIC=sk-ant-...
KERNEL=...
# Prompt config (full JSON from prompts.json, minified)
PROMPTS_JSON=[{"name":"example","account":"myaccount","prompt":"...","schedule":{"type":"daily","window":[8,20],"postsPerDay":[1,1]}}]
# Account credentials — uppercase prefix must match "account" field in PROMPTS_JSON
MYACCOUNT_USER=
MYACCOUNT_PW=
MYACCOUNT_EMAIL=
MYACCOUNT_PHONE=
# Add more accounts as needed:
# OTHERACCOUNT_USER=
# OTHERACCOUNT_PW=
# OTHERACCOUNT_EMAIL=
# OTHERACCOUNT_PHONE=

105
infra/deploy-stack.sh Normal file
View File

@@ -0,0 +1,105 @@
#!/bin/bash
# Deploy xBot stack to Portainer. No Caddy wiring needed (no HTTP endpoints).
#
# Required env:
# PORTAINER_URL — e.g. https://portainer.yourdomain.com
# PORTAINER_API_KEY — from docker-server setup.sh output
#
# Optional:
# STACK_NAME — default: xbot
# ENV_FILE — default: ./infra/.env
# COMPOSE_FILE — default: ./infra/docker-compose.yml
#
# Usage:
# PORTAINER_URL=https://portainer.yourdomain.com \
# PORTAINER_API_KEY=ptr_... \
# bash infra/deploy-stack.sh
set -euo pipefail
# Required configuration — abort with a usage hint if either is unset.
PORTAINER_URL="${PORTAINER_URL:?Set PORTAINER_URL}"
PORTAINER_API_KEY="${PORTAINER_API_KEY:?Set PORTAINER_API_KEY}"
# Optional overrides; file paths default to siblings of this script.
STACK_NAME="${STACK_NAME:-xbot}"
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
ENV_FILE="${ENV_FILE:-$SCRIPT_DIR/.env}"
COMPOSE_FILE="${COMPOSE_FILE:-$SCRIPT_DIR/docker-compose.yml}"
# Fail early if either input file is missing.
for f in "$ENV_FILE" "$COMPOSE_FILE"; do
[ -f "$f" ] || { echo "ERROR: not found: $f"; exit 1; }
done
API="$PORTAINER_URL/api"
echo "[$STACK_NAME] Looking up Portainer endpoint..."
# Use the first endpoint Portainer reports.
# NOTE(review): if curl or python3 fails here (e.g. empty endpoint list raises
# IndexError), set -e/pipefail aborts before the friendly error below prints —
# confirm that's acceptable.
ENDPOINT_ID=$(curl -s "$API/endpoints" \
-H "X-API-Key: $PORTAINER_API_KEY" | \
python3 -c "import sys,json; print(json.load(sys.stdin)[0]['Id'])")
[ -z "$ENDPOINT_ID" ] && { echo "ERROR: No Portainer endpoint found"; exit 1; }
# build_payload MODE
#   Print to stdout the JSON body for the Portainer stack API.
#   Reads $COMPOSE_FILE, $ENV_FILE and $STACK_NAME from the caller's scope.
#   Converts any `env_file:` entry in the compose text into an inline
#   `environment:` mapping of ${VAR} placeholders (Portainer substitutes the
#   attached env values itself) and attaches all parsed env vars.
#   MODE is "create" (adds "name") or "update" (adds "prune": true).
build_payload() {
  local mode="$1"
  # Heredoc delimiter is quoted ('PYEOF') so the shell expands nothing inside;
  # values are passed to Python via argv instead.
  python3 - "$COMPOSE_FILE" "$ENV_FILE" "$STACK_NAME" "$mode" <<'PYEOF'
import json, sys, re

compose_file, env_file, stack_name, mode = sys.argv[1:5]

with open(compose_file) as f:
    compose = f.read()

# Parse KEY=VALUE lines from the .env file, skipping blanks and comments.
env_vars = []
with open(env_file) as f:
    for line in f:
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        key, _, value = line.partition('=')
        if key:
            env_vars.append({"name": key, "value": value})

def replace_env_file(m):
    # Rewrite an `env_file:` entry as an `environment:` block of ${VAR}
    # placeholders, reusing the entry's original indentation.
    indent = re.search(r'\n(\s+)env_file', m.group(0)).group(1)
    lines = [f'\n{indent}environment:']
    for var in env_vars:
        lines.append(f'{indent} {var["name"]}: "${{{var["name"]}}}"')
    return '\n'.join(lines)

# NOTE(review): \s+ also matches newlines, so blank lines preceding an
# env_file entry are consumed by the match — verify against the compose file.
compose = re.sub(r'\n\s+env_file:[^\n]*', replace_env_file, compose)

payload = {"stackFileContent": compose, "env": env_vars}
if mode == "create":
    payload["name"] = stack_name
else:
    payload["prune"] = True
json.dump(payload, sys.stdout)
PYEOF
}
echo "[$STACK_NAME] Checking for existing stack..."
# Find the ID of an existing stack with our name (empty string if none).
# The inline Python must be properly indented: as previously written (flush
# left) it raised IndentationError, which `2>/dev/null || true` silently
# swallowed, so EXISTING_ID was always empty and updates became creates.
# NOTE(review): $STACK_NAME is interpolated directly into the Python source;
# a name containing a quote would break this snippet — confirm names are sane.
EXISTING_ID=$(curl -s "$API/stacks" \
  -H "X-API-Key: $PORTAINER_API_KEY" | \
  python3 -c "
import sys, json
for s in json.load(sys.stdin):
    if s['Name'] == '$STACK_NAME':
        print(s['Id']); break
" 2>/dev/null || true)

if [ -n "$EXISTING_ID" ]; then
  # Stack exists: PUT the new compose content; prune removed services.
  echo "[$STACK_NAME] Updating stack (ID: $EXISTING_ID)..."
  build_payload update | curl -s -X PUT "$API/stacks/$EXISTING_ID?endpointId=$ENDPOINT_ID" \
    -H "X-API-Key: $PORTAINER_API_KEY" \
    -H "Content-Type: application/json" \
    -d @- > /dev/null
else
  # No stack yet: create a standalone stack from the compose string.
  echo "[$STACK_NAME] Creating stack..."
  build_payload create | curl -s -X POST "$API/stacks/create/standalone/string?endpointId=$ENDPOINT_ID" \
    -H "X-API-Key: $PORTAINER_API_KEY" \
    -H "Content-Type: application/json" \
    -d @- > /dev/null
fi

echo ""
echo "=== $STACK_NAME deployed ==="
echo "Check status at: $PORTAINER_URL"

40
infra/docker-compose.yml Normal file
View File

@@ -0,0 +1,40 @@
# Two always-on containers share the same image and history volume; only the
# prompt each runs differs. Shared settings are factored into x-* extension
# fields (ignored by Compose) and pulled in via YAML merge keys, so the
# resolved configuration is identical to spelling each service out in full.
x-xbot-service: &xbot-service
  image: ${REGISTRY}/xbot:latest
  restart: unless-stopped
  volumes:
    - xbot-data:/data

x-xbot-env: &xbot-env
  PROMPTS_JSON: ${PROMPTS_JSON}
  DATA_DIR: /data
  ANTHROPIC: ${ANTHROPIC}
  KERNEL: ${KERNEL}
  THEJUNIPERSKY_USER: ${THEJUNIPERSKY_USER}
  THEJUNIPERSKY_PW: ${THEJUNIPERSKY_PW}
  THEJUNIPERSKY_EMAIL: ${THEJUNIPERSKY_EMAIL}
  THEJUNIPERSKY_PHONE: ${THEJUNIPERSKY_PHONE}

services:
  thejunipersky-tease:
    <<: *xbot-service
    environment:
      <<: *xbot-env
      PROMPT_NAME: tease

  thejunipersky-personality:
    <<: *xbot-service
    environment:
      <<: *xbot-env
      PROMPT_NAME: personality

volumes:
  xbot-data:

networks:
  # NOTE(review): no service references this network — it appears unused;
  # confirm whether it can be removed or should be attached to the services.
  xbot-net:
    name: xbot-net