feat(workers): Add proxy reload, staggered tasks, and bulk proxy import
- Periodic proxy reload: workers now reload proxies every 60s to pick up changes
- Staggered task scheduling: new API endpoints for creating tasks with delays
- Bulk proxy import: script supports multiple URL formats, including host:port:user:pass
- Proxy URL column: migration 086 adds proxy_url for non-standard formats

Key changes:
- crawl-rotator.ts: added reloadIfStale(), isStale(), setReloadInterval()
- task-worker.ts: calls reloadIfStale() in main loop
- task-service.ts: added createStaggeredTasks() and createAZStoreTasks()
- tasks.ts: added POST /batch/staggered and /batch/az-stores endpoints
- import-proxies.ts: new script for bulk proxy import
- CLAUDE.md: documented staggered task workflow

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
263
backend/src/scripts/import-proxies.ts
Normal file
263
backend/src/scripts/import-proxies.ts
Normal file
@@ -0,0 +1,263 @@
|
||||
/**
|
||||
* Bulk Proxy Import Script
|
||||
*
|
||||
* Imports proxies from various formats into the proxies table.
|
||||
* Supports:
|
||||
* - Standard format: http://user:pass@host:port
|
||||
* - Colon format: http://host:port:user:pass
|
||||
* - Simple format: host:port:user:pass (defaults to http)
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx src/scripts/import-proxies.ts < proxies.txt
|
||||
* echo "http://host:port:user:pass" | npx tsx src/scripts/import-proxies.ts
|
||||
* npx tsx src/scripts/import-proxies.ts --file proxies.txt
|
||||
* npx tsx src/scripts/import-proxies.ts --url "http://host:port:user:pass"
|
||||
*
|
||||
* Options:
|
||||
* --file <path> Read proxies from file (one per line)
|
||||
* --url <url> Import a single proxy URL
|
||||
* --max-connections Set max_connections for all imported proxies (default: 1)
|
||||
* --dry-run Parse and show what would be imported without inserting
|
||||
*/
|
||||
|
||||
import { getPool } from '../db/pool';
|
||||
import * as fs from 'fs';
|
||||
import * as readline from 'readline';
|
||||
|
||||
/** A proxy definition extracted from one input line, whatever its source format. */
interface ParsedProxy {
  // Scheme used to reach the proxy: 'http', 'https', or 'socks5'.
  protocol: string;
  // Proxy hostname or IP address.
  host: string;
  // TCP port the proxy listens on.
  port: number;
  // Optional basic-auth credentials; absent for unauthenticated proxies.
  username?: string;
  password?: string;
  // The URL as supplied (or reconstructed with an http:// prefix); kept so
  // non-standard colon formats can be stored verbatim in proxy_url.
  rawUrl: string;
}
|
||||
|
||||
/**
|
||||
* Parse a proxy URL in various formats
|
||||
*/
|
||||
function parseProxyUrl(input: string): ParsedProxy | null {
|
||||
const trimmed = input.trim();
|
||||
if (!trimmed || trimmed.startsWith('#')) return null;
|
||||
|
||||
// Format 1: Standard URL format - http://user:pass@host:port
|
||||
const standardMatch = trimmed.match(/^(https?|socks5):\/\/([^:]+):([^@]+)@([^:]+):(\d+)$/);
|
||||
if (standardMatch) {
|
||||
return {
|
||||
protocol: standardMatch[1],
|
||||
username: standardMatch[2],
|
||||
password: standardMatch[3],
|
||||
host: standardMatch[4],
|
||||
port: parseInt(standardMatch[5], 10),
|
||||
rawUrl: trimmed,
|
||||
};
|
||||
}
|
||||
|
||||
// Format 2: Standard URL without auth - http://host:port
|
||||
const noAuthMatch = trimmed.match(/^(https?|socks5):\/\/([^:]+):(\d+)$/);
|
||||
if (noAuthMatch) {
|
||||
return {
|
||||
protocol: noAuthMatch[1],
|
||||
host: noAuthMatch[2],
|
||||
port: parseInt(noAuthMatch[3], 10),
|
||||
rawUrl: trimmed,
|
||||
};
|
||||
}
|
||||
|
||||
// Format 3: Colon format with protocol - http://host:port:user:pass
|
||||
const colonWithProtocolMatch = trimmed.match(/^(https?|socks5):\/\/([^:]+):(\d+):([^:]+):(.+)$/);
|
||||
if (colonWithProtocolMatch) {
|
||||
return {
|
||||
protocol: colonWithProtocolMatch[1],
|
||||
host: colonWithProtocolMatch[2],
|
||||
port: parseInt(colonWithProtocolMatch[3], 10),
|
||||
username: colonWithProtocolMatch[4],
|
||||
password: colonWithProtocolMatch[5],
|
||||
rawUrl: trimmed, // Keep raw URL for non-standard format
|
||||
};
|
||||
}
|
||||
|
||||
// Format 4: Colon format without protocol - host:port:user:pass
|
||||
const colonMatch = trimmed.match(/^([^:]+):(\d+):([^:]+):(.+)$/);
|
||||
if (colonMatch) {
|
||||
return {
|
||||
protocol: 'http',
|
||||
host: colonMatch[1],
|
||||
port: parseInt(colonMatch[2], 10),
|
||||
username: colonMatch[3],
|
||||
password: colonMatch[4],
|
||||
rawUrl: `http://${trimmed}`, // Construct raw URL
|
||||
};
|
||||
}
|
||||
|
||||
// Format 5: Simple host:port
|
||||
const simpleMatch = trimmed.match(/^([^:]+):(\d+)$/);
|
||||
if (simpleMatch) {
|
||||
return {
|
||||
protocol: 'http',
|
||||
host: simpleMatch[1],
|
||||
port: parseInt(simpleMatch[2], 10),
|
||||
rawUrl: `http://${trimmed}`,
|
||||
};
|
||||
}
|
||||
|
||||
console.error(`[ImportProxies] Could not parse: ${trimmed}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if proxy URL is in non-standard format (needs proxy_url column)
|
||||
*/
|
||||
function isNonStandardFormat(rawUrl: string): boolean {
|
||||
// Colon format: protocol://host:port:user:pass
|
||||
return /^(https?|socks5):\/\/[^:]+:\d+:[^:]+:.+$/.test(rawUrl);
|
||||
}
|
||||
|
||||
async function importProxies(proxies: ParsedProxy[], maxConnections: number, dryRun: boolean) {
|
||||
if (dryRun) {
|
||||
console.log('\n[ImportProxies] DRY RUN - Would import:');
|
||||
for (const p of proxies) {
|
||||
const needsRawUrl = isNonStandardFormat(p.rawUrl);
|
||||
console.log(` ${p.host}:${p.port} (${p.protocol}) user=${p.username || 'none'} needsProxyUrl=${needsRawUrl}`);
|
||||
}
|
||||
console.log(`\nTotal: ${proxies.length} proxies`);
|
||||
return;
|
||||
}
|
||||
|
||||
const pool = getPool();
|
||||
let inserted = 0;
|
||||
let skipped = 0;
|
||||
|
||||
for (const proxy of proxies) {
|
||||
try {
|
||||
// Determine if we need to store the raw URL (non-standard format)
|
||||
const needsRawUrl = isNonStandardFormat(proxy.rawUrl);
|
||||
|
||||
const result = await pool.query(`
|
||||
INSERT INTO proxies (host, port, protocol, username, password, max_connections, proxy_url, active)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, true)
|
||||
ON CONFLICT (host, port, protocol)
|
||||
DO UPDATE SET
|
||||
username = EXCLUDED.username,
|
||||
password = EXCLUDED.password,
|
||||
max_connections = EXCLUDED.max_connections,
|
||||
proxy_url = EXCLUDED.proxy_url,
|
||||
active = true,
|
||||
updated_at = NOW()
|
||||
RETURNING id, (xmax = 0) as is_insert
|
||||
`, [
|
||||
proxy.host,
|
||||
proxy.port,
|
||||
proxy.protocol,
|
||||
proxy.username || null,
|
||||
proxy.password || null,
|
||||
maxConnections,
|
||||
needsRawUrl ? proxy.rawUrl : null,
|
||||
]);
|
||||
|
||||
const isInsert = result.rows[0]?.is_insert;
|
||||
if (isInsert) {
|
||||
inserted++;
|
||||
console.log(`[ImportProxies] Inserted: ${proxy.host}:${proxy.port}`);
|
||||
} else {
|
||||
console.log(`[ImportProxies] Updated: ${proxy.host}:${proxy.port}`);
|
||||
inserted++; // Count updates too
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error(`[ImportProxies] Error inserting ${proxy.host}:${proxy.port}: ${err.message}`);
|
||||
skipped++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n[ImportProxies] Complete: ${inserted} imported, ${skipped} skipped`);
|
||||
|
||||
// Notify any listening workers
|
||||
try {
|
||||
await pool.query(`NOTIFY proxy_added, 'bulk import'`);
|
||||
console.log('[ImportProxies] Sent proxy_added notification to workers');
|
||||
} catch {
|
||||
// Ignore notification errors
|
||||
}
|
||||
}
|
||||
|
||||
async function readFromStdin(): Promise<string[]> {
|
||||
return new Promise((resolve) => {
|
||||
const lines: string[] = [];
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
terminal: false,
|
||||
});
|
||||
|
||||
rl.on('line', (line) => {
|
||||
lines.push(line);
|
||||
});
|
||||
|
||||
rl.on('close', () => {
|
||||
resolve(lines);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const args = process.argv.slice(2);
|
||||
let lines: string[] = [];
|
||||
let maxConnections = 1;
|
||||
let dryRun = false;
|
||||
|
||||
// Parse arguments
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
if (args[i] === '--file' && args[i + 1]) {
|
||||
const content = fs.readFileSync(args[i + 1], 'utf-8');
|
||||
lines.push(...content.split('\n'));
|
||||
i++;
|
||||
} else if (args[i] === '--url' && args[i + 1]) {
|
||||
lines.push(args[i + 1]);
|
||||
i++;
|
||||
} else if (args[i] === '--max-connections' && args[i + 1]) {
|
||||
maxConnections = parseInt(args[i + 1], 10);
|
||||
i++;
|
||||
} else if (args[i] === '--dry-run') {
|
||||
dryRun = true;
|
||||
} else if (!args[i].startsWith('--')) {
|
||||
// Treat as URL directly
|
||||
lines.push(args[i]);
|
||||
}
|
||||
}
|
||||
|
||||
// If no lines yet, read from stdin
|
||||
if (lines.length === 0) {
|
||||
console.log('[ImportProxies] Reading from stdin...');
|
||||
lines = await readFromStdin();
|
||||
}
|
||||
|
||||
// Parse all lines
|
||||
const proxies: ParsedProxy[] = [];
|
||||
for (const line of lines) {
|
||||
const parsed = parseProxyUrl(line);
|
||||
if (parsed) {
|
||||
proxies.push(parsed);
|
||||
}
|
||||
}
|
||||
|
||||
if (proxies.length === 0) {
|
||||
console.error('[ImportProxies] No valid proxies found');
|
||||
console.error('\nUsage:');
|
||||
console.error(' npx tsx src/scripts/import-proxies.ts --url "http://host:port:user:pass"');
|
||||
console.error(' npx tsx src/scripts/import-proxies.ts --file proxies.txt');
|
||||
console.error(' echo "host:port:user:pass" | npx tsx src/scripts/import-proxies.ts');
|
||||
console.error('\nSupported formats:');
|
||||
console.error(' http://user:pass@host:port (standard)');
|
||||
console.error(' http://host:port:user:pass (colon format)');
|
||||
console.error(' host:port:user:pass (simple)');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`[ImportProxies] Parsed ${proxies.length} proxies (max_connections=${maxConnections})`);
|
||||
await importProxies(proxies, maxConnections, dryRun);
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error('[ImportProxies] Fatal error:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
Reference in New Issue
Block a user