cannaiq/backend/dist/services/proxyTestQueue.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.cleanupOrphanedJobs = cleanupOrphanedJobs;
exports.createProxyTestJob = createProxyTestJob;
exports.getProxyTestJob = getProxyTestJob;
exports.getActiveProxyTestJob = getActiveProxyTestJob;
exports.cancelProxyTestJob = cancelProxyTestJob;
const migrate_1 = require("../db/migrate");
const proxy_1 = require("./proxy");
// Simple in-memory queue - could be replaced with Bull/Bee-Queue for production
const activeJobs = new Map();
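// Each entry maps a job id to its control object, shaped { cancelled: boolean }.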
// Clean up orphaned jobs on server startup
async function cleanupOrphanedJobs() {
    try {
        const result = await migrate_1.pool.query(`
            UPDATE proxy_test_jobs
            SET status = 'cancelled',
                completed_at = CURRENT_TIMESTAMP,
                updated_at = CURRENT_TIMESTAMP
            WHERE status IN ('pending', 'running')
            RETURNING id
        `);
        if (result.rows.length > 0) {
            console.log(`🧹 Cleaned up ${result.rows.length} orphaned proxy test jobs`);
        }
    }
    catch (error) {
        console.error('Error cleaning up orphaned jobs:', error);
    }
}
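// Create a proxy_test_jobs row in 'pending' state and kick off the test run in
// the background. Throws if another job is already pending or running.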
async function createProxyTestJob() {
    // Check for existing running jobs first
    const existingJob = await getActiveProxyTestJob();
    if (existingJob) {
        throw new Error('A proxy test job is already running. Please cancel it first.');
    }
    const result = await migrate_1.pool.query(`
        SELECT COUNT(*) as count FROM proxies
    `);
    const totalProxies = parseInt(result.rows[0].count);
    const jobResult = await migrate_1.pool.query(`
        INSERT INTO proxy_test_jobs (status, total_proxies)
        VALUES ('pending', $1)
        RETURNING id
    `, [totalProxies]);
    const jobId = jobResult.rows[0].id;
    // Start job in background
    runProxyTestJob(jobId).catch(err => {
        console.error(`❌ Proxy test job ${jobId} failed:`, err);
    });
    return jobId;
}
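// Fetch a single proxy test job by id; returns null if no such job exists.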
async function getProxyTestJob(jobId) {
    const result = await migrate_1.pool.query(`
        SELECT id, status, total_proxies, tested_proxies, passed_proxies, failed_proxies
        FROM proxy_test_jobs
        WHERE id = $1
    `, [jobId]);
    if (result.rows.length === 0) {
        return null;
    }
    return result.rows[0];
}
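// Return the most recently created pending or running job, or null when nothing is active.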
async function getActiveProxyTestJob() {
    const result = await migrate_1.pool.query(`
        SELECT id, status, total_proxies, tested_proxies, passed_proxies, failed_proxies
        FROM proxy_test_jobs
        WHERE status IN ('pending', 'running')
        ORDER BY created_at DESC
        LIMIT 1
    `);
    if (result.rows.length === 0) {
        return null;
    }
    return result.rows[0];
}
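// Cancel a job: set the in-memory cancellation flag (if this process owns the job)
// and mark the row cancelled in the database. Returns true if a row was updated.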
async function cancelProxyTestJob(jobId) {
    // Try to cancel in-memory job first
    const jobControl = activeJobs.get(jobId);
    if (jobControl) {
        jobControl.cancelled = true;
    }
    // Always update database to handle orphaned jobs
    const result = await migrate_1.pool.query(`
        UPDATE proxy_test_jobs
        SET status = 'cancelled',
            completed_at = CURRENT_TIMESTAMP,
            updated_at = CURRENT_TIMESTAMP
        WHERE id = $1 AND status IN ('pending', 'running')
        RETURNING id
    `, [jobId]);
    return result.rows.length > 0;
}
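// Background loop: marks the job running, tests each proxy sequentially, persists
// per-proxy results and running totals, then records the final status
// ('completed', 'cancelled', or 'failed' on error).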
async function runProxyTestJob(jobId) {
    // Register job as active
    activeJobs.set(jobId, { cancelled: false });
    try {
        // Update status to running
        await migrate_1.pool.query(`
            UPDATE proxy_test_jobs
            SET status = 'running',
                started_at = CURRENT_TIMESTAMP,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = $1
        `, [jobId]);
        console.log(`🔍 Starting proxy test job ${jobId}...`);
        // Get all proxies
        const result = await migrate_1.pool.query(`
            SELECT id, host, port, protocol, username, password
            FROM proxies
            ORDER BY id
        `);
        let tested = 0;
        let passed = 0;
        let failed = 0;
        for (const proxy of result.rows) {
            // Check if job was cancelled
            const jobControl = activeJobs.get(jobId);
            if (jobControl?.cancelled) {
                console.log(`⏸️ Proxy test job ${jobId} cancelled`);
                break;
            }
            // Test the proxy
            const testResult = await (0, proxy_1.testProxy)(proxy.host, proxy.port, proxy.protocol, proxy.username, proxy.password);
            // Save result
            await (0, proxy_1.saveProxyTestResult)(proxy.id, testResult);
            tested++;
            if (testResult.success) {
                passed++;
            }
            else {
                failed++;
            }
            // Update job progress
            await migrate_1.pool.query(`
                UPDATE proxy_test_jobs
                SET tested_proxies = $1,
                    passed_proxies = $2,
                    failed_proxies = $3,
                    updated_at = CURRENT_TIMESTAMP
                WHERE id = $4
            `, [tested, passed, failed, jobId]);
            // Log progress every 10 proxies
            if (tested % 10 === 0) {
                console.log(`📊 Job ${jobId}: ${tested}/${result.rows.length} proxies tested (${passed} passed, ${failed} failed)`);
            }
        }
        // Mark job as completed
        const jobControl = activeJobs.get(jobId);
        const finalStatus = jobControl?.cancelled ? 'cancelled' : 'completed';
        await migrate_1.pool.query(`
            UPDATE proxy_test_jobs
            SET status = $1,
                completed_at = CURRENT_TIMESTAMP,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = $2
        `, [finalStatus, jobId]);
        console.log(`✅ Proxy test job ${jobId} ${finalStatus}: ${tested} tested, ${passed} passed, ${failed} failed`);
    }
    catch (error) {
        console.error(`❌ Proxy test job ${jobId} error:`, error);
        await migrate_1.pool.query(`
            UPDATE proxy_test_jobs
            SET status = 'failed',
                completed_at = CURRENT_TIMESTAMP,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = $1
        `, [jobId]);
    }
    finally {
        // Remove from active jobs
        activeJobs.delete(jobId);
    }
}
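// Illustrative usage sketch only, assuming an Express-style HTTP layer sits in
// front of this queue. The route paths and response shapes below are assumptions,
// not part of this module.
//
//   app.post('/api/proxy-tests', async (req, res) => {
//       try {
//           const jobId = await createProxyTestJob();
//           res.status(202).json({ jobId });
//       }
//       catch (err) {
//           res.status(409).json({ error: err.message });
//       }
//   });
//
//   app.get('/api/proxy-tests/:id', async (req, res) => {
//       const job = await getProxyTestJob(Number(req.params.id));
//       if (!job) {
//           return res.status(404).json({ error: 'Job not found' });
//       }
//       res.json(job);
//   });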