From afc288d2cf93c2b058aface259665d2150864b86 Mon Sep 17 00:00:00 2001
From: Kelly
Date: Wed, 10 Dec 2025 09:27:26 -0700
Subject: [PATCH 1/3] feat(ci): Auto-merge PRs after all type checks pass
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Uses Gitea API to merge PR automatically when all typecheck jobs succeed.
Requires gitea_token secret in Woodpecker.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5
---
 .woodpecker/.ci.yml | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/.woodpecker/.ci.yml b/.woodpecker/.ci.yml
index 1d23337a..34d7ccd6 100644
--- a/.woodpecker/.ci.yml
+++ b/.woodpecker/.ci.yml
@@ -45,6 +45,31 @@ steps:
     when:
       event: pull_request
 
+  # ===========================================
+  # AUTO-MERGE: Merge PR after all checks pass
+  # ===========================================
+  auto-merge:
+    image: alpine:latest
+    environment:
+      GITEA_TOKEN:
+        from_secret: gitea_token
+    commands:
+      - apk add --no-cache curl
+      - |
+        echo "Merging PR #${CI_COMMIT_PULL_REQUEST}..."
+        curl -s -X POST \
+          -H "Authorization: token $GITEA_TOKEN" \
+          -H "Content-Type: application/json" \
+          -d '{"Do":"merge"}' \
+          "https://code.cannabrands.app/api/v1/repos/Creationshop/dispensary-scraper/pulls/${CI_COMMIT_PULL_REQUEST}/merge"
+    depends_on:
+      - typecheck-backend
+      - typecheck-cannaiq
+      - typecheck-findadispo
+      - typecheck-findagram
+    when:
+      event: pull_request
+
   # ===========================================
   # MASTER DEPLOY: Parallel Docker builds
   # ===========================================

From 9647f94f89af0f08f4088d1f6d4fdcc7dbcb02d4 Mon Sep 17 00:00:00 2001
From: Kelly
Date: Wed, 10 Dec 2025 09:38:05 -0700
Subject: [PATCH 2/3] fix: Copy migrations folder to Docker image + fix SQL FILTER syntax
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Dockerfile: Add COPY migrations ./migrations so auto-migrate works on remote
- intelligence.ts: Fix FILTER clause placement in aggregate functions
  - FILTER must be inside AVG(), not wrapping ROUND()
  - Remove redundant FILTER on MIN (already filtered by WHERE)
  - Remove unsupported FILTER on PERCENTILE_CONT

These fixes resolve:
- "Failed to get task counts" (worker_tasks table missing)
- "FILTER specified but round is not an aggregate function" errors
- /national page "column m.state does not exist" (mv_state_metrics missing)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5
---
 backend/Dockerfile                 |  3 +++
 backend/src/routes/intelligence.ts | 11 +++++------
 2 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/backend/Dockerfile b/backend/Dockerfile
index e953efde..c0a8ad48 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -47,6 +47,9 @@ RUN npm ci --omit=dev
 
 COPY --from=builder /app/dist ./dist
 
+# Copy migrations for auto-migrate on startup
+COPY migrations ./migrations
+
 # Create local images directory for when MinIO is not configured
 RUN mkdir -p /app/public/images/products
 
diff --git a/backend/src/routes/intelligence.ts b/backend/src/routes/intelligence.ts
index 4da15411..fb970965 100644
--- a/backend/src/routes/intelligence.ts
+++ b/backend/src/routes/intelligence.ts
@@ -27,8 +27,8 @@ router.get('/brands', async (req: Request, res: Response) => {
         array_agg(DISTINCT d.state) FILTER (WHERE d.state IS NOT NULL) as states,
         COUNT(DISTINCT d.id) as store_count,
         COUNT(DISTINCT sp.id) as sku_count,
-        ROUND(AVG(sp.price_rec)::numeric, 2) FILTER (WHERE sp.price_rec > 0) as avg_price_rec,
-        ROUND(AVG(sp.price_med)::numeric, 2) FILTER (WHERE sp.price_med > 0) as avg_price_med
+        ROUND(AVG(sp.price_rec) FILTER (WHERE sp.price_rec > 0)::numeric, 2) as avg_price_rec,
+        ROUND(AVG(sp.price_med) FILTER (WHERE sp.price_med > 0)::numeric, 2) as avg_price_med
       FROM store_products sp
       JOIN dispensaries d ON sp.dispensary_id = d.id
       WHERE sp.brand_name_raw IS NOT NULL AND sp.brand_name_raw != ''
@@ -154,10 +154,9 @@ router.get('/pricing', async (req: Request, res: Response) => {
       SELECT
         sp.category_raw as category,
         ROUND(AVG(sp.price_rec)::numeric, 2) as avg_price,
-        MIN(sp.price_rec) FILTER (WHERE sp.price_rec > 0) as min_price,
+        MIN(sp.price_rec) as min_price,
         MAX(sp.price_rec) as max_price,
-        ROUND(PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY sp.price_rec)::numeric, 2)
-          FILTER (WHERE sp.price_rec > 0) as median_price,
+        ROUND(PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY sp.price_rec)::numeric, 2) as median_price,
         COUNT(*) as product_count
       FROM store_products sp
       WHERE sp.category_raw IS NOT NULL AND sp.price_rec > 0
@@ -169,7 +168,7 @@ router.get('/pricing', async (req: Request, res: Response) => {
       SELECT
         d.state,
         ROUND(AVG(sp.price_rec)::numeric, 2) as avg_price,
-        MIN(sp.price_rec) FILTER (WHERE sp.price_rec > 0) as min_price,
+        MIN(sp.price_rec) as min_price,
         MAX(sp.price_rec) as max_price,
         COUNT(DISTINCT sp.id) as product_count
       FROM store_products sp

From 249d3c1b7f7ce62aa7d005609e017242bdab8098 Mon Sep 17 00:00:00 2001
From: Kelly
Date: Wed, 10 Dec 2025 09:53:21 -0700
Subject: [PATCH 3/3] fix: Build args format for version info + schema-tolerant routes
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

CI/CD:
- Fix build_args format in woodpecker CI (comma-separated, not YAML list)
- This fixes "unknown" SHA/version showing on remote deployments

Backend schema-tolerant fixes (graceful fallbacks when tables missing):
- users.ts: Check which columns exist before querying
- worker-registry.ts: Return empty result if table doesn't exist
- task-service.ts: Add tableExists() helper, handle missing tables/views
- proxies.ts: Return totalProxies in test-all response

Frontend fixes:
- Proxies: Use total from response for accurate progress display
- SEO PagesTab: Dim Generate button when no AI provider active

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5
---
 .woodpecker/.ci.yml                      |  6 +-
 backend/src/routes/proxies.ts            |  8 +--
 backend/src/routes/users.ts              | 47 ++++++++++++----
 backend/src/routes/worker-registry.ts    | 23 ++++++++
 backend/src/services/proxyTestQueue.ts   |  9 ++-
 backend/src/tasks/task-service.ts        | 71 +++++++++++++++++++-----
 cannaiq/src/lib/api.ts                   |  2 +-
 cannaiq/src/pages/Proxies.tsx            |  3 +-
 cannaiq/src/pages/admin/seo/PagesTab.tsx | 34 ++++++++++--
 9 files changed, 161 insertions(+), 42 deletions(-)

diff --git a/.woodpecker/.ci.yml b/.woodpecker/.ci.yml
index 34d7ccd6..b1712481 100644
--- a/.woodpecker/.ci.yml
+++ b/.woodpecker/.ci.yml
@@ -89,11 +89,7 @@ steps:
         from_secret: registry_password
       platforms: linux/amd64
       provenance: false
-      build_args:
-        - APP_BUILD_VERSION=${CI_COMMIT_SHA}
-        - APP_GIT_SHA=${CI_COMMIT_SHA}
-        - APP_BUILD_TIME=${CI_PIPELINE_CREATED}
-        - CONTAINER_IMAGE_TAG=${CI_COMMIT_SHA:0:8}
+      build_args: APP_BUILD_VERSION=${CI_COMMIT_SHA:0:8},APP_GIT_SHA=${CI_COMMIT_SHA},APP_BUILD_TIME=${CI_PIPELINE_CREATED},CONTAINER_IMAGE_TAG=${CI_COMMIT_SHA:0:8}
     depends_on: []
     when:
       branch: master
diff --git a/backend/src/routes/proxies.ts b/backend/src/routes/proxies.ts
index 0ab9f090..dd1797a1 100755
--- a/backend/src/routes/proxies.ts
+++ b/backend/src/routes/proxies.ts
@@ -183,8 +183,8 @@ router.post('/test-all', requireRole('superadmin', 'admin'), async (req, res) =>
       return res.status(400).json({ error: 'Concurrency must be between 1 and 50' });
     }
 
-    const jobId = await createProxyTestJob(mode, concurrency);
-    res.json({ jobId, mode, concurrency, message: `Proxy test job started (mode: ${mode}, concurrency: ${concurrency})` });
+    const { jobId, totalProxies } = await createProxyTestJob(mode, concurrency);
+    res.json({ jobId, total: totalProxies, mode, concurrency, message: `Proxy test job started (mode: ${mode}, concurrency: ${concurrency})` });
   } catch (error: any) {
     console.error('Error starting proxy test job:', error);
     res.status(500).json({ error: error.message || 'Failed to start proxy test job' });
@@ -195,8 +195,8 @@ router.post('/test-all', requireRole('superadmin', 'admin'), async (req, res) =>
 router.post('/test-failed', requireRole('superadmin', 'admin'), async (req, res) => {
   try {
     const concurrency = parseInt(req.query.concurrency as string) || 10;
-    const jobId = await createProxyTestJob('failed', concurrency);
-    res.json({ jobId, mode: 'failed', concurrency, message: 'Retesting failed proxies...' });
+    const { jobId, totalProxies } = await createProxyTestJob('failed', concurrency);
+    res.json({ jobId, total: totalProxies, mode: 'failed', concurrency, message: 'Retesting failed proxies...' });
   } catch (error: any) {
     console.error('Error starting failed proxy test:', error);
     res.status(500).json({ error: error.message || 'Failed to start proxy test job' });
diff --git a/backend/src/routes/users.ts b/backend/src/routes/users.ts
index be803ffb..98d2122d 100644
--- a/backend/src/routes/users.ts
+++ b/backend/src/routes/users.ts
@@ -14,23 +14,36 @@ router.get('/', async (req: AuthRequest, res) => {
   try {
     const { search, domain } = req.query;
 
-    let query = `
-      SELECT id, email, role, first_name, last_name, phone, domain, created_at, updated_at
-      FROM users
-      WHERE 1=1
-    `;
+    // Check which columns exist (schema-tolerant)
+    const columnsResult = await pool.query(`
+      SELECT column_name FROM information_schema.columns
+      WHERE table_name = 'users' AND column_name IN ('first_name', 'last_name', 'phone', 'domain')
+    `);
+    const existingColumns = new Set(columnsResult.rows.map((r: any) => r.column_name));
+
+    // Build column list based on what exists
+    const selectCols = ['id', 'email', 'role', 'created_at', 'updated_at'];
+    if (existingColumns.has('first_name')) selectCols.push('first_name');
+    if (existingColumns.has('last_name')) selectCols.push('last_name');
+    if (existingColumns.has('phone')) selectCols.push('phone');
+    if (existingColumns.has('domain')) selectCols.push('domain');
+
+    let query = `SELECT ${selectCols.join(', ')} FROM users WHERE 1=1`;
     const params: any[] = [];
     let paramIndex = 1;
 
-    // Search by email, first_name, or last_name
+    // Search by email (and optionally first_name, last_name if they exist)
     if (search && typeof search === 'string') {
-      query += ` AND (email ILIKE $${paramIndex} OR first_name ILIKE $${paramIndex} OR last_name ILIKE $${paramIndex})`;
+      const searchClauses = ['email ILIKE $' + paramIndex];
+      if (existingColumns.has('first_name')) searchClauses.push('first_name ILIKE $' + paramIndex);
+      if (existingColumns.has('last_name')) searchClauses.push('last_name ILIKE $' + paramIndex);
+      query += ` AND (${searchClauses.join(' OR ')})`;
       params.push(`%${search}%`);
       paramIndex++;
     }
 
-    // Filter by domain
-    if (domain && typeof domain === 'string') {
+    // Filter by domain (if column exists)
+    if (domain && typeof domain === 'string' && existingColumns.has('domain')) {
       query += ` AND domain = $${paramIndex}`;
       params.push(domain);
       paramIndex++;
@@ -50,8 +63,22 @@ router.get('/:id', async (req: AuthRequest, res) => {
   try {
     const { id } = req.params;
+
+    // Check which columns exist (schema-tolerant)
+    const columnsResult = await pool.query(`
+      SELECT column_name FROM information_schema.columns
+      WHERE table_name = 'users' AND column_name IN ('first_name', 'last_name', 'phone', 'domain')
+    `);
+    const existingColumns = new Set(columnsResult.rows.map((r: any) => r.column_name));
+
+    const selectCols = ['id', 'email', 'role', 'created_at', 'updated_at'];
+    if (existingColumns.has('first_name')) selectCols.push('first_name');
+    if (existingColumns.has('last_name')) selectCols.push('last_name');
+    if (existingColumns.has('phone')) selectCols.push('phone');
+    if (existingColumns.has('domain')) selectCols.push('domain');
+
     const result = await pool.query(`
-      SELECT id, email, role, first_name, last_name, phone, domain, created_at, updated_at
+      SELECT ${selectCols.join(', ')}
       FROM users
       WHERE id = $1
     `, [id]);
 
diff --git a/backend/src/routes/worker-registry.ts b/backend/src/routes/worker-registry.ts
index edb79d1d..a46698f4 100644
--- a/backend/src/routes/worker-registry.ts
+++ b/backend/src/routes/worker-registry.ts
@@ -273,6 +273,29 @@ router.post('/deregister', async (req: Request, res: Response) => {
  */
 router.get('/workers', async (req: Request, res: Response) => {
   try {
+    // Check if worker_registry table exists
+    const tableCheck = await pool.query(`
+      SELECT EXISTS (
+        SELECT FROM information_schema.tables
+        WHERE table_name = 'worker_registry'
+      ) as exists
+    `);
+
+    if (!tableCheck.rows[0].exists) {
+      // Return empty result if table doesn't exist yet
+      return res.json({
+        success: true,
+        workers: [],
+        summary: {
+          active_count: 0,
+          idle_count: 0,
+          offline_count: 0,
+          total_count: 0,
+          active_roles: 0
+        }
+      });
+    }
+
     const { status, role, include_terminated = 'false' } = req.query;
 
     let whereClause = include_terminated === 'true' ? 'WHERE 1=1' : "WHERE status != 'terminated'";
diff --git a/backend/src/services/proxyTestQueue.ts b/backend/src/services/proxyTestQueue.ts
index c17b897c..184b8640 100644
--- a/backend/src/services/proxyTestQueue.ts
+++ b/backend/src/services/proxyTestQueue.ts
@@ -39,7 +39,12 @@ export async function cleanupOrphanedJobs(): Promise {
 
 export type ProxyTestMode = 'all' | 'failed' | 'inactive';
 
-export async function createProxyTestJob(mode: ProxyTestMode = 'all', concurrency: number = DEFAULT_CONCURRENCY): Promise<number> {
+export interface CreateJobResult {
+  jobId: number;
+  totalProxies: number;
+}
+
+export async function createProxyTestJob(mode: ProxyTestMode = 'all', concurrency: number = DEFAULT_CONCURRENCY): Promise<CreateJobResult> {
   // Check for existing running jobs first
   const existingJob = await getActiveProxyTestJob();
   if (existingJob) {
@@ -79,7 +84,7 @@ export async function createProxyTestJob(mode: ProxyTestMode = 'all', concurrenc
     console.error(`❌ Proxy test job ${jobId} failed:`, err);
   });
 
-  return jobId;
+  return { jobId, totalProxies };
 }
 
 export async function getProxyTestJob(jobId: number): Promise {
diff --git a/backend/src/tasks/task-service.ts b/backend/src/tasks/task-service.ts
index c28c6a76..518887e3 100644
--- a/backend/src/tasks/task-service.ts
+++ b/backend/src/tasks/task-service.ts
@@ -10,6 +10,17 @@
 
 import { pool } from '../db/pool';
 
+// Helper to check if a table exists
+async function tableExists(tableName: string): Promise<boolean> {
+  const result = await pool.query(`
+    SELECT EXISTS (
+      SELECT FROM information_schema.tables
+      WHERE table_name = $1
+    ) as exists
+  `, [tableName]);
+  return result.rows[0].exists;
+}
+
 export type TaskRole =
   | 'store_discovery'
   | 'entry_point_discovery'
@@ -270,6 +281,11 @@ class TaskService {
    * List tasks with filters
    */
   async listTasks(filter: TaskFilter = {}): Promise {
+    // Return empty list if table doesn't exist
+    if (!await tableExists('worker_tasks')) {
+      return [];
+    }
+
     const conditions: string[] = [];
     const params: (string | number | string[])[] = [];
     let paramIndex = 1;
@@ -323,21 +339,41 @@ class TaskService {
    * Get capacity metrics for all roles
    */
   async getCapacityMetrics(): Promise<CapacityMetrics[]> {
-    const result = await pool.query(
-      `SELECT * FROM v_worker_capacity`
-    );
-    return result.rows as CapacityMetrics[];
+    // Return empty metrics if worker_tasks table doesn't exist
+    if (!await tableExists('worker_tasks')) {
+      return [];
+    }
+
+    try {
+      const result = await pool.query(
+        `SELECT * FROM v_worker_capacity`
+      );
+      return result.rows as CapacityMetrics[];
+    } catch {
+      // View may not exist
+      return [];
+    }
   }
 
   /**
    * Get capacity metrics for a specific role
    */
   async getRoleCapacity(role: TaskRole): Promise<CapacityMetrics | null> {
-    const result = await pool.query(
-      `SELECT * FROM v_worker_capacity WHERE role = $1`,
-      [role]
-    );
-    return (result.rows[0] as CapacityMetrics) || null;
+    // Return null if worker_tasks table doesn't exist
+    if (!await tableExists('worker_tasks')) {
+      return null;
+    }
+
+    try {
+      const result = await pool.query(
+        `SELECT * FROM v_worker_capacity WHERE role = $1`,
+        [role]
+      );
+      return (result.rows[0] as CapacityMetrics) || null;
+    } catch {
+      // View may not exist
+      return null;
+    }
   }
 
   /**
@@ -463,12 +499,6 @@ class TaskService {
    * Get task counts by status for dashboard
    */
   async getTaskCounts(): Promise<Record<TaskStatus, number>> {
-    const result = await pool.query(
-      `SELECT status, COUNT(*) as count
-       FROM worker_tasks
-       GROUP BY status`
-    );
-
     const counts: Record<TaskStatus, number> = {
       pending: 0,
       claimed: 0,
@@ -478,6 +508,17 @@ class TaskService {
       stale: 0,
     };
 
+    // Return empty counts if table doesn't exist
+    if (!await tableExists('worker_tasks')) {
+      return counts;
+    }
+
+    const result = await pool.query(
+      `SELECT status, COUNT(*) as count
+       FROM worker_tasks
+       GROUP BY status`
+    );
+
     for (const row of result.rows) {
       const typedRow = row as { status: TaskStatus; count: string };
       counts[typedRow.status] = parseInt(typedRow.count, 10);
diff --git a/cannaiq/src/lib/api.ts b/cannaiq/src/lib/api.ts
index c1df5b10..3170d7bd 100755
--- a/cannaiq/src/lib/api.ts
+++ b/cannaiq/src/lib/api.ts
@@ -320,7 +320,7 @@ class ApiClient {
   }
 
   async testAllProxies() {
-    return this.request<{ jobId: number; message: string }>('/api/proxies/test-all', {
+    return this.request<{ jobId: number; total: number; message: string }>('/api/proxies/test-all', {
       method: 'POST',
     });
   }
diff --git a/cannaiq/src/pages/Proxies.tsx b/cannaiq/src/pages/Proxies.tsx
index 130dfbfd..9600d9ad 100755
--- a/cannaiq/src/pages/Proxies.tsx
+++ b/cannaiq/src/pages/Proxies.tsx
@@ -96,7 +96,8 @@ export function Proxies() {
     try {
       const response = await api.testAllProxies();
       setNotification({ message: 'Proxy testing job started', type: 'success' });
-      setActiveJob({ id: response.jobId, status: 'pending', tested_proxies: 0, total_proxies: proxies.length, passed_proxies: 0, failed_proxies: 0 });
+      // Use response.total if available, otherwise proxies.length, but immediately poll for accurate count
+      setActiveJob({ id: response.jobId, status: 'pending', tested_proxies: 0, total_proxies: response.total || proxies.length || 0, passed_proxies: 0, failed_proxies: 0 });
     } catch (error: any) {
       setNotification({ message: 'Failed to start testing: ' + error.message, type: 'error' });
     }
diff --git a/cannaiq/src/pages/admin/seo/PagesTab.tsx b/cannaiq/src/pages/admin/seo/PagesTab.tsx
index fec7b1f5..fba15f1d 100644
--- a/cannaiq/src/pages/admin/seo/PagesTab.tsx
+++ b/cannaiq/src/pages/admin/seo/PagesTab.tsx
@@ -7,7 +7,7 @@
 import { useState, useEffect } from 'react';
 import { api } from '../../../lib/api';
-import { Building2, Tag, Globe, Target, FileText, RefreshCw, Sparkles, Loader2 } from 'lucide-react';
+import { Building2, Tag, Globe, Target, FileText, RefreshCw, Sparkles, Loader2, AlertCircle } from 'lucide-react';
 
 interface SeoPage {
   id: number;
@@ -47,11 +47,31 @@ export function PagesTab() {
   const [search, setSearch] = useState('');
   const [syncing, setSyncing] = useState(false);
   const [generatingId, setGeneratingId] = useState(null);
+  const [hasActiveAiProvider, setHasActiveAiProvider] = useState(null);
 
   useEffect(() => {
     loadPages();
+    checkAiProvider();
   }, [typeFilter, search]);
 
+  async function checkAiProvider() {
+    try {
+      const data = await api.getSettings();
+      const settings = data.settings || [];
+      // Check if either Anthropic or OpenAI is configured with an API key AND enabled
+      const anthropicKey = settings.find((s: any) => s.key === 'anthropic_api_key')?.value;
+      const anthropicEnabled = settings.find((s: any) => s.key === 'anthropic_enabled')?.value === 'true';
+      const openaiKey = settings.find((s: any) => s.key === 'openai_api_key')?.value;
+      const openaiEnabled = settings.find((s: any) => s.key === 'openai_enabled')?.value === 'true';
+
+      const hasProvider = (anthropicKey && anthropicEnabled) || (openaiKey && openaiEnabled);
+      setHasActiveAiProvider(!!hasProvider);
+    } catch (error) {
+      console.error('Failed to check AI provider:', error);
+      setHasActiveAiProvider(false);
+    }
+  }
+
   async function loadPages() {
     setLoading(true);
     try {
@@ -188,12 +208,18 @@ export function PagesTab() {