feat: Responsive admin UI, SEO pages, and click analytics
## Responsive Admin UI - Layout.tsx: Mobile sidebar drawer with hamburger menu - Dashboard.tsx: 2-col grid on mobile, responsive stats cards - OrchestratorDashboard.tsx: Responsive table with hidden columns - PagesTab.tsx: Responsive filters and table ## SEO Pages - New /admin/seo section with state landing pages - SEO page generation and management - State page content with dispensary/product counts ## Click Analytics - Product click tracking infrastructure - Click analytics dashboard ## Other Changes - Consumer features scaffolding (alerts, deals, favorites) - Health panel component - Workers dashboard improvements - Legacy DutchieAZ pages removed 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
108
backend/migrations/060_consumer_verification_notifications.sql
Normal file
108
backend/migrations/060_consumer_verification_notifications.sql
Normal file
@@ -0,0 +1,108 @@
-- Migration: consumer verification and notification tracking.
-- Adds email/SMS verification state to users, a notification history table,
-- and findadispo favorites / alerts / saved-search tables.
--
-- NOTE(review): these tables use TIMESTAMP without time zone, while
-- migration 063 uses TIMESTAMP WITH TIME ZONE — confirm the intended
-- convention and align in a follow-up migration.

-- Email/phone verification columns plus the delivery-channel preference.
ALTER TABLE users
    ADD COLUMN IF NOT EXISTS email_verified BOOLEAN DEFAULT false,
    ADD COLUMN IF NOT EXISTS email_verification_token VARCHAR(64),
    ADD COLUMN IF NOT EXISTS email_verification_sent_at TIMESTAMP,
    ADD COLUMN IF NOT EXISTS phone_verified BOOLEAN DEFAULT false,
    ADD COLUMN IF NOT EXISTS phone_verification_code VARCHAR(6),
    ADD COLUMN IF NOT EXISTS phone_verification_sent_at TIMESTAMP,
    ADD COLUMN IF NOT EXISTS notification_preference VARCHAR(20) DEFAULT 'email'; -- 'email', 'sms', or 'both'

-- Location columns, used to scope notifications geographically.
ALTER TABLE users
    ADD COLUMN IF NOT EXISTS city VARCHAR(100),
    ADD COLUMN IF NOT EXISTS state VARCHAR(50);

-- One row per notification attempt, with delivery-funnel timestamps.
CREATE TABLE IF NOT EXISTS consumer_notifications (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
    alert_id INTEGER,                       -- id in findagram_alerts or a findadispo saved search
    alert_source VARCHAR(20),               -- 'findagram' or 'findadispo'; disambiguates alert_id
    notification_type VARCHAR(20) NOT NULL, -- 'email' or 'sms'

    -- What triggered this notification.
    trigger_type VARCHAR(50) NOT NULL,      -- 'price_drop', 'back_in_stock', 'product_on_special', 'deal_alert'
    product_id INTEGER,
    dispensary_id INTEGER,

    -- Rendered message content.
    subject VARCHAR(255),
    message_content TEXT,

    -- Delivery-funnel tracking.
    sent_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    delivered_at TIMESTAMP,
    opened_at TIMESTAMP,
    clicked_at TIMESTAMP,
    completed_at TIMESTAMP,                 -- user has "seen" it, or we mark it done

    -- Provider-side id, for SMS gateway tracking.
    external_message_id VARCHAR(100),

    -- Delivery status.
    status VARCHAR(20) DEFAULT 'pending',   -- pending, sent, delivered, failed
    error_message TEXT
);

-- Dispensary favorites for findadispo (mirrors findagram_favorites).
CREATE TABLE IF NOT EXISTS findadispo_favorites (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
    dispensary_id INTEGER,            -- references the dispensaries table (no FK declared)

    -- Snapshot of the dispensary at the time it was saved.
    dispensary_name VARCHAR(255),
    dispensary_city VARCHAR(100),
    dispensary_state VARCHAR(50),

    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(user_id, dispensary_id)    -- one favorite per user per dispensary
);

-- User-configured alerts for findadispo.
CREATE TABLE IF NOT EXISTS findadispo_alerts (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
    alert_type VARCHAR(50) NOT NULL,  -- 'new_dispensary', 'deal_available'

    -- Alert target: a specific dispensary and/or a city/state area.
    dispensary_id INTEGER,
    city VARCHAR(100),
    state VARCHAR(50),

    -- Trigger state.
    is_active BOOLEAN DEFAULT true,
    last_triggered_at TIMESTAMP,
    trigger_count INTEGER DEFAULT 0,

    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Saved searches for findadispo, with per-search notification toggles.
CREATE TABLE IF NOT EXISTS findadispo_saved_searches (
    id SERIAL PRIMARY KEY,
    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
    name VARCHAR(100) NOT NULL,

    -- Search criteria.
    query TEXT,
    city VARCHAR(100),
    state VARCHAR(50),
    min_rating DECIMAL(3, 2),
    max_distance INTEGER,             -- NOTE(review): units unspecified — presumably miles; confirm
    amenities TEXT[],                 -- array of amenity filters

    -- Notification settings.
    notify_on_new_dispensary BOOLEAN DEFAULT false,
    notify_on_deals BOOLEAN DEFAULT false,

    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Lookup indexes.
-- NOTE(review): the two boolean indexes below are very low cardinality;
-- partial indexes (e.g. WHERE email_verified = false) would be smaller if
-- queries only ever scan unverified users — confirm query patterns first.
CREATE INDEX IF NOT EXISTS idx_users_email_verified ON users(email_verified);
CREATE INDEX IF NOT EXISTS idx_users_phone_verified ON users(phone_verified);
CREATE INDEX IF NOT EXISTS idx_users_city_state ON users(city, state);
CREATE INDEX IF NOT EXISTS idx_consumer_notifications_user_id ON consumer_notifications(user_id);
CREATE INDEX IF NOT EXISTS idx_consumer_notifications_status ON consumer_notifications(status);
CREATE INDEX IF NOT EXISTS idx_consumer_notifications_sent_at ON consumer_notifications(sent_at);
CREATE INDEX IF NOT EXISTS idx_findadispo_favorites_user_id ON findadispo_favorites(user_id);
CREATE INDEX IF NOT EXISTS idx_findadispo_alerts_user_id ON findadispo_alerts(user_id);
CREATE INDEX IF NOT EXISTS idx_findadispo_alerts_active ON findadispo_alerts(is_active) WHERE is_active = true;
CREATE INDEX IF NOT EXISTS idx_findadispo_saved_searches_user_id ON findadispo_saved_searches(user_id);
50
backend/migrations/061_product_click_events.sql
Normal file
50
backend/migrations/061_product_click_events.sql
Normal file
@@ -0,0 +1,50 @@
-- Migration: product click event tracking for campaigns and analytics.
-- Records user interactions with products across the platform.

CREATE TABLE IF NOT EXISTS product_click_events (
    id SERIAL PRIMARY KEY,

    -- Core identifiers.
    product_id VARCHAR(100) NOT NULL, -- internal product identifier
    store_id VARCHAR(100),            -- store context (nullable)
    brand_id VARCHAR(100),            -- brand context (nullable)
    campaign_id INTEGER,              -- campaign context (nullable; logically references campaigns)

    -- Action details.
    action VARCHAR(50) NOT NULL,      -- 'view', 'open_store', 'open_product', 'compare', 'other'
    source VARCHAR(100) NOT NULL,     -- page/component where the click occurred

    -- User context (optional; pulled from auth when available).
    user_id INTEGER,

    -- Request metadata.
    ip_address INET,
    user_agent TEXT,

    -- Timestamps.
    occurred_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Single-column indexes for the common lookups.
CREATE INDEX IF NOT EXISTS idx_product_click_events_product_id ON product_click_events(product_id);
CREATE INDEX IF NOT EXISTS idx_product_click_events_store_id ON product_click_events(store_id);
CREATE INDEX IF NOT EXISTS idx_product_click_events_brand_id ON product_click_events(brand_id);
CREATE INDEX IF NOT EXISTS idx_product_click_events_action ON product_click_events(action);
CREATE INDEX IF NOT EXISTS idx_product_click_events_source ON product_click_events(source);
CREATE INDEX IF NOT EXISTS idx_product_click_events_user_id ON product_click_events(user_id);

-- Campaign analytics. This partial composite also serves plain campaign_id
-- equality lookups (campaign_id = X implies campaign_id IS NOT NULL and
-- campaign_id is the leading column), so no separate single-column
-- campaign_id index is created — it would be redundant write overhead.
CREATE INDEX IF NOT EXISTS idx_product_click_events_campaign_product
    ON product_click_events(campaign_id, product_id)
    WHERE campaign_id IS NOT NULL;

-- Time-series queries. The leading occurred_at column also serves
-- occurred_at-only range scans, so no separate occurred_at index is needed.
CREATE INDEX IF NOT EXISTS idx_product_click_events_time_action
    ON product_click_events(occurred_at, action);

-- Documentation.
COMMENT ON TABLE product_click_events IS 'Tracks product interactions across the CannaIQ platform for analytics and campaign measurement';
41
backend/migrations/062_click_analytics_enhancements.sql
Normal file
41
backend/migrations/062_click_analytics_enhancements.sql
Normal file
@@ -0,0 +1,41 @@
-- Migration: enhance click events for brand & campaign analytics.
-- Adds event_type / page_type categorization plus URL and device context.

-- Event category; existing rows pick up the 'product_click' default.
ALTER TABLE product_click_events
    ADD COLUMN IF NOT EXISTS event_type VARCHAR(50) DEFAULT 'product_click';

-- Page the event originated from.
ALTER TABLE product_click_events
    ADD COLUMN IF NOT EXISTS page_type VARCHAR(100);

-- URL path, kept for debugging/analysis.
ALTER TABLE product_click_events
    ADD COLUMN IF NOT EXISTS url_path TEXT;

-- Device type: desktop, mobile, tablet.
ALTER TABLE product_click_events
    ADD COLUMN IF NOT EXISTS device_type VARCHAR(20);

-- Filtering by event category.
CREATE INDEX IF NOT EXISTS idx_product_click_events_event_type
    ON product_click_events(event_type);

-- Page-level analytics.
CREATE INDEX IF NOT EXISTS idx_product_click_events_page_type
    ON product_click_events(page_type);

-- Brand analytics over time. NOTE(review): with this partial composite in
-- place, a single-column brand_id index (migration 061) is largely
-- redundant for brand_id lookups — consider dropping it in a follow-up.
CREATE INDEX IF NOT EXISTS idx_product_click_events_brand_time
    ON product_click_events(brand_id, occurred_at)
    WHERE brand_id IS NOT NULL;

-- Store + brand analytics.
CREATE INDEX IF NOT EXISTS idx_product_click_events_store_brand
    ON product_click_events(store_id, brand_id)
    WHERE store_id IS NOT NULL;

-- Column documentation.
COMMENT ON COLUMN product_click_events.event_type IS 'Type of event: product_click, page_view, etc.';
COMMENT ON COLUMN product_click_events.page_type IS 'Page where event occurred: StoreDetailPage, BrandsIntelligence, CampaignDetail, etc.';
COMMENT ON COLUMN product_click_events.device_type IS 'Device type: desktop, mobile, tablet';
22
backend/migrations/063_seo_pages.sql
Normal file
22
backend/migrations/063_seo_pages.sql
Normal file
@@ -0,0 +1,22 @@
-- SEO pages table for CannaiQ marketing content.
-- All content stored here must be sanitized before insertion.

CREATE TABLE IF NOT EXISTS seo_pages (
    id SERIAL PRIMARY KEY,
    slug VARCHAR(255) NOT NULL UNIQUE,           -- UNIQUE constraint also creates the slug index
    type VARCHAR(50) NOT NULL,                   -- state, brand, competitor, landing, blog
    content JSONB NOT NULL,
    status VARCHAR(20) NOT NULL DEFAULT 'draft', -- draft, published, archived
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- Indexes for efficient lookup.
-- No separate slug index: the UNIQUE constraint above already backs slug
-- lookups with a unique index, so one would be pure write overhead.
-- No separate type index: the (type, status) composite serves type-only
-- queries via its leading column.
CREATE INDEX IF NOT EXISTS idx_seo_pages_status ON seo_pages(status);
CREATE INDEX IF NOT EXISTS idx_seo_pages_type_status ON seo_pages(type, status);

-- Documentation of content requirements.
COMMENT ON TABLE seo_pages IS 'SEO content for CannaiQ marketing pages. All content must use approved enterprise-safe phrasing.';
COMMENT ON COLUMN seo_pages.content IS 'JSON content with blocks structure. Must be sanitized via ContentValidator before insert.';
221
backend/node_modules/.package-lock.json
generated
vendored
221
backend/node_modules/.package-lock.json
generated
vendored
@@ -41,6 +41,11 @@
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@ioredis/commands": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.4.0.tgz",
|
||||
"integrity": "sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ=="
|
||||
},
|
||||
"node_modules/@mapbox/node-pre-gyp": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
|
||||
@@ -104,6 +109,18 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
|
||||
},
|
||||
"node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
|
||||
"integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@puppeteer/browsers": {
|
||||
"version": "1.9.1",
|
||||
"resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.9.1.tgz",
|
||||
@@ -159,6 +176,15 @@
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/bcryptjs": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/bcryptjs/-/bcryptjs-3.0.0.tgz",
|
||||
"integrity": "sha512-WRZOuCuaz8UcZZE4R5HXTco2goQSI2XxjGY3hbM/xDvwmqFWd4ivooImsMx65OKM6CtNKbnZ5YL+YwAwK7c1dg==",
|
||||
"deprecated": "This is a stub types definition. bcryptjs provides its own type definitions, so you do not need this installed.",
|
||||
"dependencies": {
|
||||
"bcryptjs": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/body-parser": {
|
||||
"version": "1.19.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz",
|
||||
@@ -534,6 +560,14 @@
|
||||
"node": ">= 10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bcryptjs": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-3.0.3.tgz",
|
||||
"integrity": "sha512-GlF5wPWnSa/X5LKM1o0wz0suXIINz1iHRLvTS+sLyi7XPbe5ycmYI3DlZqVGZZtDgl4DmasFg7gOB3JYbphV5g==",
|
||||
"bin": {
|
||||
"bcrypt": "bin/bcrypt"
|
||||
}
|
||||
},
|
||||
"node_modules/bl": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
|
||||
@@ -630,6 +664,32 @@
|
||||
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
||||
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
|
||||
},
|
||||
"node_modules/bullmq": {
|
||||
"version": "5.65.1",
|
||||
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.65.1.tgz",
|
||||
"integrity": "sha512-QgDAzX1G9L5IRy4Orva5CfQTXZT+5K+OfO/kbPrAqN+pmL9LJekCzxijXehlm/u2eXfWPfWvIdJJIqiuz3WJSg==",
|
||||
"dependencies": {
|
||||
"cron-parser": "^4.9.0",
|
||||
"ioredis": "^5.8.2",
|
||||
"msgpackr": "^1.11.2",
|
||||
"node-abort-controller": "^3.1.1",
|
||||
"semver": "^7.5.4",
|
||||
"tslib": "^2.0.0",
|
||||
"uuid": "^11.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bullmq/node_modules/uuid": {
|
||||
"version": "11.1.0",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz",
|
||||
"integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==",
|
||||
"funding": [
|
||||
"https://github.com/sponsors/broofa",
|
||||
"https://github.com/sponsors/ctavan"
|
||||
],
|
||||
"bin": {
|
||||
"uuid": "dist/esm/bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/bytes": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||
@@ -778,6 +838,14 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/cluster-key-slot": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
|
||||
"integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/color": {
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
|
||||
@@ -913,6 +981,17 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/cron-parser": {
|
||||
"version": "4.9.0",
|
||||
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
|
||||
"integrity": "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==",
|
||||
"dependencies": {
|
||||
"luxon": "^3.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/cross-fetch": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.0.0.tgz",
|
||||
@@ -1043,6 +1122,14 @@
|
||||
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
|
||||
"integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ=="
|
||||
},
|
||||
"node_modules/denque": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
|
||||
"integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
|
||||
"engines": {
|
||||
"node": ">=0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/depd": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||
@@ -2096,6 +2183,50 @@
|
||||
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
|
||||
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
|
||||
},
|
||||
"node_modules/ioredis": {
|
||||
"version": "5.8.2",
|
||||
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.8.2.tgz",
|
||||
"integrity": "sha512-C6uC+kleiIMmjViJINWk80sOQw5lEzse1ZmvD+S/s8p8CWapftSaC+kocGTx6xrbrJ4WmYQGC08ffHLr6ToR6Q==",
|
||||
"dependencies": {
|
||||
"@ioredis/commands": "1.4.0",
|
||||
"cluster-key-slot": "^1.1.0",
|
||||
"debug": "^4.3.4",
|
||||
"denque": "^2.1.0",
|
||||
"lodash.defaults": "^4.2.0",
|
||||
"lodash.isarguments": "^3.1.0",
|
||||
"redis-errors": "^1.2.0",
|
||||
"redis-parser": "^3.0.0",
|
||||
"standard-as-callback": "^2.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.22.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/ioredis"
|
||||
}
|
||||
},
|
||||
"node_modules/ioredis/node_modules/debug": {
|
||||
"version": "4.4.3",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
||||
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/ioredis/node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
|
||||
},
|
||||
"node_modules/ip-address": {
|
||||
"version": "10.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz",
|
||||
@@ -2343,11 +2474,21 @@
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
|
||||
},
|
||||
"node_modules/lodash.defaults": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
|
||||
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="
|
||||
},
|
||||
"node_modules/lodash.includes": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
|
||||
"integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="
|
||||
},
|
||||
"node_modules/lodash.isarguments": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz",
|
||||
"integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg=="
|
||||
},
|
||||
"node_modules/lodash.isboolean": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
|
||||
@@ -2386,6 +2527,14 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/luxon": {
|
||||
"version": "3.7.2",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz",
|
||||
"integrity": "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/make-dir": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
|
||||
@@ -2614,6 +2763,35 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
|
||||
},
|
||||
"node_modules/msgpackr": {
|
||||
"version": "1.11.5",
|
||||
"resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.5.tgz",
|
||||
"integrity": "sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==",
|
||||
"optionalDependencies": {
|
||||
"msgpackr-extract": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/msgpackr-extract": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
|
||||
"integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"node-gyp-build-optional-packages": "5.2.2"
|
||||
},
|
||||
"bin": {
|
||||
"download-msgpackr-prebuilds": "bin/download-prebuilds.js"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/napi-build-utils": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz",
|
||||
@@ -2646,6 +2824,11 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/node-abort-controller": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz",
|
||||
"integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ=="
|
||||
},
|
||||
"node_modules/node-addon-api": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
|
||||
@@ -2689,6 +2872,20 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/node-gyp-build-optional-packages": {
|
||||
"version": "5.2.2",
|
||||
"resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
|
||||
"integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"detect-libc": "^2.0.1"
|
||||
},
|
||||
"bin": {
|
||||
"node-gyp-build-optional-packages": "bin.js",
|
||||
"node-gyp-build-optional-packages-optional": "optional.js",
|
||||
"node-gyp-build-optional-packages-test": "build-test.js"
|
||||
}
|
||||
},
|
||||
"node_modules/nopt": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
|
||||
@@ -3623,6 +3820,25 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/redis-errors": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
|
||||
"integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/redis-parser": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
|
||||
"integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
|
||||
"dependencies": {
|
||||
"redis-errors": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/require-directory": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
||||
@@ -4065,6 +4281,11 @@
|
||||
"node": ">= 10.x"
|
||||
}
|
||||
},
|
||||
"node_modules/standard-as-callback": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
|
||||
"integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A=="
|
||||
},
|
||||
"node_modules/statuses": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||
|
||||
285
backend/package-lock.json
generated
285
backend/package-lock.json
generated
@@ -8,8 +8,11 @@
|
||||
"name": "dutchie-menus-backend",
|
||||
"version": "1.5.1",
|
||||
"dependencies": {
|
||||
"@types/bcryptjs": "^3.0.0",
|
||||
"axios": "^1.6.2",
|
||||
"bcrypt": "^5.1.1",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"bullmq": "^5.65.1",
|
||||
"cheerio": "^1.1.2",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.3.1",
|
||||
@@ -17,6 +20,7 @@
|
||||
"express-rate-limit": "^7.1.5",
|
||||
"helmet": "^7.1.0",
|
||||
"https-proxy-agent": "^7.0.2",
|
||||
"ioredis": "^5.8.2",
|
||||
"ipaddr.js": "^2.2.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"minio": "^7.1.3",
|
||||
@@ -482,6 +486,11 @@
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@ioredis/commands": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.4.0.tgz",
|
||||
"integrity": "sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ=="
|
||||
},
|
||||
"node_modules/@mapbox/node-pre-gyp": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
|
||||
@@ -545,6 +554,78 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
|
||||
},
|
||||
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
|
||||
"integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz",
|
||||
"integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
]
|
||||
},
|
||||
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz",
|
||||
"integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz",
|
||||
"integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
|
||||
"integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
"node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz",
|
||||
"integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@puppeteer/browsers": {
|
||||
"version": "1.9.1",
|
||||
"resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.9.1.tgz",
|
||||
@@ -600,6 +681,15 @@
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/bcryptjs": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/bcryptjs/-/bcryptjs-3.0.0.tgz",
|
||||
"integrity": "sha512-WRZOuCuaz8UcZZE4R5HXTco2goQSI2XxjGY3hbM/xDvwmqFWd4ivooImsMx65OKM6CtNKbnZ5YL+YwAwK7c1dg==",
|
||||
"deprecated": "This is a stub types definition. bcryptjs provides its own type definitions, so you do not need this installed.",
|
||||
"dependencies": {
|
||||
"bcryptjs": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/body-parser": {
|
||||
"version": "1.19.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz",
|
||||
@@ -975,6 +1065,14 @@
|
||||
"node": ">= 10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bcryptjs": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-3.0.3.tgz",
|
||||
"integrity": "sha512-GlF5wPWnSa/X5LKM1o0wz0suXIINz1iHRLvTS+sLyi7XPbe5ycmYI3DlZqVGZZtDgl4DmasFg7gOB3JYbphV5g==",
|
||||
"bin": {
|
||||
"bcrypt": "bin/bcrypt"
|
||||
}
|
||||
},
|
||||
"node_modules/bl": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
|
||||
@@ -1071,6 +1169,32 @@
|
||||
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
||||
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
|
||||
},
|
||||
"node_modules/bullmq": {
|
||||
"version": "5.65.1",
|
||||
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.65.1.tgz",
|
||||
"integrity": "sha512-QgDAzX1G9L5IRy4Orva5CfQTXZT+5K+OfO/kbPrAqN+pmL9LJekCzxijXehlm/u2eXfWPfWvIdJJIqiuz3WJSg==",
|
||||
"dependencies": {
|
||||
"cron-parser": "^4.9.0",
|
||||
"ioredis": "^5.8.2",
|
||||
"msgpackr": "^1.11.2",
|
||||
"node-abort-controller": "^3.1.1",
|
||||
"semver": "^7.5.4",
|
||||
"tslib": "^2.0.0",
|
||||
"uuid": "^11.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bullmq/node_modules/uuid": {
|
||||
"version": "11.1.0",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz",
|
||||
"integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==",
|
||||
"funding": [
|
||||
"https://github.com/sponsors/broofa",
|
||||
"https://github.com/sponsors/ctavan"
|
||||
],
|
||||
"bin": {
|
||||
"uuid": "dist/esm/bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/bytes": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||
@@ -1219,6 +1343,14 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/cluster-key-slot": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
|
||||
"integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/color": {
|
||||
"version": "4.2.3",
|
||||
"resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
|
||||
@@ -1354,6 +1486,17 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/cron-parser": {
|
||||
"version": "4.9.0",
|
||||
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
|
||||
"integrity": "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==",
|
||||
"dependencies": {
|
||||
"luxon": "^3.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/cross-fetch": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.0.0.tgz",
|
||||
@@ -1484,6 +1627,14 @@
|
||||
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
|
||||
"integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ=="
|
||||
},
|
||||
"node_modules/denque": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
|
||||
"integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
|
||||
"engines": {
|
||||
"node": ">=0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/depd": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||
@@ -2551,6 +2702,50 @@
|
||||
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
|
||||
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
|
||||
},
|
||||
"node_modules/ioredis": {
|
||||
"version": "5.8.2",
|
||||
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.8.2.tgz",
|
||||
"integrity": "sha512-C6uC+kleiIMmjViJINWk80sOQw5lEzse1ZmvD+S/s8p8CWapftSaC+kocGTx6xrbrJ4WmYQGC08ffHLr6ToR6Q==",
|
||||
"dependencies": {
|
||||
"@ioredis/commands": "1.4.0",
|
||||
"cluster-key-slot": "^1.1.0",
|
||||
"debug": "^4.3.4",
|
||||
"denque": "^2.1.0",
|
||||
"lodash.defaults": "^4.2.0",
|
||||
"lodash.isarguments": "^3.1.0",
|
||||
"redis-errors": "^1.2.0",
|
||||
"redis-parser": "^3.0.0",
|
||||
"standard-as-callback": "^2.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.22.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/ioredis"
|
||||
}
|
||||
},
|
||||
"node_modules/ioredis/node_modules/debug": {
|
||||
"version": "4.4.3",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
||||
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/ioredis/node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
|
||||
},
|
||||
"node_modules/ip-address": {
|
||||
"version": "10.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz",
|
||||
@@ -2798,11 +2993,21 @@
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
|
||||
},
|
||||
"node_modules/lodash.defaults": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
|
||||
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="
|
||||
},
|
||||
"node_modules/lodash.includes": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
|
||||
"integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="
|
||||
},
|
||||
"node_modules/lodash.isarguments": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz",
|
||||
"integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg=="
|
||||
},
|
||||
"node_modules/lodash.isboolean": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
|
||||
@@ -2841,6 +3046,14 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/luxon": {
|
||||
"version": "3.7.2",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz",
|
||||
"integrity": "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/make-dir": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
|
||||
@@ -3069,6 +3282,35 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
|
||||
},
|
||||
"node_modules/msgpackr": {
|
||||
"version": "1.11.5",
|
||||
"resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.5.tgz",
|
||||
"integrity": "sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==",
|
||||
"optionalDependencies": {
|
||||
"msgpackr-extract": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/msgpackr-extract": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
|
||||
"integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
|
||||
"hasInstallScript": true,
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"node-gyp-build-optional-packages": "5.2.2"
|
||||
},
|
||||
"bin": {
|
||||
"download-msgpackr-prebuilds": "bin/download-prebuilds.js"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
|
||||
"@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/napi-build-utils": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz",
|
||||
@@ -3101,6 +3343,11 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/node-abort-controller": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz",
|
||||
"integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ=="
|
||||
},
|
||||
"node_modules/node-addon-api": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
|
||||
@@ -3144,6 +3391,20 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/node-gyp-build-optional-packages": {
|
||||
"version": "5.2.2",
|
||||
"resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
|
||||
"integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"detect-libc": "^2.0.1"
|
||||
},
|
||||
"bin": {
|
||||
"node-gyp-build-optional-packages": "bin.js",
|
||||
"node-gyp-build-optional-packages-optional": "optional.js",
|
||||
"node-gyp-build-optional-packages-test": "build-test.js"
|
||||
}
|
||||
},
|
||||
"node_modules/nopt": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
|
||||
@@ -4091,6 +4352,25 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/redis-errors": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
|
||||
"integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/redis-parser": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
|
||||
"integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
|
||||
"dependencies": {
|
||||
"redis-errors": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/require-directory": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
||||
@@ -4533,6 +4813,11 @@
|
||||
"node": ">= 10.x"
|
||||
}
|
||||
},
|
||||
"node_modules/standard-as-callback": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
|
||||
"integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A=="
|
||||
},
|
||||
"node_modules/statuses": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||
|
||||
@@ -5,8 +5,11 @@
|
||||
"main": "dist/index.js",
|
||||
"scripts": {
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"dev:worker": "tsx watch src/cli.ts --worker",
|
||||
"build": "tsc",
|
||||
"start": "node dist/index.js",
|
||||
"start:worker": "node dist/cli.js --worker",
|
||||
"worker": "tsx src/cli.ts --worker",
|
||||
"migrate": "tsx src/db/migrate.ts",
|
||||
"seed": "tsx src/db/seed.ts",
|
||||
"migrate:az": "tsx src/dutchie-az/db/migrate.ts",
|
||||
@@ -19,8 +22,11 @@
|
||||
"seed:dt:cities:bulk": "tsx src/scripts/seed-dt-cities-bulk.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/bcryptjs": "^3.0.0",
|
||||
"axios": "^1.6.2",
|
||||
"bcrypt": "^5.1.1",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"bullmq": "^5.65.1",
|
||||
"cheerio": "^1.1.2",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.3.1",
|
||||
@@ -28,6 +34,7 @@
|
||||
"express-rate-limit": "^7.1.5",
|
||||
"helmet": "^7.1.0",
|
||||
"https-proxy-agent": "^7.0.2",
|
||||
"ioredis": "^5.8.2",
|
||||
"ipaddr.js": "^2.2.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"minio": "^7.1.3",
|
||||
|
||||
@@ -140,11 +140,72 @@ export function requireRole(...roles: string[]) {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ error: 'Not authenticated' });
|
||||
}
|
||||
|
||||
|
||||
if (!roles.includes(req.user.role)) {
|
||||
return res.status(403).json({ error: 'Insufficient permissions' });
|
||||
}
|
||||
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional auth middleware - attempts to authenticate but allows unauthenticated requests
|
||||
*
|
||||
* If a valid token is provided, sets req.user with the authenticated user.
|
||||
* If no token or invalid token, continues without setting req.user.
|
||||
*
|
||||
* Use this for endpoints that work for both authenticated and anonymous users
|
||||
* (e.g., product click tracking where we want user_id when available).
|
||||
*/
|
||||
export async function optionalAuthMiddleware(req: AuthRequest, res: Response, next: NextFunction) {
|
||||
const authHeader = req.headers.authorization;
|
||||
|
||||
// No token provided - continue without auth
|
||||
if (!authHeader || !authHeader.startsWith('Bearer ')) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const token = authHeader.substring(7);
|
||||
|
||||
// Try JWT first
|
||||
const jwtUser = verifyToken(token);
|
||||
|
||||
if (jwtUser) {
|
||||
req.user = jwtUser;
|
||||
return next();
|
||||
}
|
||||
|
||||
// If JWT fails, try API token
|
||||
try {
|
||||
const result = await pool.query(`
|
||||
SELECT id, name, rate_limit, active, expires_at
|
||||
FROM api_tokens
|
||||
WHERE token = $1
|
||||
`, [token]);
|
||||
|
||||
if (result.rows.length > 0) {
|
||||
const apiToken = result.rows[0];
|
||||
|
||||
// Check if token is active and not expired
|
||||
if (apiToken.active && (!apiToken.expires_at || new Date(apiToken.expires_at) >= new Date())) {
|
||||
req.apiToken = {
|
||||
id: apiToken.id,
|
||||
name: apiToken.name,
|
||||
rate_limit: apiToken.rate_limit
|
||||
};
|
||||
|
||||
req.user = {
|
||||
id: apiToken.id,
|
||||
email: `api-token-${apiToken.id}@system`,
|
||||
role: 'api'
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Silently ignore errors - optional auth should not fail the request
|
||||
console.warn('[OptionalAuth] Error checking API token:', error);
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
67
backend/src/cli.ts
Normal file
67
backend/src/cli.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* CLI Entrypoint for CannaIQ Backend
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx src/cli.ts # Start API server
|
||||
* npx tsx src/cli.ts --worker # Start worker process
|
||||
* npx tsx src/cli.ts --help # Show help
|
||||
*
|
||||
* Environment Variables:
|
||||
* DATABASE_URL - PostgreSQL connection string (required)
|
||||
* PORT - API server port (default: 3010)
|
||||
* WORKER_ID - Worker instance identifier (auto-generated if not set)
|
||||
*/
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
function showHelp() {
|
||||
console.log(`
|
||||
CannaIQ Backend CLI
|
||||
|
||||
Usage:
|
||||
npx tsx src/cli.ts [options]
|
||||
|
||||
Options:
|
||||
--worker Start as a job queue worker (processes crawl jobs)
|
||||
--api Start as API server (default)
|
||||
--help Show this help message
|
||||
|
||||
Environment Variables:
|
||||
DATABASE_URL PostgreSQL connection string (required)
|
||||
PORT API server port (default: 3010)
|
||||
WORKER_ID Worker instance identifier (auto-generated)
|
||||
|
||||
Examples:
|
||||
# Start API server on default port
|
||||
DATABASE_URL="postgresql://..." npx tsx src/cli.ts
|
||||
|
||||
# Start worker process
|
||||
DATABASE_URL="postgresql://..." npx tsx src/cli.ts --worker
|
||||
|
||||
# Start API on custom port
|
||||
PORT=3015 DATABASE_URL="postgresql://..." npx tsx src/cli.ts --api
|
||||
`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
if (args.includes('--help') || args.includes('-h')) {
|
||||
showHelp();
|
||||
}
|
||||
|
||||
if (args.includes('--worker')) {
|
||||
console.log('[CLI] Starting worker process...');
|
||||
const { startWorker } = await import('./dutchie-az/services/worker');
|
||||
await startWorker();
|
||||
} else {
|
||||
// Default: start API server
|
||||
console.log('[CLI] Starting API server...');
|
||||
await import('./index');
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error('[CLI] Fatal error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,8 +1,9 @@
|
||||
/**
|
||||
* Dutchie AZ API Routes
|
||||
* Market Data API Routes
|
||||
*
|
||||
* Express routes for the Dutchie AZ data pipeline.
|
||||
* Express routes for the cannabis market data pipeline.
|
||||
* Provides API endpoints for stores, products, categories, and dashboard.
|
||||
* Mounted at /api/markets (with legacy aliases at /api/az and /api/dutchie-az)
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
@@ -203,101 +204,113 @@ router.get('/stores/:id', async (req: Request, res: Response) => {
|
||||
* GET /api/dutchie-az/stores/:id/summary
|
||||
* Get store summary with product count, categories, and brands
|
||||
* This is the main endpoint for the DispensaryDetail panel
|
||||
* OPTIMIZED: Combined 5 sequential queries into 2 parallel queries
|
||||
*/
|
||||
router.get('/stores/:id/summary', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { id } = req.params;
|
||||
const dispensaryId = parseInt(id, 10);
|
||||
|
||||
// Get dispensary info
|
||||
const { rows: dispensaryRows } = await query(
|
||||
`SELECT ${DISPENSARY_COLUMNS} FROM dispensaries WHERE id = $1`,
|
||||
[parseInt(id, 10)]
|
||||
);
|
||||
// Run all queries in parallel using Promise.all
|
||||
const [dispensaryResult, aggregateResult] = await Promise.all([
|
||||
// Query 1: Get dispensary info
|
||||
query(
|
||||
`SELECT ${DISPENSARY_COLUMNS} FROM dispensaries WHERE id = $1`,
|
||||
[dispensaryId]
|
||||
),
|
||||
|
||||
if (dispensaryRows.length === 0) {
|
||||
// Query 2: All product aggregations in one query using CTEs
|
||||
query(
|
||||
`
|
||||
WITH stock_counts AS (
|
||||
SELECT
|
||||
COUNT(*) as total_products,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'in_stock') as in_stock_count,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'out_of_stock') as out_of_stock_count,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'unknown') as unknown_count,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'missing_from_feed') as missing_count
|
||||
FROM dutchie_products
|
||||
WHERE dispensary_id = $1
|
||||
),
|
||||
category_agg AS (
|
||||
SELECT jsonb_agg(
|
||||
jsonb_build_object('type', type, 'subcategory', subcategory, 'product_count', cnt)
|
||||
ORDER BY type, subcategory
|
||||
) as categories
|
||||
FROM (
|
||||
SELECT type, subcategory, COUNT(*) as cnt
|
||||
FROM dutchie_products
|
||||
WHERE dispensary_id = $1 AND type IS NOT NULL
|
||||
GROUP BY type, subcategory
|
||||
) cat
|
||||
),
|
||||
brand_agg AS (
|
||||
SELECT jsonb_agg(
|
||||
jsonb_build_object('brand_name', brand_name, 'product_count', cnt)
|
||||
ORDER BY cnt DESC
|
||||
) as brands
|
||||
FROM (
|
||||
SELECT brand_name, COUNT(*) as cnt
|
||||
FROM dutchie_products
|
||||
WHERE dispensary_id = $1 AND brand_name IS NOT NULL
|
||||
GROUP BY brand_name
|
||||
) br
|
||||
),
|
||||
last_crawl AS (
|
||||
SELECT
|
||||
id, status, started_at, completed_at,
|
||||
products_found, products_new, products_updated, error_message
|
||||
FROM dispensary_crawl_jobs
|
||||
WHERE dispensary_id = $1
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1
|
||||
)
|
||||
SELECT
|
||||
sc.total_products, sc.in_stock_count, sc.out_of_stock_count, sc.unknown_count, sc.missing_count,
|
||||
COALESCE(ca.categories, '[]'::jsonb) as categories,
|
||||
COALESCE(ba.brands, '[]'::jsonb) as brands,
|
||||
lc.id as last_crawl_id, lc.status as last_crawl_status,
|
||||
lc.started_at as last_crawl_started, lc.completed_at as last_crawl_completed,
|
||||
lc.products_found, lc.products_new, lc.products_updated, lc.error_message
|
||||
FROM stock_counts sc
|
||||
CROSS JOIN category_agg ca
|
||||
CROSS JOIN brand_agg ba
|
||||
LEFT JOIN last_crawl lc ON true
|
||||
`,
|
||||
[dispensaryId]
|
||||
)
|
||||
]);
|
||||
|
||||
if (dispensaryResult.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Store not found' });
|
||||
}
|
||||
|
||||
const dispensary = dispensaryRows[0];
|
||||
|
||||
// Get product counts by stock status
|
||||
const { rows: countRows } = await query(
|
||||
`
|
||||
SELECT
|
||||
COUNT(*) as total_products,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'in_stock') as in_stock_count,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'out_of_stock') as out_of_stock_count,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'unknown') as unknown_count,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'missing_from_feed') as missing_count
|
||||
FROM dutchie_products
|
||||
WHERE dispensary_id = $1
|
||||
`,
|
||||
[id]
|
||||
);
|
||||
|
||||
// Get categories with counts for this store
|
||||
const { rows: categories } = await query(
|
||||
`
|
||||
SELECT
|
||||
type,
|
||||
subcategory,
|
||||
COUNT(*) as product_count
|
||||
FROM dutchie_products
|
||||
WHERE dispensary_id = $1 AND type IS NOT NULL
|
||||
GROUP BY type, subcategory
|
||||
ORDER BY type, subcategory
|
||||
`,
|
||||
[id]
|
||||
);
|
||||
|
||||
// Get brands with counts for this store
|
||||
const { rows: brands } = await query(
|
||||
`
|
||||
SELECT
|
||||
brand_name,
|
||||
COUNT(*) as product_count
|
||||
FROM dutchie_products
|
||||
WHERE dispensary_id = $1 AND brand_name IS NOT NULL
|
||||
GROUP BY brand_name
|
||||
ORDER BY product_count DESC
|
||||
`,
|
||||
[id]
|
||||
);
|
||||
|
||||
// Get last crawl info
|
||||
const { rows: lastCrawl } = await query(
|
||||
`
|
||||
SELECT
|
||||
id,
|
||||
status,
|
||||
started_at,
|
||||
completed_at,
|
||||
products_found,
|
||||
products_new,
|
||||
products_updated,
|
||||
error_message
|
||||
FROM dispensary_crawl_jobs
|
||||
WHERE dispensary_id = $1
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1
|
||||
`,
|
||||
[id]
|
||||
);
|
||||
|
||||
const counts = countRows[0] || {};
|
||||
const dispensary = dispensaryResult.rows[0];
|
||||
const agg = aggregateResult.rows[0] || {};
|
||||
const categories = agg.categories || [];
|
||||
const brands = agg.brands || [];
|
||||
|
||||
res.json({
|
||||
dispensary,
|
||||
totalProducts: parseInt(counts.total_products || '0', 10),
|
||||
inStockCount: parseInt(counts.in_stock_count || '0', 10),
|
||||
outOfStockCount: parseInt(counts.out_of_stock_count || '0', 10),
|
||||
unknownStockCount: parseInt(counts.unknown_count || '0', 10),
|
||||
missingFromFeedCount: parseInt(counts.missing_count || '0', 10),
|
||||
totalProducts: parseInt(agg.total_products || '0', 10),
|
||||
inStockCount: parseInt(agg.in_stock_count || '0', 10),
|
||||
outOfStockCount: parseInt(agg.out_of_stock_count || '0', 10),
|
||||
unknownStockCount: parseInt(agg.unknown_count || '0', 10),
|
||||
missingFromFeedCount: parseInt(agg.missing_count || '0', 10),
|
||||
categories,
|
||||
brands,
|
||||
brandCount: brands.length,
|
||||
categoryCount: categories.length,
|
||||
lastCrawl: lastCrawl[0] || null,
|
||||
lastCrawl: agg.last_crawl_id ? {
|
||||
id: agg.last_crawl_id,
|
||||
status: agg.last_crawl_status,
|
||||
started_at: agg.last_crawl_started,
|
||||
completed_at: agg.last_crawl_completed,
|
||||
products_found: agg.products_found,
|
||||
products_new: agg.products_new,
|
||||
products_updated: agg.products_updated,
|
||||
error_message: agg.error_message
|
||||
} : null,
|
||||
});
|
||||
} catch (error: any) {
|
||||
res.status(500).json({ error: error.message });
|
||||
@@ -1082,12 +1095,24 @@ router.post('/admin/crawl/:id', async (req: Request, res: Response) => {
|
||||
import { bulkEnqueueJobs, getQueueStats as getJobQueueStats } from '../services/job-queue';
|
||||
|
||||
/**
|
||||
* GET /api/dutchie-az/admin/dutchie-stores
|
||||
* Get all Dutchie stores with their crawl status
|
||||
* GET /api/markets/admin/crawlable-stores
|
||||
* Get all crawlable stores with their crawl status
|
||||
* OPTIMIZED: Replaced correlated subqueries with LEFT JOINs
|
||||
*/
|
||||
router.get('/admin/dutchie-stores', async (_req: Request, res: Response) => {
|
||||
router.get('/admin/crawlable-stores', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const { rows } = await query(`
|
||||
WITH product_counts AS (
|
||||
SELECT dispensary_id, COUNT(*) as product_count
|
||||
FROM dutchie_products
|
||||
GROUP BY dispensary_id
|
||||
),
|
||||
snapshot_times AS (
|
||||
SELECT p.dispensary_id, MAX(s.crawled_at) as last_snapshot_at
|
||||
FROM dutchie_product_snapshots s
|
||||
JOIN dutchie_products p ON s.dutchie_product_id = p.id
|
||||
GROUP BY p.dispensary_id
|
||||
)
|
||||
SELECT
|
||||
d.id,
|
||||
d.name,
|
||||
@@ -1100,18 +1125,11 @@ router.get('/admin/dutchie-stores', async (_req: Request, res: Response) => {
|
||||
d.last_crawl_at,
|
||||
d.consecutive_failures,
|
||||
d.failed_at,
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM dutchie_products
|
||||
WHERE dispensary_id = d.id
|
||||
) as product_count,
|
||||
(
|
||||
SELECT MAX(crawled_at)
|
||||
FROM dutchie_product_snapshots s
|
||||
JOIN dutchie_products p ON s.dutchie_product_id = p.id
|
||||
WHERE p.dispensary_id = d.id
|
||||
) as last_snapshot_at
|
||||
COALESCE(pc.product_count, 0) as product_count,
|
||||
st.last_snapshot_at
|
||||
FROM dispensaries d
|
||||
LEFT JOIN product_counts pc ON pc.dispensary_id = d.id
|
||||
LEFT JOIN snapshot_times st ON st.dispensary_id = d.id
|
||||
WHERE d.menu_type = 'dutchie'
|
||||
AND d.state = 'AZ'
|
||||
ORDER BY d.name
|
||||
@@ -1150,9 +1168,14 @@ router.get('/admin/dutchie-stores', async (_req: Request, res: Response) => {
|
||||
}
|
||||
});
|
||||
|
||||
// Legacy alias (deprecated - use /admin/crawlable-stores)
|
||||
router.get('/admin/dutchie-stores', (req: Request, res: Response) => {
|
||||
res.redirect(307, '/api/markets/admin/crawlable-stores');
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/dutchie-az/admin/crawl-all
|
||||
* Enqueue crawl jobs for ALL ready Dutchie stores
|
||||
* POST /api/markets/admin/crawl-all
|
||||
* Enqueue crawl jobs for ALL ready stores
|
||||
* This is a convenience endpoint to queue all stores without triggering the scheduler
|
||||
*/
|
||||
router.post('/admin/crawl-all', async (req: Request, res: Response) => {
|
||||
@@ -1699,69 +1722,74 @@ import {
|
||||
/**
|
||||
* GET /api/dutchie-az/monitor/active-jobs
|
||||
* Get all currently running jobs with real-time status including worker info
|
||||
* OPTIMIZED: Run all queries in parallel
|
||||
*/
|
||||
router.get('/monitor/active-jobs', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
// Get running jobs from job_run_logs (scheduled jobs like "enqueue all")
|
||||
// Includes worker_name and run_role for named workforce display
|
||||
const { rows: runningScheduledJobs } = await query<any>(`
|
||||
SELECT
|
||||
jrl.id,
|
||||
jrl.schedule_id,
|
||||
jrl.job_name,
|
||||
jrl.status,
|
||||
jrl.started_at,
|
||||
jrl.items_processed,
|
||||
jrl.items_succeeded,
|
||||
jrl.items_failed,
|
||||
jrl.metadata,
|
||||
jrl.worker_name,
|
||||
jrl.run_role,
|
||||
js.description as job_description,
|
||||
js.worker_name as schedule_worker_name,
|
||||
js.worker_role as schedule_worker_role,
|
||||
EXTRACT(EPOCH FROM (NOW() - jrl.started_at)) as duration_seconds
|
||||
FROM job_run_logs jrl
|
||||
LEFT JOIN job_schedules js ON jrl.schedule_id = js.id
|
||||
WHERE jrl.status = 'running'
|
||||
ORDER BY jrl.started_at DESC
|
||||
`);
|
||||
// Run all queries in parallel for better performance
|
||||
const [scheduledJobsResult, crawlJobsResult, queueStats, activeWorkers] = await Promise.all([
|
||||
// Query 1: Running scheduled jobs from job_run_logs
|
||||
query<any>(`
|
||||
SELECT
|
||||
jrl.id,
|
||||
jrl.schedule_id,
|
||||
jrl.job_name,
|
||||
jrl.status,
|
||||
jrl.started_at,
|
||||
jrl.items_processed,
|
||||
jrl.items_succeeded,
|
||||
jrl.items_failed,
|
||||
jrl.metadata,
|
||||
jrl.worker_name,
|
||||
jrl.run_role,
|
||||
js.description as job_description,
|
||||
js.worker_name as schedule_worker_name,
|
||||
js.worker_role as schedule_worker_role,
|
||||
EXTRACT(EPOCH FROM (NOW() - jrl.started_at)) as duration_seconds
|
||||
FROM job_run_logs jrl
|
||||
LEFT JOIN job_schedules js ON jrl.schedule_id = js.id
|
||||
WHERE jrl.status = 'running'
|
||||
ORDER BY jrl.started_at DESC
|
||||
`),
|
||||
|
||||
// Get running crawl jobs (individual store crawls with worker info)
|
||||
// Includes enqueued_by_worker for tracking which named worker enqueued the job
|
||||
const { rows: runningCrawlJobs } = await query<any>(`
|
||||
SELECT
|
||||
cj.id,
|
||||
cj.job_type,
|
||||
cj.dispensary_id,
|
||||
d.name as dispensary_name,
|
||||
d.city,
|
||||
d.platform_dispensary_id,
|
||||
cj.status,
|
||||
cj.started_at,
|
||||
cj.claimed_by as worker_id,
|
||||
cj.worker_hostname,
|
||||
cj.claimed_at,
|
||||
cj.enqueued_by_worker,
|
||||
cj.products_found,
|
||||
cj.products_upserted,
|
||||
cj.snapshots_created,
|
||||
cj.current_page,
|
||||
cj.total_pages,
|
||||
cj.last_heartbeat_at,
|
||||
cj.retry_count,
|
||||
EXTRACT(EPOCH FROM (NOW() - cj.started_at)) as duration_seconds
|
||||
FROM dispensary_crawl_jobs cj
|
||||
LEFT JOIN dispensaries d ON cj.dispensary_id = d.id
|
||||
WHERE cj.status = 'running'
|
||||
ORDER BY cj.started_at DESC
|
||||
`);
|
||||
// Query 2: Running crawl jobs with dispensary info
|
||||
query<any>(`
|
||||
SELECT
|
||||
cj.id,
|
||||
cj.job_type,
|
||||
cj.dispensary_id,
|
||||
d.name as dispensary_name,
|
||||
d.city,
|
||||
d.platform_dispensary_id,
|
||||
cj.status,
|
||||
cj.started_at,
|
||||
cj.claimed_by as worker_id,
|
||||
cj.worker_hostname,
|
||||
cj.claimed_at,
|
||||
cj.enqueued_by_worker,
|
||||
cj.products_found,
|
||||
cj.products_upserted,
|
||||
cj.snapshots_created,
|
||||
cj.current_page,
|
||||
cj.total_pages,
|
||||
cj.last_heartbeat_at,
|
||||
cj.retry_count,
|
||||
EXTRACT(EPOCH FROM (NOW() - cj.started_at)) as duration_seconds
|
||||
FROM dispensary_crawl_jobs cj
|
||||
LEFT JOIN dispensaries d ON cj.dispensary_id = d.id
|
||||
WHERE cj.status = 'running'
|
||||
ORDER BY cj.started_at DESC
|
||||
`),
|
||||
|
||||
// Get queue stats
|
||||
const queueStats = await getQueueStats();
|
||||
// Query 3: Queue stats
|
||||
getQueueStats(),
|
||||
|
||||
// Get active workers
|
||||
const activeWorkers = await getActiveWorkers();
|
||||
// Query 4: Active workers
|
||||
getActiveWorkers()
|
||||
]);
|
||||
|
||||
const runningScheduledJobs = scheduledJobsResult.rows;
|
||||
const runningCrawlJobs = crawlJobsResult.rows;
|
||||
|
||||
// Also get in-memory scrapers if any (from the legacy system)
|
||||
let inMemoryScrapers: any[] = [];
|
||||
@@ -2490,102 +2518,146 @@ router.get('/admin/crawl-traces/run/:runId', async (req: Request, res: Response)
|
||||
/**
|
||||
* GET /api/dutchie-az/scraper/overview
|
||||
* Comprehensive scraper overview for the new dashboard
|
||||
* OPTIMIZED: Combined 6 queries into 4 using CTEs (was 6)
|
||||
*/
|
||||
router.get('/scraper/overview', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
// 1. Core KPI metrics
|
||||
const { rows: kpiRows } = await query<any>(`
|
||||
SELECT
|
||||
-- Total products
|
||||
(SELECT COUNT(*) FROM dutchie_products) AS total_products,
|
||||
(SELECT COUNT(*) FROM dutchie_products WHERE stock_status = 'in_stock') AS in_stock_products,
|
||||
-- Total dispensaries
|
||||
(SELECT COUNT(*) FROM dispensaries WHERE menu_type = 'dutchie' AND state = 'AZ') AS total_dispensaries,
|
||||
(SELECT COUNT(*) FROM dispensaries WHERE menu_type = 'dutchie' AND state = 'AZ' AND platform_dispensary_id IS NOT NULL) AS crawlable_dispensaries,
|
||||
-- Visibility stats (24h)
|
||||
(SELECT COUNT(*) FROM dutchie_products WHERE visibility_lost = true AND visibility_lost_at > NOW() - INTERVAL '24 hours') AS visibility_lost_24h,
|
||||
(SELECT COUNT(*) FROM dutchie_products WHERE visibility_restored_at > NOW() - INTERVAL '24 hours') AS visibility_restored_24h,
|
||||
(SELECT COUNT(*) FROM dutchie_products WHERE visibility_lost = true) AS total_visibility_lost,
|
||||
-- Job stats (24h)
|
||||
(SELECT COUNT(*) FROM job_run_logs WHERE status IN ('error', 'partial') AND created_at > NOW() - INTERVAL '24 hours') AS errors_24h,
|
||||
(SELECT COUNT(*) FROM job_run_logs WHERE status = 'success' AND created_at > NOW() - INTERVAL '24 hours') AS successful_jobs_24h,
|
||||
-- Active workers
|
||||
(SELECT COUNT(*) FROM job_schedules WHERE enabled = true) AS active_workers
|
||||
`);
|
||||
// Run all queries in parallel using Promise.all for better performance
|
||||
const [kpiResult, workerResult, timeSeriesResult, visibilityResult] = await Promise.all([
|
||||
// Query 1: All KPI metrics in a single query using CTEs
|
||||
query<any>(`
|
||||
WITH product_stats AS (
|
||||
SELECT
|
||||
COUNT(*) AS total_products,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'in_stock') AS in_stock_products,
|
||||
COUNT(*) FILTER (WHERE visibility_lost = true AND visibility_lost_at > NOW() - INTERVAL '24 hours') AS visibility_lost_24h,
|
||||
COUNT(*) FILTER (WHERE visibility_restored_at > NOW() - INTERVAL '24 hours') AS visibility_restored_24h,
|
||||
COUNT(*) FILTER (WHERE visibility_lost = true) AS total_visibility_lost
|
||||
FROM dutchie_products
|
||||
),
|
||||
dispensary_stats AS (
|
||||
SELECT
|
||||
COUNT(*) FILTER (WHERE menu_type = 'dutchie' AND state = 'AZ') AS total_dispensaries,
|
||||
COUNT(*) FILTER (WHERE menu_type = 'dutchie' AND state = 'AZ' AND platform_dispensary_id IS NOT NULL) AS crawlable_dispensaries
|
||||
FROM dispensaries
|
||||
),
|
||||
job_stats AS (
|
||||
SELECT
|
||||
COUNT(*) FILTER (WHERE status IN ('error', 'partial') AND created_at > NOW() - INTERVAL '24 hours') AS errors_24h,
|
||||
COUNT(*) FILTER (WHERE status = 'success' AND created_at > NOW() - INTERVAL '24 hours') AS successful_jobs_24h
|
||||
FROM job_run_logs
|
||||
),
|
||||
worker_stats AS (
|
||||
SELECT COUNT(*) AS active_workers FROM job_schedules WHERE enabled = true
|
||||
)
|
||||
SELECT
|
||||
ps.total_products, ps.in_stock_products, ps.visibility_lost_24h, ps.visibility_restored_24h, ps.total_visibility_lost,
|
||||
ds.total_dispensaries, ds.crawlable_dispensaries,
|
||||
js.errors_24h, js.successful_jobs_24h,
|
||||
ws.active_workers
|
||||
FROM product_stats ps, dispensary_stats ds, job_stats js, worker_stats ws
|
||||
`),
|
||||
|
||||
// 2. Get active worker names
|
||||
const { rows: workerRows } = await query<any>(`
|
||||
SELECT worker_name, worker_role, enabled, last_status, last_run_at, next_run_at
|
||||
FROM job_schedules
|
||||
WHERE enabled = true
|
||||
ORDER BY next_run_at ASC NULLS LAST
|
||||
`);
|
||||
// Query 2: Active worker details
|
||||
query<any>(`
|
||||
SELECT worker_name, worker_role, enabled, last_status, last_run_at, next_run_at
|
||||
FROM job_schedules
|
||||
WHERE enabled = true
|
||||
ORDER BY next_run_at ASC NULLS LAST
|
||||
`),
|
||||
|
||||
// 3. Scrape activity by hour (last 24h)
|
||||
const { rows: activityRows } = await query<any>(`
|
||||
SELECT
|
||||
date_trunc('hour', started_at) AS hour,
|
||||
COUNT(*) FILTER (WHERE status = 'success') AS successful,
|
||||
COUNT(*) FILTER (WHERE status IN ('error', 'partial')) AS failed,
|
||||
COUNT(*) AS total
|
||||
FROM job_run_logs
|
||||
WHERE started_at > NOW() - INTERVAL '24 hours'
|
||||
GROUP BY date_trunc('hour', started_at)
|
||||
ORDER BY hour ASC
|
||||
`);
|
||||
// Query 3: Time-series data (activity + growth + recent runs)
|
||||
query<any>(`
|
||||
WITH activity_by_hour AS (
|
||||
SELECT
|
||||
date_trunc('hour', started_at) AS hour,
|
||||
COUNT(*) FILTER (WHERE status = 'success') AS successful,
|
||||
COUNT(*) FILTER (WHERE status IN ('error', 'partial')) AS failed,
|
||||
COUNT(*) AS total
|
||||
FROM job_run_logs
|
||||
WHERE started_at > NOW() - INTERVAL '24 hours'
|
||||
GROUP BY date_trunc('hour', started_at)
|
||||
),
|
||||
product_growth AS (
|
||||
SELECT
|
||||
date_trunc('day', created_at) AS day,
|
||||
COUNT(*) AS new_products
|
||||
FROM dutchie_products
|
||||
WHERE created_at > NOW() - INTERVAL '7 days'
|
||||
GROUP BY date_trunc('day', created_at)
|
||||
),
|
||||
recent_runs AS (
|
||||
SELECT
|
||||
jrl.id,
|
||||
jrl.job_name,
|
||||
jrl.status,
|
||||
jrl.started_at,
|
||||
jrl.completed_at,
|
||||
jrl.items_processed,
|
||||
jrl.items_succeeded,
|
||||
jrl.items_failed,
|
||||
jrl.metadata,
|
||||
js.worker_name,
|
||||
js.worker_role
|
||||
FROM job_run_logs jrl
|
||||
LEFT JOIN job_schedules js ON jrl.schedule_id = js.id
|
||||
ORDER BY jrl.started_at DESC
|
||||
LIMIT 20
|
||||
)
|
||||
SELECT
|
||||
'activity' AS query_type,
|
||||
jsonb_agg(jsonb_build_object('hour', hour, 'successful', successful, 'failed', failed, 'total', total) ORDER BY hour) AS data
|
||||
FROM activity_by_hour
|
||||
UNION ALL
|
||||
SELECT
|
||||
'growth' AS query_type,
|
||||
jsonb_agg(jsonb_build_object('day', day, 'new_products', new_products) ORDER BY day) AS data
|
||||
FROM product_growth
|
||||
UNION ALL
|
||||
SELECT
|
||||
'runs' AS query_type,
|
||||
jsonb_agg(jsonb_build_object(
|
||||
'id', id, 'job_name', job_name, 'status', status, 'started_at', started_at,
|
||||
'completed_at', completed_at, 'items_processed', items_processed,
|
||||
'items_succeeded', items_succeeded, 'items_failed', items_failed,
|
||||
'metadata', metadata, 'worker_name', worker_name, 'worker_role', worker_role
|
||||
) ORDER BY started_at DESC) AS data
|
||||
FROM recent_runs
|
||||
`),
|
||||
|
||||
// 4. Product growth / coverage (last 7 days)
|
||||
const { rows: growthRows } = await query<any>(`
|
||||
SELECT
|
||||
date_trunc('day', created_at) AS day,
|
||||
COUNT(*) AS new_products
|
||||
FROM dutchie_products
|
||||
WHERE created_at > NOW() - INTERVAL '7 days'
|
||||
GROUP BY date_trunc('day', created_at)
|
||||
ORDER BY day ASC
|
||||
`);
|
||||
// Query 4: Visibility changes by store
|
||||
query<any>(`
|
||||
SELECT
|
||||
d.id AS dispensary_id,
|
||||
d.name AS dispensary_name,
|
||||
d.state,
|
||||
COUNT(dp.id) FILTER (WHERE dp.visibility_lost = true AND dp.visibility_lost_at > NOW() - INTERVAL '24 hours') AS lost_24h,
|
||||
COUNT(dp.id) FILTER (WHERE dp.visibility_restored_at > NOW() - INTERVAL '24 hours') AS restored_24h,
|
||||
MAX(dp.visibility_lost_at) AS latest_loss,
|
||||
MAX(dp.visibility_restored_at) AS latest_restore
|
||||
FROM dispensaries d
|
||||
LEFT JOIN dutchie_products dp ON d.id = dp.dispensary_id
|
||||
WHERE d.menu_type = 'dutchie'
|
||||
GROUP BY d.id, d.name, d.state
|
||||
HAVING COUNT(dp.id) FILTER (WHERE dp.visibility_lost = true AND dp.visibility_lost_at > NOW() - INTERVAL '24 hours') > 0
|
||||
OR COUNT(dp.id) FILTER (WHERE dp.visibility_restored_at > NOW() - INTERVAL '24 hours') > 0
|
||||
ORDER BY lost_24h DESC, restored_24h DESC
|
||||
LIMIT 15
|
||||
`)
|
||||
]);
|
||||
|
||||
// 5. Recent worker runs (last 20)
|
||||
const { rows: recentRuns } = await query<any>(`
|
||||
SELECT
|
||||
jrl.id,
|
||||
jrl.job_name,
|
||||
jrl.status,
|
||||
jrl.started_at,
|
||||
jrl.completed_at,
|
||||
jrl.items_processed,
|
||||
jrl.items_succeeded,
|
||||
jrl.items_failed,
|
||||
jrl.metadata,
|
||||
js.worker_name,
|
||||
js.worker_role
|
||||
FROM job_run_logs jrl
|
||||
LEFT JOIN job_schedules js ON jrl.schedule_id = js.id
|
||||
ORDER BY jrl.started_at DESC
|
||||
LIMIT 20
|
||||
`);
|
||||
// Parse results
|
||||
const kpi = kpiResult.rows[0] || {};
|
||||
const workerRows = workerResult.rows;
|
||||
const visibilityChanges = visibilityResult.rows;
|
||||
|
||||
// 6. Recent visibility changes by store
|
||||
const { rows: visibilityChanges } = await query<any>(`
|
||||
SELECT
|
||||
d.id AS dispensary_id,
|
||||
d.name AS dispensary_name,
|
||||
d.state,
|
||||
COUNT(dp.id) FILTER (WHERE dp.visibility_lost = true AND dp.visibility_lost_at > NOW() - INTERVAL '24 hours') AS lost_24h,
|
||||
COUNT(dp.id) FILTER (WHERE dp.visibility_restored_at > NOW() - INTERVAL '24 hours') AS restored_24h,
|
||||
MAX(dp.visibility_lost_at) AS latest_loss,
|
||||
MAX(dp.visibility_restored_at) AS latest_restore
|
||||
FROM dispensaries d
|
||||
LEFT JOIN dutchie_products dp ON d.id = dp.dispensary_id
|
||||
WHERE d.menu_type = 'dutchie'
|
||||
GROUP BY d.id, d.name, d.state
|
||||
HAVING COUNT(dp.id) FILTER (WHERE dp.visibility_lost = true AND dp.visibility_lost_at > NOW() - INTERVAL '24 hours') > 0
|
||||
OR COUNT(dp.id) FILTER (WHERE dp.visibility_restored_at > NOW() - INTERVAL '24 hours') > 0
|
||||
ORDER BY lost_24h DESC, restored_24h DESC
|
||||
LIMIT 15
|
||||
`);
|
||||
|
||||
const kpi = kpiRows[0] || {};
|
||||
// Parse time-series aggregated results
|
||||
const timeSeriesMap = Object.fromEntries(
|
||||
timeSeriesResult.rows.map((r: any) => [r.query_type, r.data || []])
|
||||
);
|
||||
const activityRows = timeSeriesMap['activity'] || [];
|
||||
const growthRows = timeSeriesMap['growth'] || [];
|
||||
const recentRuns = timeSeriesMap['runs'] || [];
|
||||
|
||||
res.json({
|
||||
kpi: {
|
||||
|
||||
@@ -23,10 +23,14 @@ app.use('/images', express.static(LOCAL_IMAGES_PATH));
|
||||
const LOCAL_DOWNLOADS_PATH = process.env.LOCAL_DOWNLOADS_PATH || '/app/public/downloads';
|
||||
app.use('/downloads', express.static(LOCAL_DOWNLOADS_PATH));
|
||||
|
||||
// Simple health check for load balancers/K8s probes
|
||||
app.get('/health', (req, res) => {
|
||||
res.json({ status: 'ok', timestamp: new Date().toISOString() });
|
||||
});
|
||||
|
||||
// Comprehensive health endpoints for monitoring (no auth required)
|
||||
app.use('/api/health', healthRoutes);
|
||||
|
||||
// Endpoint to check server's outbound IP (for proxy whitelist setup)
|
||||
app.get('/outbound-ip', async (req, res) => {
|
||||
try {
|
||||
@@ -62,6 +66,8 @@ import usersRoutes from './routes/users';
|
||||
import staleProcessesRoutes from './routes/stale-processes';
|
||||
import orchestratorAdminRoutes from './routes/orchestrator-admin';
|
||||
import adminRoutes from './routes/admin';
|
||||
import healthRoutes from './routes/health';
|
||||
import workersRoutes from './routes/workers';
|
||||
import { dutchieAZRouter, startScheduler as startDutchieAZScheduler, initializeDefaultSchedules } from './dutchie-az';
|
||||
import { getPool } from './dutchie-az/db/connection';
|
||||
import { createAnalyticsRouter } from './dutchie-az/routes/analytics';
|
||||
@@ -77,6 +83,16 @@ import { createAnalyticsV2Router } from './routes/analytics-v2';
|
||||
import { createDiscoveryRoutes } from './discovery';
|
||||
import { createDutchieDiscoveryRoutes, promoteDiscoveryLocation } from './dutchie-az/discovery';
|
||||
|
||||
// Consumer API routes (findadispo.com, findagram.co)
|
||||
import consumerAuthRoutes from './routes/consumer-auth';
|
||||
import consumerFavoritesRoutes from './routes/consumer-favorites';
|
||||
import consumerAlertsRoutes from './routes/consumer-alerts';
|
||||
import consumerSavedSearchesRoutes from './routes/consumer-saved-searches';
|
||||
import consumerDealsRoutes from './routes/consumer-deals';
|
||||
import eventsRoutes from './routes/events';
|
||||
import clickAnalyticsRoutes from './routes/click-analytics';
|
||||
import seoRoutes from './routes/seo';
|
||||
|
||||
// Mark requests from trusted domains (cannaiq.co, findagram.co, findadispo.com)
|
||||
// These domains can access the API without authentication
|
||||
app.use(markTrustedDomains);
|
||||
@@ -96,6 +112,18 @@ app.use('/api/changes', changesRoutes);
|
||||
app.use('/api/categories', categoriesRoutes);
|
||||
app.use('/api/products', productsRoutes);
|
||||
app.use('/api/campaigns', campaignsRoutes);
|
||||
|
||||
// Multi-state API routes - national analytics and cross-state comparisons (NO AUTH)
|
||||
// IMPORTANT: Must be mounted BEFORE /api/analytics to avoid auth middleware blocking these routes
|
||||
try {
|
||||
const multiStateRoutes = createMultiStateRoutes(getPool());
|
||||
app.use('/api', multiStateRoutes);
|
||||
console.log('[MultiState] Routes registered at /api (analytics/national/*, states/*, etc.)');
|
||||
} catch (error) {
|
||||
console.warn('[MultiState] Failed to register routes (DB may not be configured):', error);
|
||||
}
|
||||
|
||||
// Legacy click analytics routes (requires auth)
|
||||
app.use('/api/analytics', analyticsRoutes);
|
||||
app.use('/api/settings', settingsRoutes);
|
||||
app.use('/api/proxies', proxiesRoutes);
|
||||
@@ -112,16 +140,29 @@ app.use('/api/stale-processes', staleProcessesRoutes);
|
||||
// Admin routes - operator actions (crawl triggers, health checks)
|
||||
app.use('/api/admin', adminRoutes);
|
||||
app.use('/api/admin/orchestrator', orchestratorAdminRoutes);
|
||||
// Vendor-agnostic AZ data pipeline routes (new public surface)
|
||||
|
||||
// SEO orchestrator routes
|
||||
app.use('/api/seo', seoRoutes);
|
||||
|
||||
// Provider-agnostic worker management routes (replaces /api/dutchie-az/admin/schedules)
|
||||
app.use('/api/workers', workersRoutes);
|
||||
// Monitor routes - aliased from workers for convenience
|
||||
app.use('/api/monitor', workersRoutes);
|
||||
console.log('[Workers] Routes registered at /api/workers and /api/monitor');
|
||||
|
||||
// Market data pipeline routes (provider-agnostic)
|
||||
app.use('/api/markets', dutchieAZRouter);
|
||||
// Legacy aliases (deprecated - remove after frontend migration)
|
||||
app.use('/api/az', dutchieAZRouter);
|
||||
// Legacy alias (kept temporarily for backward compatibility)
|
||||
app.use('/api/dutchie-az', dutchieAZRouter);
|
||||
|
||||
// Phase 3: Analytics Dashboards - price trends, penetration, category growth, etc.
|
||||
try {
|
||||
const analyticsRouter = createAnalyticsRouter(getPool());
|
||||
app.use('/api/markets/analytics', analyticsRouter);
|
||||
// Legacy alias for backwards compatibility
|
||||
app.use('/api/az/analytics', analyticsRouter);
|
||||
console.log('[Analytics] Routes registered at /api/az/analytics');
|
||||
console.log('[Analytics] Routes registered at /api/markets/analytics');
|
||||
} catch (error) {
|
||||
console.warn('[Analytics] Failed to register routes:', error);
|
||||
}
|
||||
@@ -139,15 +180,24 @@ try {
|
||||
// Uses dutchie_az data pipeline with per-dispensary API key auth
|
||||
app.use('/api/v1', publicApiRoutes);
|
||||
|
||||
// Multi-state API routes - national analytics and cross-state comparisons
|
||||
// Phase 4: Multi-State Expansion
|
||||
try {
|
||||
const multiStateRoutes = createMultiStateRoutes(getPool());
|
||||
app.use('/api', multiStateRoutes);
|
||||
console.log('[MultiState] Routes registered');
|
||||
} catch (error) {
|
||||
console.warn('[MultiState] Failed to register routes (DB may not be configured):', error);
|
||||
}
|
||||
// Consumer API - findadispo.com and findagram.co user features
|
||||
// Auth routes don't require authentication
|
||||
app.use('/api/consumer/auth', consumerAuthRoutes);
|
||||
// Protected consumer routes (favorites, alerts, saved searches)
|
||||
app.use('/api/consumer/favorites', consumerFavoritesRoutes);
|
||||
app.use('/api/consumer/alerts', consumerAlertsRoutes);
|
||||
app.use('/api/consumer/saved-searches', consumerSavedSearchesRoutes);
|
||||
// Deals endpoint - public, no auth required
|
||||
app.use('/api/v1/deals', consumerDealsRoutes);
|
||||
console.log('[Consumer] Routes registered at /api/consumer/*');
|
||||
|
||||
// Events API - product click tracking for analytics and campaigns
|
||||
app.use('/api/events', eventsRoutes);
|
||||
console.log('[Events] Routes registered at /api/events');
|
||||
|
||||
// Click Analytics API - brand and campaign engagement aggregations
|
||||
app.use('/api/analytics/clicks', clickAnalyticsRoutes);
|
||||
console.log('[ClickAnalytics] Routes registered at /api/analytics/clicks');
|
||||
|
||||
// States API routes - cannabis legalization status and targeting
|
||||
try {
|
||||
|
||||
108
backend/src/lib/redis.ts
Normal file
108
backend/src/lib/redis.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* Redis Connection Utility
|
||||
*
|
||||
* Provides a singleton Redis connection for:
|
||||
* - BullMQ job queues (notifications, crawl jobs)
|
||||
* - Rate limiting
|
||||
* - Caching
|
||||
* - Session storage
|
||||
*/
|
||||
|
||||
import Redis from 'ioredis';
|
||||
|
||||
// Lazy-initialized Redis client singleton
|
||||
let _redis: Redis | null = null;
|
||||
|
||||
/**
|
||||
* Get Redis connection URL from environment
|
||||
*/
|
||||
function getRedisUrl(): string {
|
||||
// Priority 1: Full Redis URL
|
||||
if (process.env.REDIS_URL) {
|
||||
return process.env.REDIS_URL;
|
||||
}
|
||||
|
||||
// Priority 2: Individual env vars
|
||||
const host = process.env.REDIS_HOST || 'localhost';
|
||||
const port = process.env.REDIS_PORT || '6379';
|
||||
const password = process.env.REDIS_PASSWORD;
|
||||
|
||||
if (password) {
|
||||
return `redis://:${password}@${host}:${port}`;
|
||||
}
|
||||
|
||||
return `redis://${host}:${port}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the Redis client (lazy singleton)
|
||||
*/
|
||||
export function getRedis(): Redis {
|
||||
if (!_redis) {
|
||||
const url = getRedisUrl();
|
||||
console.log(`[Redis] Connecting to ${url.replace(/:[^:@]+@/, ':***@')}`);
|
||||
|
||||
_redis = new Redis(url, {
|
||||
maxRetriesPerRequest: null, // Required for BullMQ
|
||||
enableReadyCheck: false,
|
||||
retryStrategy: (times: number) => {
|
||||
if (times > 10) {
|
||||
console.error('[Redis] Max retries reached, giving up');
|
||||
return null;
|
||||
}
|
||||
const delay = Math.min(times * 100, 3000);
|
||||
console.log(`[Redis] Retry attempt ${times}, waiting ${delay}ms`);
|
||||
return delay;
|
||||
},
|
||||
});
|
||||
|
||||
_redis.on('connect', () => {
|
||||
console.log('[Redis] Connected');
|
||||
});
|
||||
|
||||
_redis.on('error', (err) => {
|
||||
console.error('[Redis] Error:', err.message);
|
||||
});
|
||||
|
||||
_redis.on('close', () => {
|
||||
console.log('[Redis] Connection closed');
|
||||
});
|
||||
}
|
||||
|
||||
return _redis;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if Redis is available
|
||||
*/
|
||||
export async function isRedisAvailable(): Promise<boolean> {
|
||||
try {
|
||||
const redis = getRedis();
|
||||
const pong = await redis.ping();
|
||||
return pong === 'PONG';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close Redis connection
|
||||
*/
|
||||
export async function closeRedis(): Promise<void> {
|
||||
if (_redis) {
|
||||
await _redis.quit();
|
||||
_redis = null;
|
||||
console.log('[Redis] Disconnected');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get BullMQ-compatible connection options
|
||||
* BullMQ requires a specific connection format
|
||||
*/
|
||||
export function getBullMQConnection(): Redis {
|
||||
return getRedis();
|
||||
}
|
||||
|
||||
// Export types for convenience
|
||||
export type { Redis };
|
||||
27
backend/src/migrations/052_create_seo_pages.sql
Normal file
27
backend/src/migrations/052_create_seo_pages.sql
Normal file
@@ -0,0 +1,27 @@
|
||||
-- Migration: 052_create_seo_pages.sql
|
||||
-- Purpose: Create seo_pages table for SEO orchestrator
|
||||
|
||||
CREATE TABLE IF NOT EXISTS seo_pages (
|
||||
id SERIAL PRIMARY KEY,
|
||||
type VARCHAR(50) NOT NULL CHECK (type IN ('state', 'brand', 'competitor_alternative', 'high_intent', 'insight_post')),
|
||||
slug VARCHAR(255) NOT NULL UNIQUE,
|
||||
page_key VARCHAR(255) NOT NULL,
|
||||
primary_keyword VARCHAR(255),
|
||||
status VARCHAR(50) DEFAULT 'pending_generation' CHECK (status IN ('draft', 'pending_generation', 'live', 'stale')),
|
||||
data_source VARCHAR(100),
|
||||
meta_title VARCHAR(255),
|
||||
meta_description TEXT,
|
||||
last_generated_at TIMESTAMPTZ,
|
||||
last_reviewed_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_seo_pages_type ON seo_pages(type);
|
||||
CREATE INDEX IF NOT EXISTS idx_seo_pages_status ON seo_pages(status);
|
||||
CREATE INDEX IF NOT EXISTS idx_seo_pages_slug ON seo_pages(slug);
|
||||
|
||||
-- Record migration
|
||||
INSERT INTO schema_migrations (version, name, applied_at)
|
||||
VALUES (52, '052_create_seo_pages', NOW())
|
||||
ON CONFLICT (version) DO NOTHING;
|
||||
24
backend/src/migrations/053_create_seo_page_contents.sql
Normal file
24
backend/src/migrations/053_create_seo_page_contents.sql
Normal file
@@ -0,0 +1,24 @@
|
||||
-- Migration: 053_create_seo_page_contents.sql
|
||||
-- Stores generated SEO content for each page
|
||||
|
||||
CREATE TABLE IF NOT EXISTS seo_page_contents (
|
||||
id SERIAL PRIMARY KEY,
|
||||
page_id INTEGER NOT NULL REFERENCES seo_pages(id) ON DELETE CASCADE,
|
||||
blocks JSONB NOT NULL DEFAULT '[]',
|
||||
meta_title VARCHAR(255),
|
||||
meta_description TEXT,
|
||||
h1 VARCHAR(255),
|
||||
canonical_url VARCHAR(500),
|
||||
og_title VARCHAR(255),
|
||||
og_description TEXT,
|
||||
og_image_url VARCHAR(500),
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
UNIQUE(page_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_seo_page_contents_page_id ON seo_page_contents(page_id);
|
||||
|
||||
INSERT INTO schema_migrations (version, name, applied_at)
|
||||
VALUES (53, '053_create_seo_page_contents', NOW())
|
||||
ON CONFLICT (version) DO NOTHING;
|
||||
@@ -447,5 +447,55 @@ export function createMultiStateRoutes(pool: Pool): Router {
|
||||
}
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Health Check Endpoint
|
||||
// =========================================================================
|
||||
|
||||
/**
|
||||
* GET /api/health/analytics
|
||||
* Health check for analytics subsystem
|
||||
*/
|
||||
router.get('/health/analytics', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const startTime = Date.now();
|
||||
|
||||
// Check materialized view is accessible
|
||||
const result = await pool.query(`
|
||||
SELECT COUNT(*) as state_count,
|
||||
MAX(refreshed_at) as last_refresh
|
||||
FROM mv_state_metrics
|
||||
`);
|
||||
|
||||
const dbLatency = Date.now() - startTime;
|
||||
const stateCount = parseInt(result.rows[0]?.state_count || '0', 10);
|
||||
const lastRefresh = result.rows[0]?.last_refresh;
|
||||
|
||||
// Check if data is stale (more than 24 hours old)
|
||||
const isStale = lastRefresh
|
||||
? Date.now() - new Date(lastRefresh).getTime() > 24 * 60 * 60 * 1000
|
||||
: true;
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
status: isStale ? 'degraded' : 'healthy',
|
||||
data: {
|
||||
statesInCache: stateCount,
|
||||
lastRefresh: lastRefresh || null,
|
||||
isStale,
|
||||
dbLatencyMs: dbLatency,
|
||||
},
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[MultiState] Health check failed:', error);
|
||||
res.status(503).json({
|
||||
success: false,
|
||||
status: 'unhealthy',
|
||||
error: error.message,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
@@ -162,17 +162,17 @@ router.post('/:id/products', requireRole('superadmin', 'admin'), async (req, res
|
||||
router.delete('/:id/products/:product_id', requireRole('superadmin', 'admin'), async (req, res) => {
|
||||
try {
|
||||
const { id, product_id } = req.params;
|
||||
|
||||
|
||||
const result = await pool.query(`
|
||||
DELETE FROM campaign_products
|
||||
DELETE FROM campaign_products
|
||||
WHERE campaign_id = $1 AND product_id = $2
|
||||
RETURNING *
|
||||
`, [id, product_id]);
|
||||
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Product not in campaign' });
|
||||
}
|
||||
|
||||
|
||||
res.json({ message: 'Product removed from campaign' });
|
||||
} catch (error) {
|
||||
console.error('Error removing product from campaign:', error);
|
||||
@@ -180,4 +180,139 @@ router.delete('/:id/products/:product_id', requireRole('superadmin', 'admin'), a
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/campaigns/:id/click-summary
|
||||
* Get product click event summary for a campaign
|
||||
*
|
||||
* Query params:
|
||||
* - from: Start date (ISO)
|
||||
* - to: End date (ISO)
|
||||
*/
|
||||
router.get('/:id/click-summary', async (req, res) => {
|
||||
try {
|
||||
const { id } = req.params;
|
||||
const { from, to } = req.query;
|
||||
|
||||
// Check campaign exists
|
||||
const campaignResult = await pool.query(
|
||||
'SELECT id, name FROM campaigns WHERE id = $1',
|
||||
[id]
|
||||
);
|
||||
if (campaignResult.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Campaign not found' });
|
||||
}
|
||||
|
||||
// Build date filter conditions
|
||||
const conditions: string[] = ['campaign_id = $1'];
|
||||
const params: any[] = [id];
|
||||
let paramIndex = 2;
|
||||
|
||||
if (from) {
|
||||
conditions.push(`occurred_at >= $${paramIndex++}`);
|
||||
params.push(new Date(from as string));
|
||||
}
|
||||
if (to) {
|
||||
conditions.push(`occurred_at <= $${paramIndex++}`);
|
||||
params.push(new Date(to as string));
|
||||
}
|
||||
|
||||
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
|
||||
|
||||
// Get overall stats
|
||||
const statsResult = await pool.query(`
|
||||
SELECT
|
||||
COUNT(*) as total_clicks,
|
||||
COUNT(DISTINCT product_id) as unique_products,
|
||||
COUNT(DISTINCT store_id) as unique_stores,
|
||||
COUNT(DISTINCT brand_id) as unique_brands,
|
||||
COUNT(DISTINCT user_id) FILTER (WHERE user_id IS NOT NULL) as unique_users
|
||||
FROM product_click_events
|
||||
${whereClause}
|
||||
`, params);
|
||||
|
||||
// Get clicks by action type
|
||||
const byActionResult = await pool.query(`
|
||||
SELECT
|
||||
action,
|
||||
COUNT(*) as count
|
||||
FROM product_click_events
|
||||
${whereClause}
|
||||
GROUP BY action
|
||||
ORDER BY count DESC
|
||||
`, params);
|
||||
|
||||
// Get clicks by source
|
||||
const bySourceResult = await pool.query(`
|
||||
SELECT
|
||||
source,
|
||||
COUNT(*) as count
|
||||
FROM product_click_events
|
||||
${whereClause}
|
||||
GROUP BY source
|
||||
ORDER BY count DESC
|
||||
`, params);
|
||||
|
||||
// Get top products (by click count)
|
||||
const topProductsResult = await pool.query(`
|
||||
SELECT
|
||||
product_id,
|
||||
COUNT(*) as click_count
|
||||
FROM product_click_events
|
||||
${whereClause}
|
||||
GROUP BY product_id
|
||||
ORDER BY click_count DESC
|
||||
LIMIT 10
|
||||
`, params);
|
||||
|
||||
// Get daily click counts (last 30 days by default)
|
||||
const dailyParams = [...params];
|
||||
let dailyWhereClause = whereClause;
|
||||
if (!from) {
|
||||
// Default to last 30 days
|
||||
conditions.push(`occurred_at >= NOW() - INTERVAL '30 days'`);
|
||||
dailyWhereClause = `WHERE ${conditions.join(' AND ')}`;
|
||||
}
|
||||
|
||||
const dailyResult = await pool.query(`
|
||||
SELECT
|
||||
DATE(occurred_at) as date,
|
||||
COUNT(*) as click_count
|
||||
FROM product_click_events
|
||||
${dailyWhereClause}
|
||||
GROUP BY DATE(occurred_at)
|
||||
ORDER BY date ASC
|
||||
`, dailyParams);
|
||||
|
||||
res.json({
|
||||
campaign: campaignResult.rows[0],
|
||||
summary: {
|
||||
totalClicks: parseInt(statsResult.rows[0].total_clicks, 10),
|
||||
uniqueProducts: parseInt(statsResult.rows[0].unique_products, 10),
|
||||
uniqueStores: parseInt(statsResult.rows[0].unique_stores, 10),
|
||||
uniqueBrands: parseInt(statsResult.rows[0].unique_brands, 10),
|
||||
uniqueUsers: parseInt(statsResult.rows[0].unique_users, 10)
|
||||
},
|
||||
byAction: byActionResult.rows.map(row => ({
|
||||
action: row.action,
|
||||
count: parseInt(row.count, 10)
|
||||
})),
|
||||
bySource: bySourceResult.rows.map(row => ({
|
||||
source: row.source,
|
||||
count: parseInt(row.count, 10)
|
||||
})),
|
||||
topProducts: topProductsResult.rows.map(row => ({
|
||||
productId: row.product_id,
|
||||
clickCount: parseInt(row.click_count, 10)
|
||||
})),
|
||||
daily: dailyResult.rows.map(row => ({
|
||||
date: row.date,
|
||||
clickCount: parseInt(row.click_count, 10)
|
||||
}))
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[Campaigns] Error fetching click summary:', error.message);
|
||||
res.status(500).json({ error: 'Failed to fetch campaign click summary' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
521
backend/src/routes/click-analytics.ts
Normal file
521
backend/src/routes/click-analytics.ts
Normal file
@@ -0,0 +1,521 @@
|
||||
/**
|
||||
* Click Analytics API Routes
|
||||
*
|
||||
* Aggregates product click events by brand and campaign for analytics dashboards.
|
||||
*
|
||||
* Endpoints:
|
||||
* GET /api/analytics/clicks/brands - Top brands by click engagement
|
||||
* GET /api/analytics/clicks/campaigns - Top campaigns/specials by engagement
|
||||
* GET /api/analytics/clicks/stores/:storeId/brands - Per-store brand engagement
|
||||
* GET /api/analytics/clicks/summary - Overall click summary stats
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { pool } from '../db/pool';
|
||||
import { authMiddleware } from '../auth/middleware';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// All click analytics endpoints require authentication
|
||||
router.use(authMiddleware);
|
||||
|
||||
/**
|
||||
* GET /api/analytics/clicks/brands
|
||||
* Get top brands by click engagement
|
||||
*
|
||||
* Query params:
|
||||
* - state: Filter by store state (e.g., 'AZ')
|
||||
* - store_id: Filter by specific store
|
||||
* - brand_id: Filter by specific brand
|
||||
* - days: Lookback window (default 30)
|
||||
* - limit: Max results (default 25)
|
||||
*/
|
||||
router.get('/brands', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
state,
|
||||
store_id,
|
||||
brand_id,
|
||||
days = '30',
|
||||
limit = '25'
|
||||
} = req.query;
|
||||
|
||||
const daysNum = parseInt(days as string, 10) || 30;
|
||||
const limitNum = Math.min(parseInt(limit as string, 10) || 25, 100);
|
||||
|
||||
// Build conditions and params
|
||||
const conditions: string[] = [
|
||||
'e.brand_id IS NOT NULL',
|
||||
`e.occurred_at >= NOW() - INTERVAL '${daysNum} days'`
|
||||
];
|
||||
const params: any[] = [];
|
||||
let paramIdx = 1;
|
||||
|
||||
if (state) {
|
||||
conditions.push(`d.state = $${paramIdx++}`);
|
||||
params.push(state);
|
||||
}
|
||||
|
||||
if (store_id) {
|
||||
conditions.push(`e.store_id = $${paramIdx++}`);
|
||||
params.push(store_id);
|
||||
}
|
||||
|
||||
if (brand_id) {
|
||||
conditions.push(`e.brand_id = $${paramIdx++}`);
|
||||
params.push(brand_id);
|
||||
}
|
||||
|
||||
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
|
||||
|
||||
// Query for brand engagement
|
||||
const result = await pool.query(`
|
||||
SELECT
|
||||
e.brand_id,
|
||||
e.brand_id as brand_name,
|
||||
COUNT(*) as clicks,
|
||||
COUNT(DISTINCT e.product_id) as unique_products,
|
||||
COUNT(DISTINCT e.store_id) as unique_stores,
|
||||
MIN(e.occurred_at) as first_click_at,
|
||||
MAX(e.occurred_at) as last_click_at
|
||||
FROM product_click_events e
|
||||
LEFT JOIN dispensaries d ON e.store_id::int = d.id
|
||||
${whereClause}
|
||||
GROUP BY e.brand_id
|
||||
ORDER BY clicks DESC
|
||||
LIMIT ${limitNum}
|
||||
`, params);
|
||||
|
||||
// Try to get actual brand names from products
|
||||
const brandIds = result.rows.map(r => r.brand_id).filter(Boolean);
|
||||
let brandNamesMap: Record<string, string> = {};
|
||||
|
||||
if (brandIds.length > 0) {
|
||||
const brandNamesResult = await pool.query(`
|
||||
SELECT DISTINCT brand_name
|
||||
FROM dutchie_products
|
||||
WHERE brand_name = ANY($1)
|
||||
`, [brandIds]);
|
||||
|
||||
brandNamesResult.rows.forEach(r => {
|
||||
brandNamesMap[r.brand_name] = r.brand_name;
|
||||
});
|
||||
}
|
||||
|
||||
const brands = result.rows.map(row => ({
|
||||
brand_id: row.brand_id,
|
||||
brand_name: brandNamesMap[row.brand_id] || row.brand_id,
|
||||
clicks: parseInt(row.clicks, 10),
|
||||
unique_products: parseInt(row.unique_products, 10),
|
||||
unique_stores: parseInt(row.unique_stores, 10),
|
||||
first_click_at: row.first_click_at,
|
||||
last_click_at: row.last_click_at
|
||||
}));
|
||||
|
||||
res.json({
|
||||
filters: {
|
||||
state: state || null,
|
||||
store_id: store_id || null,
|
||||
brand_id: brand_id || null,
|
||||
days: daysNum
|
||||
},
|
||||
brands
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[ClickAnalytics] Error fetching brand analytics:', error.message);
|
||||
res.status(500).json({ error: 'Failed to fetch brand analytics' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * GET /api/analytics/clicks/products
 * Get top products by click engagement
 *
 * Query params:
 * - state: Filter by store state (e.g., 'AZ')
 * - store_id: Filter by specific store
 * - brand_id: Filter by specific brand
 * - days: Lookback window (default 30)
 * - limit: Max results (default 25, capped at 100)
 */
router.get('/products', async (req: Request, res: Response) => {
  try {
    const {
      state,
      store_id,
      brand_id,
      days = '30',
      limit = '25'
    } = req.query;

    // days/limit are interpolated into the SQL below, so they are coerced to
    // plain integers first (NaN falls back to the default; limit capped at 100).
    const daysNum = parseInt(days as string, 10) || 30;
    const limitNum = Math.min(parseInt(limit as string, 10) || 25, 100);

    // Build conditions and params
    const conditions: string[] = [
      'e.product_id IS NOT NULL',
      `e.occurred_at >= NOW() - INTERVAL '${daysNum} days'`
    ];
    const params: any[] = [];
    let paramIdx = 1;

    if (state) {
      conditions.push(`d.state = $${paramIdx++}`);
      params.push(state);
    }

    if (store_id) {
      conditions.push(`e.store_id = $${paramIdx++}`);
      params.push(store_id);
    }

    if (brand_id) {
      conditions.push(`e.brand_id = $${paramIdx++}`);
      params.push(brand_id);
    }

    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    // Phase 1: aggregate clicks per (product, brand) pair over the window.
    const result = await pool.query(`
      SELECT
        e.product_id,
        e.brand_id,
        COUNT(*) as clicks,
        COUNT(DISTINCT e.store_id) as unique_stores,
        MIN(e.occurred_at) as first_click_at,
        MAX(e.occurred_at) as last_click_at
      FROM product_click_events e
      LEFT JOIN dispensaries d ON e.store_id::int = d.id
      ${whereClause}
      GROUP BY e.product_id, e.brand_id
      ORDER BY clicks DESC
      LIMIT ${limitNum}
    `, params);

    // Phase 2: enrich with product details. Click events may carry either the
    // external id or the internal numeric id — TODO confirm which callers
    // send — so the lookup matches on both and the map is keyed by both forms.
    const productIds = result.rows.map(r => r.product_id).filter(Boolean);
    let productDetailsMap: Record<string, { name: string; brand: string; type: string; subcategory: string }> = {};

    if (productIds.length > 0) {
      // Try to match by external_id or id
      const productDetailsResult = await pool.query(`
        SELECT
          external_id,
          id::text as product_id,
          name,
          brand_name,
          type,
          subcategory
        FROM dutchie_products
        WHERE external_id = ANY($1) OR id::text = ANY($1)
      `, [productIds]);

      productDetailsResult.rows.forEach(r => {
        // Key by external_id...
        productDetailsMap[r.external_id] = {
          name: r.name,
          brand: r.brand_name,
          type: r.type,
          subcategory: r.subcategory
        };
        // ...and by internal id, so either form of product_id resolves.
        productDetailsMap[r.product_id] = {
          name: r.name,
          brand: r.brand_name,
          type: r.type,
          subcategory: r.subcategory
        };
      });
    }

    // Merge aggregates with details; fall back to placeholders when the
    // product is not (or no longer) present in dutchie_products.
    const products = result.rows.map(row => {
      const details = productDetailsMap[row.product_id];
      return {
        product_id: row.product_id,
        product_name: details?.name || `Product ${row.product_id}`,
        brand_id: row.brand_id,
        brand_name: details?.brand || row.brand_id || 'Unknown',
        category: details?.type || null,
        subcategory: details?.subcategory || null,
        clicks: parseInt(row.clicks, 10),
        unique_stores: parseInt(row.unique_stores, 10),
        first_click_at: row.first_click_at,
        last_click_at: row.last_click_at
      };
    });

    res.json({
      filters: {
        state: state || null,
        store_id: store_id || null,
        brand_id: brand_id || null,
        days: daysNum
      },
      products
    });
  } catch (error: any) {
    console.error('[ClickAnalytics] Error fetching product analytics:', error.message);
    res.status(500).json({ error: 'Failed to fetch product analytics' });
  }
});
|
||||
|
||||
/**
 * GET /api/analytics/clicks/campaigns
 * Get top campaigns/specials by click engagement
 *
 * Query params:
 * - state: Filter by store state
 * - store_id: Filter by specific store
 * - days: Lookback window (default 30)
 * - limit: Max results (default 25, capped at 100)
 */
router.get('/campaigns', async (req: Request, res: Response) => {
  try {
    const {
      state,
      store_id,
      days = '30',
      limit = '25'
    } = req.query;

    // Coerced to plain integers because they are interpolated into SQL below.
    const daysNum = parseInt(days as string, 10) || 30;
    const limitNum = Math.min(parseInt(limit as string, 10) || 25, 100);

    // Build conditions
    const conditions: string[] = [
      'e.campaign_id IS NOT NULL',
      `e.occurred_at >= NOW() - INTERVAL '${daysNum} days'`
    ];
    const params: any[] = [];
    let paramIdx = 1;

    if (state) {
      conditions.push(`d.state = $${paramIdx++}`);
      params.push(state);
    }

    if (store_id) {
      conditions.push(`e.store_id = $${paramIdx++}`);
      params.push(store_id);
    }

    const whereClause = `WHERE ${conditions.join(' AND ')}`;

    // Aggregate clicks per campaign, joined to campaign metadata. All
    // non-aggregated campaign columns must appear in GROUP BY.
    const result = await pool.query(`
      SELECT
        e.campaign_id,
        c.name as campaign_name,
        c.slug as campaign_slug,
        c.description as campaign_description,
        c.active as is_active,
        c.start_date,
        c.end_date,
        COUNT(*) as clicks,
        COUNT(DISTINCT e.product_id) as unique_products,
        COUNT(DISTINCT e.store_id) as unique_stores,
        MIN(e.occurred_at) as first_event_at,
        MAX(e.occurred_at) as last_event_at
      FROM product_click_events e
      LEFT JOIN dispensaries d ON e.store_id::int = d.id
      LEFT JOIN campaigns c ON e.campaign_id = c.id
      ${whereClause}
      GROUP BY e.campaign_id, c.name, c.slug, c.description, c.active, c.start_date, c.end_date
      ORDER BY clicks DESC
      LIMIT ${limitNum}
    `, params);

    const campaigns = result.rows.map(row => ({
      campaign_id: row.campaign_id,
      // LEFT JOIN means campaign metadata may be missing; use a placeholder.
      campaign_name: row.campaign_name || `Campaign ${row.campaign_id}`,
      campaign_slug: row.campaign_slug,
      campaign_description: row.campaign_description,
      is_active: row.is_active,
      start_date: row.start_date,
      end_date: row.end_date,
      clicks: parseInt(row.clicks, 10),
      unique_products: parseInt(row.unique_products, 10),
      unique_stores: parseInt(row.unique_stores, 10),
      first_event_at: row.first_event_at,
      last_event_at: row.last_event_at
    }));

    res.json({
      filters: {
        state: state || null,
        store_id: store_id || null,
        days: daysNum
      },
      campaigns
    });
  } catch (error: any) {
    console.error('[ClickAnalytics] Error fetching campaign analytics:', error.message);
    res.status(500).json({ error: 'Failed to fetch campaign analytics' });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/analytics/clicks/stores/:storeId/brands
|
||||
* Get brand engagement for a specific store
|
||||
*
|
||||
* Query params:
|
||||
* - days: Lookback window (default 30)
|
||||
* - limit: Max results (default 25)
|
||||
*/
|
||||
router.get('/stores/:storeId/brands', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { storeId } = req.params;
|
||||
const { days = '30', limit = '25' } = req.query;
|
||||
|
||||
const daysNum = parseInt(days as string, 10) || 30;
|
||||
const limitNum = Math.min(parseInt(limit as string, 10) || 25, 100);
|
||||
|
||||
// Get store info
|
||||
const storeResult = await pool.query(
|
||||
'SELECT id, name, dba_name, city, state FROM dispensaries WHERE id = $1',
|
||||
[storeId]
|
||||
);
|
||||
|
||||
if (storeResult.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Store not found' });
|
||||
}
|
||||
|
||||
const store = storeResult.rows[0];
|
||||
|
||||
// Query brand engagement for this store
|
||||
const result = await pool.query(`
|
||||
SELECT
|
||||
e.brand_id,
|
||||
COUNT(*) as clicks,
|
||||
COUNT(DISTINCT e.product_id) as unique_products,
|
||||
MIN(e.occurred_at) as first_click_at,
|
||||
MAX(e.occurred_at) as last_click_at
|
||||
FROM product_click_events e
|
||||
WHERE e.store_id = $1
|
||||
AND e.brand_id IS NOT NULL
|
||||
AND e.occurred_at >= NOW() - INTERVAL '${daysNum} days'
|
||||
GROUP BY e.brand_id
|
||||
ORDER BY clicks DESC
|
||||
LIMIT ${limitNum}
|
||||
`, [storeId]);
|
||||
|
||||
const brands = result.rows.map(row => ({
|
||||
brand_id: row.brand_id,
|
||||
brand_name: row.brand_id, // Use brand_id as name for now
|
||||
clicks: parseInt(row.clicks, 10),
|
||||
unique_products: parseInt(row.unique_products, 10),
|
||||
first_click_at: row.first_click_at,
|
||||
last_click_at: row.last_click_at
|
||||
}));
|
||||
|
||||
res.json({
|
||||
store: {
|
||||
id: store.id,
|
||||
name: store.dba_name || store.name,
|
||||
city: store.city,
|
||||
state: store.state
|
||||
},
|
||||
filters: {
|
||||
days: daysNum
|
||||
},
|
||||
brands
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[ClickAnalytics] Error fetching store brand analytics:', error.message);
|
||||
res.status(500).json({ error: 'Failed to fetch store brand analytics' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * GET /api/analytics/clicks/summary
 * Get overall click summary stats
 *
 * Query params:
 * - state: Filter by store state
 * - days: Lookback window (default 30)
 */
router.get('/summary', async (req: Request, res: Response) => {
  try {
    const { state, days = '30' } = req.query;
    // Interpolated into SQL below, so coerced to a plain integer first.
    const daysNum = parseInt(days as string, 10) || 30;

    // Shared WHERE clause for all three queries below.
    const conditions: string[] = [`e.occurred_at >= NOW() - INTERVAL '${daysNum} days'`];
    const params: any[] = [];
    let paramIdx = 1;

    if (state) {
      conditions.push(`d.state = $${paramIdx++}`);
      params.push(state);
    }

    const whereClause = `WHERE ${conditions.join(' AND ')}`;

    // Get overall stats
    const statsResult = await pool.query(`
      SELECT
        COUNT(*) as total_clicks,
        COUNT(DISTINCT e.product_id) as unique_products,
        COUNT(DISTINCT e.store_id) as unique_stores,
        COUNT(DISTINCT e.brand_id) FILTER (WHERE e.brand_id IS NOT NULL) as unique_brands,
        COUNT(*) FILTER (WHERE e.campaign_id IS NOT NULL) as campaign_clicks,
        COUNT(DISTINCT e.campaign_id) FILTER (WHERE e.campaign_id IS NOT NULL) as unique_campaigns
      FROM product_click_events e
      LEFT JOIN dispensaries d ON e.store_id::int = d.id
      ${whereClause}
    `, params);

    // Breakdown by click action type (column on product_click_events).
    const actionResult = await pool.query(`
      SELECT
        action,
        COUNT(*) as count
      FROM product_click_events e
      LEFT JOIN dispensaries d ON e.store_id::int = d.id
      ${whereClause}
      GROUP BY action
      ORDER BY count DESC
    `, params);

    // Daily series for the chart: the 14 most recent days within the window.
    const dailyResult = await pool.query(`
      SELECT
        DATE(occurred_at) as date,
        COUNT(*) as clicks
      FROM product_click_events e
      LEFT JOIN dispensaries d ON e.store_id::int = d.id
      ${whereClause}
      GROUP BY DATE(occurred_at)
      ORDER BY date DESC
      LIMIT 14
    `, params);

    const stats = statsResult.rows[0];

    res.json({
      filters: {
        state: state || null,
        days: daysNum
      },
      summary: {
        total_clicks: parseInt(stats.total_clicks, 10),
        unique_products: parseInt(stats.unique_products, 10),
        unique_stores: parseInt(stats.unique_stores, 10),
        unique_brands: parseInt(stats.unique_brands, 10),
        campaign_clicks: parseInt(stats.campaign_clicks, 10),
        unique_campaigns: parseInt(stats.unique_campaigns, 10)
      },
      by_action: actionResult.rows.map(row => ({
        action: row.action,
        count: parseInt(row.count, 10)
      })),
      // Query sorted DESC for the LIMIT; reverse to chronological order.
      daily: dailyResult.rows.map(row => ({
        date: row.date,
        clicks: parseInt(row.clicks, 10)
      })).reverse()
    });
  } catch (error: any) {
    console.error('[ClickAnalytics] Error fetching click summary:', error.message);
    res.status(500).json({ error: 'Failed to fetch click summary' });
  }
});
|
||||
|
||||
export default router;
|
||||
378
backend/src/routes/consumer-alerts.ts
Normal file
378
backend/src/routes/consumer-alerts.ts
Normal file
@@ -0,0 +1,378 @@
|
||||
/**
 * Consumer Alerts API Routes
 * Handles price alerts for findagram.co and deal alerts for findadispo.com
 */

import { Router, Request, Response } from 'express';
import { pool } from '../db/pool';
import { authenticateConsumer } from './consumer-auth';

const router = Router();

// All routes require authentication; authenticateConsumer attaches userId
// and domain to the request object.
router.use(authenticateConsumer);
|
||||
|
||||
/**
 * GET /api/consumer/alerts
 * Get user's alerts
 *
 * Response shape differs by domain: findagram.co alerts are product/price
 * alerts, findadispo.com alerts are dispensary/location alerts.
 */
router.get('/', async (req: Request, res: Response) => {
  try {
    // Attached by the authenticateConsumer middleware.
    const userId = (req as any).userId;
    const domain = (req as any).domain;

    if (domain === 'findagram.co') {
      // Join product + dispensary details, plus the latest price/stock
      // snapshot per product via a LATERAL subquery (most recent crawl row).
      const result = await pool.query(
        `SELECT a.*,
                p.name as product_name,
                p.brand as product_brand,
                p.image_url as product_image,
                d.name as dispensary_name,
                d.dba_name,
                d.city as dispensary_city,
                d.state as dispensary_state,
                ls.price as current_price,
                ls.stock_status as current_stock_status
         FROM findagram_alerts a
         LEFT JOIN store_products p ON a.product_id = p.id
         LEFT JOIN dispensaries d ON a.dispensary_id = d.id
         LEFT JOIN LATERAL (
           SELECT price, stock_status FROM store_product_snapshots
           WHERE product_id = a.product_id
           ORDER BY crawled_at DESC LIMIT 1
         ) ls ON true
         WHERE a.user_id = $1
         ORDER BY a.created_at DESC`,
        [userId]
      );

      res.json({
        alerts: result.rows.map(row => ({
          id: row.id,
          alertType: row.alert_type,
          productId: row.product_id,
          productName: row.product_name,
          productBrand: row.product_brand,
          productImage: row.product_image,
          dispensaryId: row.dispensary_id,
          dispensaryName: row.dba_name || row.dispensary_name,
          dispensaryCity: row.dispensary_city,
          dispensaryState: row.dispensary_state,
          brand: row.brand,       // alert-level brand filter (from a.*)
          category: row.category, // alert-level category filter (from a.*)
          targetPrice: row.target_price,
          currentPrice: row.current_price,
          currentStockStatus: row.current_stock_status,
          isActive: row.is_active,
          lastTriggeredAt: row.last_triggered_at,
          triggerCount: row.trigger_count,
          // Computed: is alert condition met? Only evaluated for price_drop
          // alerts that have both a current and a target price.
          isTriggered: row.alert_type === 'price_drop' && row.current_price && row.target_price &&
                       parseFloat(row.current_price) <= parseFloat(row.target_price),
          createdAt: row.created_at
        }))
      });
    } else if (domain === 'findadispo.com') {
      const result = await pool.query(
        `SELECT a.*,
                d.name as dispensary_name,
                d.dba_name,
                d.city as dispensary_city,
                d.state as dispensary_state
         FROM findadispo_alerts a
         LEFT JOIN dispensaries d ON a.dispensary_id = d.id
         WHERE a.user_id = $1
         ORDER BY a.created_at DESC`,
        [userId]
      );

      res.json({
        alerts: result.rows.map(row => ({
          id: row.id,
          alertType: row.alert_type,
          dispensaryId: row.dispensary_id,
          dispensaryName: row.dba_name || row.dispensary_name,
          // Location-only alerts have no dispensary row; fall back to the
          // alert's own city/state.
          dispensaryCity: row.dispensary_city || row.city,
          dispensaryState: row.dispensary_state || row.state,
          city: row.city,
          state: row.state,
          isActive: row.is_active,
          lastTriggeredAt: row.last_triggered_at,
          triggerCount: row.trigger_count,
          createdAt: row.created_at
        }))
      });
    } else {
      res.status(400).json({ error: 'Invalid domain' });
    }
  } catch (error) {
    console.error('[Consumer Alerts] Get error:', error);
    res.status(500).json({ error: 'Failed to get alerts' });
  }
});
|
||||
|
||||
/**
 * POST /api/consumer/alerts
 * Create a new alert
 *
 * Body fields and validation rules differ by domain (see branches below).
 */
router.post('/', async (req: Request, res: Response) => {
  try {
    // Attached by the authenticateConsumer middleware.
    const userId = (req as any).userId;
    const domain = (req as any).domain;

    if (domain === 'findagram.co') {
      const { alertType, productId, dispensaryId, brand, category, targetPrice } = req.body;

      // Validate alert type
      const validTypes = ['price_drop', 'back_in_stock', 'product_on_special'];
      if (!validTypes.includes(alertType)) {
        return res.status(400).json({
          error: 'Invalid alert type',
          validTypes
        });
      }

      // At least one target must be specified
      if (!productId && !brand && !category) {
        return res.status(400).json({
          error: 'Must specify at least one of: productId, brand, or category'
        });
      }

      // Price drop alerts require target price
      if (alertType === 'price_drop' && !targetPrice) {
        return res.status(400).json({
          error: 'targetPrice required for price_drop alerts'
        });
      }

      // Alerts start active; optional fields are stored as NULL.
      const result = await pool.query(
        `INSERT INTO findagram_alerts
         (user_id, alert_type, product_id, dispensary_id, brand, category, target_price, is_active)
         VALUES ($1, $2, $3, $4, $5, $6, $7, true)
         RETURNING id`,
        [userId, alertType, productId || null, dispensaryId || null, brand || null, category || null, targetPrice || null]
      );

      res.status(201).json({
        success: true,
        alertId: result.rows[0].id,
        message: 'Alert created'
      });

    } else if (domain === 'findadispo.com') {
      const { alertType, dispensaryId, city, state } = req.body;

      const validTypes = ['new_dispensary', 'deal_available'];
      if (!validTypes.includes(alertType)) {
        return res.status(400).json({
          error: 'Invalid alert type',
          validTypes
        });
      }

      // Location alerts require city/state, dispensary alerts require dispensaryId
      if (alertType === 'new_dispensary' && !city && !state) {
        return res.status(400).json({
          error: 'city or state required for new_dispensary alerts'
        });
      }

      if (alertType === 'deal_available' && !dispensaryId && !city && !state) {
        return res.status(400).json({
          error: 'dispensaryId, city, or state required for deal_available alerts'
        });
      }

      const result = await pool.query(
        `INSERT INTO findadispo_alerts
         (user_id, alert_type, dispensary_id, city, state, is_active)
         VALUES ($1, $2, $3, $4, $5, true)
         RETURNING id`,
        [userId, alertType, dispensaryId || null, city || null, state || null]
      );

      res.status(201).json({
        success: true,
        alertId: result.rows[0].id,
        message: 'Alert created'
      });

    } else {
      res.status(400).json({ error: 'Invalid domain' });
    }
  } catch (error) {
    console.error('[Consumer Alerts] Create error:', error);
    res.status(500).json({ error: 'Failed to create alert' });
  }
});
|
||||
|
||||
/**
|
||||
* PUT /api/consumer/alerts/:id
|
||||
* Update an alert
|
||||
*/
|
||||
router.put('/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const alertId = parseInt(req.params.id);
|
||||
|
||||
if (isNaN(alertId)) {
|
||||
return res.status(400).json({ error: 'Invalid alert ID' });
|
||||
}
|
||||
|
||||
const { isActive, targetPrice } = req.body;
|
||||
|
||||
const table = domain === 'findagram.co' ? 'findagram_alerts' : 'findadispo_alerts';
|
||||
|
||||
// Build update query dynamically
|
||||
const updates: string[] = [];
|
||||
const values: any[] = [];
|
||||
let paramIndex = 1;
|
||||
|
||||
if (typeof isActive === 'boolean') {
|
||||
updates.push(`is_active = $${paramIndex++}`);
|
||||
values.push(isActive);
|
||||
}
|
||||
|
||||
if (domain === 'findagram.co' && targetPrice !== undefined) {
|
||||
updates.push(`target_price = $${paramIndex++}`);
|
||||
values.push(targetPrice);
|
||||
}
|
||||
|
||||
updates.push(`updated_at = NOW()`);
|
||||
|
||||
if (updates.length === 1) {
|
||||
return res.status(400).json({ error: 'No valid fields to update' });
|
||||
}
|
||||
|
||||
values.push(alertId, userId);
|
||||
|
||||
const result = await pool.query(
|
||||
`UPDATE ${table} SET ${updates.join(', ')}
|
||||
WHERE id = $${paramIndex++} AND user_id = $${paramIndex}
|
||||
RETURNING id`,
|
||||
values
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Alert not found' });
|
||||
}
|
||||
|
||||
res.json({ success: true, message: 'Alert updated' });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Alerts] Update error:', error);
|
||||
res.status(500).json({ error: 'Failed to update alert' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* DELETE /api/consumer/alerts/:id
|
||||
* Delete an alert
|
||||
*/
|
||||
router.delete('/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const alertId = parseInt(req.params.id);
|
||||
|
||||
if (isNaN(alertId)) {
|
||||
return res.status(400).json({ error: 'Invalid alert ID' });
|
||||
}
|
||||
|
||||
const table = domain === 'findagram.co' ? 'findagram_alerts' : 'findadispo_alerts';
|
||||
|
||||
const result = await pool.query(
|
||||
`DELETE FROM ${table} WHERE id = $1 AND user_id = $2 RETURNING id`,
|
||||
[alertId, userId]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Alert not found' });
|
||||
}
|
||||
|
||||
res.json({ success: true, message: 'Alert deleted' });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Alerts] Delete error:', error);
|
||||
res.status(500).json({ error: 'Failed to delete alert' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/consumer/alerts/:id/toggle
|
||||
* Toggle alert active status
|
||||
*/
|
||||
router.post('/:id/toggle', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const alertId = parseInt(req.params.id);
|
||||
|
||||
if (isNaN(alertId)) {
|
||||
return res.status(400).json({ error: 'Invalid alert ID' });
|
||||
}
|
||||
|
||||
const table = domain === 'findagram.co' ? 'findagram_alerts' : 'findadispo_alerts';
|
||||
|
||||
const result = await pool.query(
|
||||
`UPDATE ${table}
|
||||
SET is_active = NOT is_active, updated_at = NOW()
|
||||
WHERE id = $1 AND user_id = $2
|
||||
RETURNING id, is_active`,
|
||||
[alertId, userId]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Alert not found' });
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
isActive: result.rows[0].is_active,
|
||||
message: result.rows[0].is_active ? 'Alert activated' : 'Alert deactivated'
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Alerts] Toggle error:', error);
|
||||
res.status(500).json({ error: 'Failed to toggle alert' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/consumer/alerts/stats
|
||||
* Get alert statistics for user
|
||||
*/
|
||||
router.get('/stats', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
|
||||
const table = domain === 'findagram.co' ? 'findagram_alerts' : 'findadispo_alerts';
|
||||
|
||||
const result = await pool.query(
|
||||
`SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(*) FILTER (WHERE is_active = true) as active,
|
||||
COUNT(*) FILTER (WHERE is_active = false) as inactive,
|
||||
SUM(trigger_count) as total_triggers,
|
||||
COUNT(*) FILTER (WHERE last_triggered_at > NOW() - INTERVAL '7 days') as triggered_this_week
|
||||
FROM ${table}
|
||||
WHERE user_id = $1`,
|
||||
[userId]
|
||||
);
|
||||
|
||||
const stats = result.rows[0];
|
||||
|
||||
res.json({
|
||||
total: parseInt(stats.total) || 0,
|
||||
active: parseInt(stats.active) || 0,
|
||||
inactive: parseInt(stats.inactive) || 0,
|
||||
totalTriggers: parseInt(stats.total_triggers) || 0,
|
||||
triggeredThisWeek: parseInt(stats.triggered_this_week) || 0
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Alerts] Stats error:', error);
|
||||
res.status(500).json({ error: 'Failed to get alert stats' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
531
backend/src/routes/consumer-auth.ts
Normal file
531
backend/src/routes/consumer-auth.ts
Normal file
@@ -0,0 +1,531 @@
|
||||
/**
 * Consumer Authentication Routes
 * Handles registration, login, and verification for findadispo.com and findagram.co users
 */

import { Router, Request, Response } from 'express';
import { pool } from '../db/pool';
import bcrypt from 'bcryptjs';
import jwt from 'jsonwebtoken';
import crypto from 'crypto';

const router = Router();

// JWT secret - should be in env vars
// SECURITY(review): the hard-coded fallback means consumer tokens are
// forgeable anywhere JWT_SECRET is unset — consider failing fast at startup
// instead of defaulting.
const JWT_SECRET = process.env.JWT_SECRET || 'consumer-jwt-secret-change-in-production';
// Consumer sessions are long-lived (30 days).
const JWT_EXPIRES_IN = '30d';

// SMS API configuration (sms.cannabrands.com)
// NOTE(review): not yet used below — SMS sending is still a TODO in the
// register handler.
const SMS_API_URL = process.env.SMS_API_URL || 'https://sms.cannabrands.com/api';
const SMS_API_KEY = process.env.SMS_API_KEY || '';
|
||||
|
||||
// Request body for POST /register. `domain` scopes the account to one of the
// two consumer sites; the same email may register once per domain.
interface RegisterRequest {
  firstName: string;
  lastName: string;
  email: string;
  password: string;
  phone?: string; // enables SMS verification when provided
  city?: string;  // used for location-based notification filtering
  state?: string;
  notificationPreference?: 'email' | 'sms' | 'both';
  domain: 'findadispo.com' | 'findagram.co';
}

// Request body for POST /login.
interface LoginRequest {
  email: string;
  password: string;
  domain: 'findadispo.com' | 'findagram.co';
}
|
||||
|
||||
/**
 * POST /api/consumer/auth/register
 * Register a new consumer user
 *
 * Creates the shared users row, a domain-specific profile row, issues a JWT,
 * and (for now) logs rather than sends the verification email/SMS.
 */
router.post('/register', async (req: Request, res: Response) => {
  try {
    const {
      firstName,
      lastName,
      email,
      password,
      phone,
      city,
      state,
      notificationPreference = 'email',
      domain
    } = req.body as RegisterRequest;

    // Validation
    if (!firstName || !lastName || !email || !password || !domain) {
      return res.status(400).json({
        error: 'Missing required fields',
        required: ['firstName', 'lastName', 'email', 'password', 'domain']
      });
    }

    if (!['findadispo.com', 'findagram.co'].includes(domain)) {
      return res.status(400).json({ error: 'Invalid domain' });
    }

    // Check if email already exists for this domain.
    // NOTE(review): this SELECT-then-INSERT is race-prone under concurrent
    // registrations; a UNIQUE(email, domain) constraint plus handling the
    // conflict error would be safer — confirm the schema enforces it.
    const existingUser = await pool.query(
      'SELECT id FROM users WHERE email = $1 AND domain = $2',
      [email.toLowerCase(), domain]
    );

    if (existingUser.rows.length > 0) {
      return res.status(409).json({ error: 'Email already registered' });
    }

    // Hash password (bcrypt, cost factor 12)
    const passwordHash = await bcrypt.hash(password, 12);

    // Generate email verification token (256-bit random hex)
    const emailVerificationToken = crypto.randomBytes(32).toString('hex');

    // Insert user; $1-$11 are bound values, the three NOW() calls fill
    // email_verification_sent_at, created_at and updated_at.
    const userResult = await pool.query(
      `INSERT INTO users (
        email, password_hash, first_name, last_name, phone, city, state,
        domain, role, notification_preference,
        email_verification_token, email_verification_sent_at,
        created_at, updated_at
      ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, NOW(), NOW(), NOW())
      RETURNING id, email, first_name, last_name, phone, city, state, domain, notification_preference`,
      [
        email.toLowerCase(),
        passwordHash,
        firstName,
        lastName,
        phone || null,
        city || null,
        state || null,
        domain,
        'consumer',
        notificationPreference,
        emailVerificationToken
      ]
    );

    const user = userResult.rows[0];

    // Create domain-specific profile row alongside the shared users row.
    if (domain === 'findagram.co') {
      await pool.query(
        `INSERT INTO findagram_users (user_id, preferred_city, preferred_state, created_at)
         VALUES ($1, $2, $3, NOW())`,
        [user.id, city || null, state || null]
      );
    } else if (domain === 'findadispo.com') {
      await pool.query(
        `INSERT INTO findadispo_users (user_id, preferred_city, preferred_state, created_at)
         VALUES ($1, $2, $3, NOW())`,
        [user.id, city || null, state || null]
      );
    }

    // Generate JWT token scoped to the domain
    const token = jwt.sign(
      { userId: user.id, email: user.email, domain },
      JWT_SECRET,
      { expiresIn: JWT_EXPIRES_IN }
    );

    // TODO: Send email verification (integrate with SMTP service)
    // For now, just log it
    console.log(`[Consumer Auth] Verification email would be sent to ${email} with token ${emailVerificationToken}`);

    // If phone provided, store a 6-digit SMS verification code.
    // NOTE(review): the code is stored in plaintext and logged below; hash it
    // and drop the log line before wiring up real SMS delivery.
    if (phone) {
      const verificationCode = Math.floor(100000 + Math.random() * 900000).toString();
      await pool.query(
        'UPDATE users SET phone_verification_code = $1, phone_verification_sent_at = NOW() WHERE id = $2',
        [verificationCode, user.id]
      );

      // TODO: Send SMS via sms.cannabrands.com
      console.log(`[Consumer Auth] SMS verification code ${verificationCode} would be sent to ${phone}`);
    }

    res.status(201).json({
      success: true,
      user: {
        id: user.id,
        email: user.email,
        firstName: user.first_name,
        lastName: user.last_name,
        phone: user.phone,
        city: user.city,
        state: user.state,
        domain: user.domain,
        notificationPreference: user.notification_preference,
        emailVerified: false,
        phoneVerified: false
      },
      token,
      message: 'Registration successful. Please verify your email.'
    });
  } catch (error) {
    console.error('[Consumer Auth] Registration error:', error);
    res.status(500).json({ error: 'Registration failed' });
  }
});
|
||||
|
||||
/**
 * POST /api/consumer/auth/login
 * Login a consumer user
 *
 * Body: { email, password, domain }. The domain scopes the lookup — the
 * same email may exist independently per consumer site.
 * Responses: 200 with { user, token }, 400 for missing fields,
 * 401 for bad credentials, 500 on unexpected failure.
 */
router.post('/login', async (req: Request, res: Response) => {
  try {
    const { email, password, domain } = req.body as LoginRequest;

    if (!email || !password || !domain) {
      return res.status(400).json({
        error: 'Missing required fields',
        required: ['email', 'password', 'domain']
      });
    }

    // Find user — emails are stored lowercased, so the input is lowercased
    // to match. Only 'consumer' accounts may log in through this endpoint.
    const userResult = await pool.query(
      `SELECT id, email, password_hash, first_name, last_name, phone, city, state,
              domain, notification_preference, email_verified, phone_verified
       FROM users
       WHERE email = $1 AND domain = $2 AND role = 'consumer'`,
      [email.toLowerCase(), domain]
    );

    // Same generic 401 for unknown user and wrong password so the endpoint
    // does not reveal which accounts exist.
    if (userResult.rows.length === 0) {
      return res.status(401).json({ error: 'Invalid email or password' });
    }

    const user = userResult.rows[0];

    // Verify password against the stored bcrypt hash.
    const isValidPassword = await bcrypt.compare(password, user.password_hash);
    if (!isValidPassword) {
      return res.status(401).json({ error: 'Invalid email or password' });
    }

    // Generate JWT token carrying exactly the claims authenticateConsumer reads.
    const token = jwt.sign(
      { userId: user.id, email: user.email, domain },
      JWT_SECRET,
      { expiresIn: JWT_EXPIRES_IN }
    );

    // Map snake_case DB columns to the camelCase API shape.
    res.json({
      success: true,
      user: {
        id: user.id,
        email: user.email,
        firstName: user.first_name,
        lastName: user.last_name,
        phone: user.phone,
        city: user.city,
        state: user.state,
        domain: user.domain,
        notificationPreference: user.notification_preference,
        emailVerified: user.email_verified,
        phoneVerified: user.phone_verified
      },
      token
    });
  } catch (error) {
    console.error('[Consumer Auth] Login error:', error);
    res.status(500).json({ error: 'Login failed' });
  }
});
|
||||
|
||||
/**
|
||||
* POST /api/consumer/auth/verify-email
|
||||
* Verify email with token
|
||||
*/
|
||||
router.post('/verify-email', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { token } = req.body;
|
||||
|
||||
if (!token) {
|
||||
return res.status(400).json({ error: 'Verification token required' });
|
||||
}
|
||||
|
||||
const result = await pool.query(
|
||||
`UPDATE users
|
||||
SET email_verified = true,
|
||||
email_verification_token = NULL,
|
||||
updated_at = NOW()
|
||||
WHERE email_verification_token = $1
|
||||
RETURNING id, email, domain`,
|
||||
[token]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(400).json({ error: 'Invalid or expired verification token' });
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Email verified successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Auth] Email verification error:', error);
|
||||
res.status(500).json({ error: 'Verification failed' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/consumer/auth/verify-phone
|
||||
* Verify phone with SMS code
|
||||
*/
|
||||
router.post('/verify-phone', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email, code, domain } = req.body;
|
||||
|
||||
if (!email || !code || !domain) {
|
||||
return res.status(400).json({ error: 'Email, code, and domain required' });
|
||||
}
|
||||
|
||||
// Check if code is valid and not expired (15 minute window)
|
||||
const result = await pool.query(
|
||||
`UPDATE users
|
||||
SET phone_verified = true,
|
||||
phone_verification_code = NULL,
|
||||
updated_at = NOW()
|
||||
WHERE email = $1
|
||||
AND domain = $2
|
||||
AND phone_verification_code = $3
|
||||
AND phone_verification_sent_at > NOW() - INTERVAL '15 minutes'
|
||||
RETURNING id, email`,
|
||||
[email.toLowerCase(), domain, code]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(400).json({ error: 'Invalid or expired verification code' });
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Phone verified successfully'
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Auth] Phone verification error:', error);
|
||||
res.status(500).json({ error: 'Verification failed' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/consumer/auth/resend-email-verification
|
||||
* Resend email verification
|
||||
*/
|
||||
router.post('/resend-email-verification', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email, domain } = req.body;
|
||||
|
||||
if (!email || !domain) {
|
||||
return res.status(400).json({ error: 'Email and domain required' });
|
||||
}
|
||||
|
||||
const newToken = crypto.randomBytes(32).toString('hex');
|
||||
|
||||
const result = await pool.query(
|
||||
`UPDATE users
|
||||
SET email_verification_token = $1,
|
||||
email_verification_sent_at = NOW()
|
||||
WHERE email = $2 AND domain = $3 AND email_verified = false
|
||||
RETURNING id, email`,
|
||||
[newToken, email.toLowerCase(), domain]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(400).json({ error: 'User not found or already verified' });
|
||||
}
|
||||
|
||||
// TODO: Send email
|
||||
console.log(`[Consumer Auth] Resending verification email to ${email} with token ${newToken}`);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Verification email sent'
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Auth] Resend verification error:', error);
|
||||
res.status(500).json({ error: 'Failed to resend verification' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/consumer/auth/resend-phone-verification
|
||||
* Resend SMS verification code
|
||||
*/
|
||||
router.post('/resend-phone-verification', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { email, domain } = req.body;
|
||||
|
||||
if (!email || !domain) {
|
||||
return res.status(400).json({ error: 'Email and domain required' });
|
||||
}
|
||||
|
||||
const verificationCode = Math.floor(100000 + Math.random() * 900000).toString();
|
||||
|
||||
const result = await pool.query(
|
||||
`UPDATE users
|
||||
SET phone_verification_code = $1,
|
||||
phone_verification_sent_at = NOW()
|
||||
WHERE email = $2 AND domain = $3 AND phone IS NOT NULL AND phone_verified = false
|
||||
RETURNING id, phone`,
|
||||
[verificationCode, email.toLowerCase(), domain]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(400).json({ error: 'User not found, no phone, or already verified' });
|
||||
}
|
||||
|
||||
// TODO: Send SMS via sms.cannabrands.com
|
||||
console.log(`[Consumer Auth] Resending SMS code ${verificationCode} to ${result.rows[0].phone}`);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Verification code sent'
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Auth] Resend phone verification error:', error);
|
||||
res.status(500).json({ error: 'Failed to resend verification' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * GET /api/consumer/auth/me
 * Get current user profile (requires auth)
 *
 * Returns the core user record plus the domain-specific profile row
 * (findagram_users or findadispo_users) when one exists; profile is null
 * for any other domain value.
 */
router.get('/me', authenticateConsumer, async (req: Request, res: Response) => {
  try {
    // Set by authenticateConsumer from the verified JWT payload.
    const userId = (req as any).userId;
    const domain = (req as any).domain;

    const userResult = await pool.query(
      `SELECT id, email, first_name, last_name, phone, city, state,
              domain, notification_preference, email_verified, phone_verified
       FROM users WHERE id = $1`,
      [userId]
    );

    // A still-valid token may reference a user that has since been removed.
    if (userResult.rows.length === 0) {
      return res.status(404).json({ error: 'User not found' });
    }

    const user = userResult.rows[0];

    // Get domain-specific profile
    let profile = null;
    if (domain === 'findagram.co') {
      const profileResult = await pool.query(
        'SELECT * FROM findagram_users WHERE user_id = $1',
        [userId]
      );
      profile = profileResult.rows[0] || null;
    } else if (domain === 'findadispo.com') {
      const profileResult = await pool.query(
        'SELECT * FROM findadispo_users WHERE user_id = $1',
        [userId]
      );
      profile = profileResult.rows[0] || null;
    }

    // User columns are mapped to camelCase; the profile row is passed
    // through unmapped (raw snake_case column names from SELECT *).
    res.json({
      user: {
        id: user.id,
        email: user.email,
        firstName: user.first_name,
        lastName: user.last_name,
        phone: user.phone,
        city: user.city,
        state: user.state,
        domain: user.domain,
        notificationPreference: user.notification_preference,
        emailVerified: user.email_verified,
        phoneVerified: user.phone_verified
      },
      profile
    });
  } catch (error) {
    console.error('[Consumer Auth] Get profile error:', error);
    res.status(500).json({ error: 'Failed to get profile' });
  }
});
|
||||
|
||||
/**
|
||||
* PUT /api/consumer/auth/me
|
||||
* Update current user profile
|
||||
*/
|
||||
router.put('/me', authenticateConsumer, async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const { firstName, lastName, phone, city, state, notificationPreference } = req.body;
|
||||
|
||||
// Update user table
|
||||
await pool.query(
|
||||
`UPDATE users SET
|
||||
first_name = COALESCE($1, first_name),
|
||||
last_name = COALESCE($2, last_name),
|
||||
phone = COALESCE($3, phone),
|
||||
city = COALESCE($4, city),
|
||||
state = COALESCE($5, state),
|
||||
notification_preference = COALESCE($6, notification_preference),
|
||||
updated_at = NOW()
|
||||
WHERE id = $7`,
|
||||
[firstName, lastName, phone, city, state, notificationPreference, userId]
|
||||
);
|
||||
|
||||
// Update domain-specific profile
|
||||
if (domain === 'findagram.co') {
|
||||
await pool.query(
|
||||
`UPDATE findagram_users SET
|
||||
preferred_city = COALESCE($1, preferred_city),
|
||||
preferred_state = COALESCE($2, preferred_state),
|
||||
updated_at = NOW()
|
||||
WHERE user_id = $3`,
|
||||
[city, state, userId]
|
||||
);
|
||||
} else if (domain === 'findadispo.com') {
|
||||
await pool.query(
|
||||
`UPDATE findadispo_users SET
|
||||
preferred_city = COALESCE($1, preferred_city),
|
||||
preferred_state = COALESCE($2, preferred_state),
|
||||
updated_at = NOW()
|
||||
WHERE user_id = $3`,
|
||||
[city, state, userId]
|
||||
);
|
||||
}
|
||||
|
||||
res.json({ success: true, message: 'Profile updated' });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Auth] Update profile error:', error);
|
||||
res.status(500).json({ error: 'Failed to update profile' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Middleware to authenticate consumer requests
|
||||
*/
|
||||
export function authenticateConsumer(req: Request, res: Response, next: Function) {
|
||||
const authHeader = req.headers.authorization;
|
||||
|
||||
if (!authHeader || !authHeader.startsWith('Bearer ')) {
|
||||
return res.status(401).json({ error: 'Authorization required' });
|
||||
}
|
||||
|
||||
const token = authHeader.substring(7);
|
||||
|
||||
try {
|
||||
const decoded = jwt.verify(token, JWT_SECRET) as { userId: number; email: string; domain: string };
|
||||
(req as any).userId = decoded.userId;
|
||||
(req as any).email = decoded.email;
|
||||
(req as any).domain = decoded.domain;
|
||||
next();
|
||||
} catch (error) {
|
||||
return res.status(401).json({ error: 'Invalid or expired token' });
|
||||
}
|
||||
}
|
||||
|
||||
export default router;
|
||||
409
backend/src/routes/consumer-deals.ts
Normal file
409
backend/src/routes/consumer-deals.ts
Normal file
@@ -0,0 +1,409 @@
|
||||
/**
|
||||
* Consumer Deals API Routes
|
||||
* Provides pre-calculated deals and specials for findagram.co and findadispo.com
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { pool } from '../db/pool';
|
||||
|
||||
const router = Router();
|
||||
|
||||
/**
|
||||
* GET /api/v1/deals
|
||||
* Get products currently on special with calculated savings
|
||||
*
|
||||
* Query params:
|
||||
* - state: Filter by state (default: AZ)
|
||||
* - city: Filter by city
|
||||
* - dispensaryId: Filter by specific dispensary
|
||||
* - category: Filter by product category
|
||||
* - brand: Filter by brand
|
||||
* - minSavings: Minimum savings percentage (e.g., 20 for 20%)
|
||||
* - limit: Results per page (default: 50)
|
||||
* - offset: Pagination offset
|
||||
* - sortBy: 'savings_percent' | 'savings_amount' | 'price' | 'newest' (default: savings_percent)
|
||||
*/
|
||||
router.get('/', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
state = 'AZ',
|
||||
city,
|
||||
dispensaryId,
|
||||
category,
|
||||
brand,
|
||||
minSavings,
|
||||
limit = '50',
|
||||
offset = '0',
|
||||
sortBy = 'savings_percent'
|
||||
} = req.query;
|
||||
|
||||
const params: any[] = [];
|
||||
let paramIndex = 1;
|
||||
const conditions: string[] = [
|
||||
'sp.is_on_special = TRUE',
|
||||
'sp.is_in_stock = TRUE'
|
||||
];
|
||||
|
||||
// State filter
|
||||
if (state) {
|
||||
conditions.push(`d.state = $${paramIndex}`);
|
||||
params.push(state);
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// City filter
|
||||
if (city) {
|
||||
conditions.push(`d.city = $${paramIndex}`);
|
||||
params.push(city);
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Dispensary filter
|
||||
if (dispensaryId) {
|
||||
conditions.push(`sp.dispensary_id = $${paramIndex}`);
|
||||
params.push(parseInt(dispensaryId as string));
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Category filter
|
||||
if (category) {
|
||||
conditions.push(`sp.category_raw ILIKE $${paramIndex}`);
|
||||
params.push(`%${category}%`);
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Brand filter
|
||||
if (brand) {
|
||||
conditions.push(`sp.brand_name_raw ILIKE $${paramIndex}`);
|
||||
params.push(`%${brand}%`);
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Min savings filter
|
||||
if (minSavings) {
|
||||
conditions.push(`sp.discount_percent >= $${paramIndex}`);
|
||||
params.push(parseFloat(minSavings as string));
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Build ORDER BY clause
|
||||
let orderBy = 'sp.discount_percent DESC NULLS LAST';
|
||||
switch (sortBy) {
|
||||
case 'savings_amount':
|
||||
orderBy = 'savings_amount DESC NULLS LAST';
|
||||
break;
|
||||
case 'price':
|
||||
orderBy = 'COALESCE(sp.price_rec_special, sp.price_rec) ASC NULLS LAST';
|
||||
break;
|
||||
case 'newest':
|
||||
orderBy = 'sp.updated_at DESC';
|
||||
break;
|
||||
case 'savings_percent':
|
||||
default:
|
||||
orderBy = 'sp.discount_percent DESC NULLS LAST';
|
||||
}
|
||||
|
||||
// Add pagination
|
||||
params.push(parseInt(limit as string));
|
||||
params.push(parseInt(offset as string));
|
||||
|
||||
const query = `
|
||||
SELECT
|
||||
sp.id,
|
||||
sp.dispensary_id,
|
||||
sp.name_raw as product_name,
|
||||
sp.brand_name_raw as brand,
|
||||
sp.category_raw as category,
|
||||
sp.subcategory_raw as subcategory,
|
||||
-- Pricing
|
||||
sp.price_rec as original_price,
|
||||
sp.price_rec_special as sale_price,
|
||||
sp.price_med as original_price_med,
|
||||
sp.price_med_special as sale_price_med,
|
||||
sp.is_on_special,
|
||||
sp.special_name,
|
||||
-- Calculated savings
|
||||
sp.discount_percent as savings_percent,
|
||||
CASE
|
||||
WHEN sp.price_rec IS NOT NULL AND sp.price_rec_special IS NOT NULL
|
||||
THEN sp.price_rec - sp.price_rec_special
|
||||
ELSE NULL
|
||||
END as savings_amount,
|
||||
-- Product info
|
||||
sp.thc_percent,
|
||||
sp.cbd_percent,
|
||||
sp.image_url,
|
||||
sp.stock_status,
|
||||
sp.provider_product_id,
|
||||
-- Dispensary info
|
||||
d.id as disp_id,
|
||||
COALESCE(d.dba_name, d.name) as dispensary_name,
|
||||
d.city as dispensary_city,
|
||||
d.state as dispensary_state,
|
||||
d.address as dispensary_address,
|
||||
d.slug as dispensary_slug,
|
||||
sp.updated_at as last_updated
|
||||
FROM store_products sp
|
||||
INNER JOIN dispensaries d ON sp.dispensary_id = d.id
|
||||
WHERE ${conditions.join(' AND ')}
|
||||
ORDER BY ${orderBy}
|
||||
LIMIT $${paramIndex} OFFSET $${paramIndex + 1}
|
||||
`;
|
||||
|
||||
const result = await pool.query(query, params);
|
||||
|
||||
// Get total count
|
||||
const countQuery = `
|
||||
SELECT COUNT(*) as total
|
||||
FROM store_products sp
|
||||
INNER JOIN dispensaries d ON sp.dispensary_id = d.id
|
||||
WHERE ${conditions.join(' AND ')}
|
||||
`;
|
||||
const countResult = await pool.query(countQuery, params.slice(0, -2));
|
||||
const total = parseInt(countResult.rows[0]?.total || '0');
|
||||
|
||||
res.json({
|
||||
deals: result.rows.map(row => ({
|
||||
id: row.id,
|
||||
productName: row.product_name,
|
||||
brand: row.brand,
|
||||
category: row.category,
|
||||
subcategory: row.subcategory,
|
||||
// Pricing with calculated savings
|
||||
originalPrice: parseFloat(row.original_price) || null,
|
||||
salePrice: parseFloat(row.sale_price) || null,
|
||||
originalPriceMed: parseFloat(row.original_price_med) || null,
|
||||
salePriceMed: parseFloat(row.sale_price_med) || null,
|
||||
savingsAmount: parseFloat(row.savings_amount) || null,
|
||||
savingsPercent: parseFloat(row.savings_percent) || null,
|
||||
specialName: row.special_name,
|
||||
// Product details
|
||||
thcPercent: parseFloat(row.thc_percent) || null,
|
||||
cbdPercent: parseFloat(row.cbd_percent) || null,
|
||||
imageUrl: row.image_url,
|
||||
stockStatus: row.stock_status,
|
||||
productId: row.provider_product_id,
|
||||
// Dispensary
|
||||
dispensary: {
|
||||
id: row.disp_id,
|
||||
name: row.dispensary_name,
|
||||
city: row.dispensary_city,
|
||||
state: row.dispensary_state,
|
||||
address: row.dispensary_address,
|
||||
slug: row.dispensary_slug
|
||||
},
|
||||
lastUpdated: row.last_updated
|
||||
})),
|
||||
pagination: {
|
||||
total,
|
||||
limit: parseInt(limit as string),
|
||||
offset: parseInt(offset as string),
|
||||
hasMore: parseInt(offset as string) + result.rows.length < total
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Deals] Error:', error);
|
||||
res.status(500).json({ error: 'Failed to get deals' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/v1/deals/summary
|
||||
* Get summary statistics about current deals
|
||||
*/
|
||||
router.get('/summary', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { state = 'AZ' } = req.query;
|
||||
|
||||
const result = await pool.query(`
|
||||
SELECT
|
||||
COUNT(*) as total_deals,
|
||||
COUNT(DISTINCT sp.dispensary_id) as dispensaries_with_deals,
|
||||
COUNT(DISTINCT sp.brand_name_raw) as brands_on_sale,
|
||||
AVG(sp.discount_percent) as avg_discount,
|
||||
MAX(sp.discount_percent) as max_discount,
|
||||
MIN(sp.discount_percent) FILTER (WHERE sp.discount_percent > 0) as min_discount,
|
||||
SUM(CASE WHEN sp.discount_percent >= 20 THEN 1 ELSE 0 END) as deals_over_20_pct,
|
||||
SUM(CASE WHEN sp.discount_percent >= 30 THEN 1 ELSE 0 END) as deals_over_30_pct
|
||||
FROM store_products sp
|
||||
INNER JOIN dispensaries d ON sp.dispensary_id = d.id
|
||||
WHERE sp.is_on_special = TRUE
|
||||
AND sp.is_in_stock = TRUE
|
||||
AND d.state = $1
|
||||
`, [state]);
|
||||
|
||||
const stats = result.rows[0] || {};
|
||||
|
||||
// Get top categories with deals
|
||||
const categoriesResult = await pool.query(`
|
||||
SELECT sp.category_raw as category, COUNT(*) as deal_count
|
||||
FROM store_products sp
|
||||
INNER JOIN dispensaries d ON sp.dispensary_id = d.id
|
||||
WHERE sp.is_on_special = TRUE
|
||||
AND sp.is_in_stock = TRUE
|
||||
AND d.state = $1
|
||||
AND sp.category_raw IS NOT NULL
|
||||
GROUP BY sp.category_raw
|
||||
ORDER BY deal_count DESC
|
||||
LIMIT 5
|
||||
`, [state]);
|
||||
|
||||
res.json({
|
||||
totalDeals: parseInt(stats.total_deals) || 0,
|
||||
dispensariesWithDeals: parseInt(stats.dispensaries_with_deals) || 0,
|
||||
brandsOnSale: parseInt(stats.brands_on_sale) || 0,
|
||||
avgDiscount: parseFloat(stats.avg_discount)?.toFixed(1) || 0,
|
||||
maxDiscount: parseFloat(stats.max_discount)?.toFixed(1) || 0,
|
||||
minDiscount: parseFloat(stats.min_discount)?.toFixed(1) || 0,
|
||||
dealsOver20Pct: parseInt(stats.deals_over_20_pct) || 0,
|
||||
dealsOver30Pct: parseInt(stats.deals_over_30_pct) || 0,
|
||||
topCategories: categoriesResult.rows.map(r => ({
|
||||
category: r.category,
|
||||
dealCount: parseInt(r.deal_count)
|
||||
}))
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[Consumer Deals] Summary error:', error);
|
||||
res.status(500).json({ error: 'Failed to get deals summary' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * GET /api/v1/deals/by-dispensary/:dispensaryId
 * Get all deals for a specific dispensary
 *
 * Query params: limit (default 100), offset (default 0).
 * Responds 400 for a non-numeric dispensaryId; dispensary is null in the
 * response when the id is numeric but unknown.
 */
router.get('/by-dispensary/:dispensaryId', async (req: Request, res: Response) => {
  try {
    const dispensaryId = parseInt(req.params.dispensaryId);
    const { limit = '100', offset = '0' } = req.query;

    if (isNaN(dispensaryId)) {
      return res.status(400).json({ error: 'Invalid dispensary ID' });
    }

    // In-stock specials for this dispensary, best discounts first.
    const result = await pool.query(`
      SELECT
        sp.id,
        sp.name_raw as product_name,
        sp.brand_name_raw as brand,
        sp.category_raw as category,
        sp.price_rec as original_price,
        sp.price_rec_special as sale_price,
        sp.discount_percent as savings_percent,
        CASE
          WHEN sp.price_rec IS NOT NULL AND sp.price_rec_special IS NOT NULL
          THEN sp.price_rec - sp.price_rec_special
          ELSE NULL
        END as savings_amount,
        sp.special_name,
        sp.thc_percent,
        sp.image_url,
        sp.provider_product_id
      FROM store_products sp
      WHERE sp.dispensary_id = $1
        AND sp.is_on_special = TRUE
        AND sp.is_in_stock = TRUE
      ORDER BY sp.discount_percent DESC NULLS LAST
      LIMIT $2 OFFSET $3
    `, [dispensaryId, parseInt(limit as string), parseInt(offset as string)]);

    // Get dispensary info
    const dispResult = await pool.query(`
      SELECT id, COALESCE(dba_name, name) as name, city, state, address, slug
      FROM dispensaries WHERE id = $1
    `, [dispensaryId]);

    const dispensary = dispResult.rows[0];

    res.json({
      dispensary: dispensary ? {
        id: dispensary.id,
        name: dispensary.name,
        city: dispensary.city,
        state: dispensary.state,
        address: dispensary.address,
        slug: dispensary.slug
      } : null,
      deals: result.rows.map(row => ({
        id: row.id,
        productName: row.product_name,
        brand: row.brand,
        category: row.category,
        // NOTE(review): `parseFloat(x) || null` maps a legitimate 0 value
        // to null — confirm zero prices/discounts cannot occur here.
        originalPrice: parseFloat(row.original_price) || null,
        salePrice: parseFloat(row.sale_price) || null,
        savingsAmount: parseFloat(row.savings_amount) || null,
        savingsPercent: parseFloat(row.savings_percent) || null,
        specialName: row.special_name,
        thcPercent: parseFloat(row.thc_percent) || null,
        imageUrl: row.image_url,
        productId: row.provider_product_id
      })),
      // NOTE(review): this is the size of the returned page, not the total
      // matching row count — misleading alongside limit/offset; verify
      // whether clients rely on it before renaming or adding a COUNT query.
      totalDeals: result.rows.length
    });
  } catch (error) {
    console.error('[Consumer Deals] By dispensary error:', error);
    res.status(500).json({ error: 'Failed to get dispensary deals' });
  }
});
|
||||
|
||||
/**
 * GET /api/v1/deals/featured
 * Get featured/best deals (highest savings, manually curated, etc.)
 *
 * Query params: state (default AZ), limit (default 10).
 * Selection: per-category top 3 by discount (>= 15%), then the best
 * overall up to `limit` — the per-category cap keeps the list varied
 * instead of one category dominating.
 */
router.get('/featured', async (req: Request, res: Response) => {
  try {
    const { state = 'AZ', limit = '10' } = req.query;

    // Get top deals by savings percent, ensuring variety:
    // ROW_NUMBER per category ranks deals, the outer query keeps rank <= 3.
    const result = await pool.query(`
      WITH ranked_deals AS (
        SELECT
          sp.*,
          d.id as disp_id,
          COALESCE(d.dba_name, d.name) as dispensary_name,
          d.city as dispensary_city,
          d.state as dispensary_state,
          d.slug as dispensary_slug,
          ROW_NUMBER() OVER (PARTITION BY sp.category_raw ORDER BY sp.discount_percent DESC) as category_rank
        FROM store_products sp
        INNER JOIN dispensaries d ON sp.dispensary_id = d.id
        WHERE sp.is_on_special = TRUE
          AND sp.is_in_stock = TRUE
          AND d.state = $1
          AND sp.discount_percent >= 15 -- Only significant discounts
      )
      SELECT *
      FROM ranked_deals
      WHERE category_rank <= 3 -- Top 3 per category for variety
      ORDER BY discount_percent DESC
      LIMIT $2
    `, [state, parseInt(limit as string)]);

    res.json({
      featuredDeals: result.rows.map(row => ({
        id: row.id,
        productName: row.name_raw,
        brand: row.brand_name_raw,
        category: row.category_raw,
        // savingsPercent is always >= 15 here (SQL filter), so the
        // `|| null` zero-coercion cannot fire for it; prices could still
        // theoretically be 0 → null.
        originalPrice: parseFloat(row.price_rec) || null,
        salePrice: parseFloat(row.price_rec_special) || null,
        savingsPercent: parseFloat(row.discount_percent) || null,
        specialName: row.special_name,
        thcPercent: parseFloat(row.thc_percent) || null,
        imageUrl: row.image_url,
        dispensary: {
          id: row.disp_id,
          name: row.dispensary_name,
          city: row.dispensary_city,
          state: row.dispensary_state,
          slug: row.dispensary_slug
        }
      }))
    });
  } catch (error) {
    console.error('[Consumer Deals] Featured error:', error);
    res.status(500).json({ error: 'Failed to get featured deals' });
  }
});
|
||||
|
||||
export default router;
|
||||
369
backend/src/routes/consumer-favorites.ts
Normal file
369
backend/src/routes/consumer-favorites.ts
Normal file
@@ -0,0 +1,369 @@
|
||||
/**
|
||||
* Consumer Favorites API Routes
|
||||
* Handles product/dispensary favorites for findadispo.com and findagram.co users
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { pool } from '../db/pool';
|
||||
import { authenticateConsumer } from './consumer-auth';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// All routes require authentication
|
||||
router.use(authenticateConsumer);
|
||||
|
||||
/**
 * GET /api/consumer/favorites
 * Get user's favorites (products for findagram, dispensaries for findadispo)
 *
 * Each favorite row stores a snapshot of the item at save time; this
 * endpoint joins the live tables so clients can show both the saved and
 * current values (e.g. price-drop badges).
 */
router.get('/', async (req: Request, res: Response) => {
  try {
    // Set by the authenticateConsumer middleware applied via router.use.
    const userId = (req as any).userId;
    const domain = (req as any).domain;

    if (domain === 'findagram.co') {
      // Product favorites: join the current product row plus, via LATERAL,
      // the most recent snapshot for up-to-date price/stock. LEFT joins so
      // a favorite survives its product being removed from the catalog.
      const result = await pool.query(
        `SELECT f.*,
                p.name as current_name,
                p.brand as current_brand,
                ls.price as current_price,
                ls.stock_status as current_stock_status,
                d.name as dispensary_name,
                d.city as dispensary_city,
                d.state as dispensary_state
         FROM findagram_favorites f
         LEFT JOIN store_products p ON f.product_id = p.id
         LEFT JOIN LATERAL (
           SELECT price, stock_status
           FROM store_product_snapshots
           WHERE product_id = f.product_id
           ORDER BY crawled_at DESC LIMIT 1
         ) ls ON true
         LEFT JOIN dispensaries d ON f.dispensary_id = d.id
         WHERE f.user_id = $1
         ORDER BY f.created_at DESC`,
        [userId]
      );

      res.json({
        favorites: result.rows.map(row => ({
          id: row.id,
          productId: row.product_id,
          dispensaryId: row.dispensary_id,
          // Saved snapshot (captured when the favorite was created)
          savedName: row.product_name,
          savedBrand: row.product_brand,
          savedPrice: row.product_price,
          imageUrl: row.product_image_url,
          // Current data (falls back to the snapshot if the product is gone)
          currentName: row.current_name || row.product_name,
          currentBrand: row.current_brand || row.product_brand,
          currentPrice: row.current_price,
          currentStockStatus: row.current_stock_status,
          // Dispensary info
          dispensaryName: row.dispensary_name,
          dispensaryCity: row.dispensary_city,
          dispensaryState: row.dispensary_state,
          // Price change detection.
          // NOTE(review): when either price is missing these short-circuit
          // to a falsy non-boolean (null/undefined) rather than false —
          // confirm clients treat the value as truthy/falsy, not strict boolean.
          priceChanged: row.current_price && row.product_price &&
            parseFloat(row.current_price) !== parseFloat(row.product_price),
          priceDrop: row.current_price && row.product_price &&
            parseFloat(row.current_price) < parseFloat(row.product_price),
          createdAt: row.created_at
        }))
      });
    } else if (domain === 'findadispo.com') {
      // Dispensary favorites: join the live dispensary row and count its
      // in-stock products via a correlated subquery.
      const result = await pool.query(
        `SELECT f.*,
                d.name as current_name,
                d.dba_name,
                d.address,
                d.city as current_city,
                d.state as current_state,
                d.phone,
                d.website,
                d.menu_url,
                d.hours,
                d.latitude,
                d.longitude,
                (SELECT COUNT(*) FROM store_products WHERE dispensary_id = d.id AND stock_status = 'in_stock') as product_count
         FROM findadispo_favorites f
         LEFT JOIN dispensaries d ON f.dispensary_id = d.id
         WHERE f.user_id = $1
         ORDER BY f.created_at DESC`,
        [userId]
      );

      res.json({
        favorites: result.rows.map(row => ({
          id: row.id,
          dispensaryId: row.dispensary_id,
          // Saved snapshot (captured when the favorite was created)
          savedName: row.dispensary_name,
          savedCity: row.dispensary_city,
          savedState: row.dispensary_state,
          // Current data — prefer the DBA ("doing business as") name, then
          // the live legal name, then the saved snapshot
          currentName: row.dba_name || row.current_name || row.dispensary_name,
          currentCity: row.current_city,
          currentState: row.current_state,
          address: row.address,
          phone: row.phone,
          website: row.website,
          menuUrl: row.menu_url,
          hours: row.hours,
          latitude: row.latitude,
          longitude: row.longitude,
          productCount: parseInt(row.product_count) || 0,
          createdAt: row.created_at
        }))
      });
    } else {
      // JWT carried an unrecognized domain — nothing to list.
      res.status(400).json({ error: 'Invalid domain' });
    }
  } catch (error) {
    console.error('[Consumer Favorites] Get error:', error);
    res.status(500).json({ error: 'Failed to get favorites' });
  }
});
|
||||
|
||||
/**
 * POST /api/consumer/favorites
 * Add a favorite
 *
 * Domain-aware: findagram.co favorites a product (body: { productId, dispensaryId? }),
 * findadispo.com favorites a dispensary (body: { dispensaryId }).
 * A denormalized snapshot (name/brand/price or name/city/state) is stored on
 * the favorite row so the UI can render it even if the source row later
 * changes or disappears.
 */
router.post('/', async (req: Request, res: Response) => {
  try {
    // userId/domain are attached upstream by the consumer auth middleware.
    const userId = (req as any).userId;
    const domain = (req as any).domain;

    if (domain === 'findagram.co') {
      const { productId, dispensaryId } = req.body;

      if (!productId) {
        return res.status(400).json({ error: 'productId required' });
      }

      // Get product details for snapshot.
      // The LATERAL subquery pulls the price from the most recent crawl
      // snapshot (NULL when the product has never been crawled).
      const productResult = await pool.query(
        `SELECT p.name, p.brand, p.image_url,
                ls.price
         FROM store_products p
         LEFT JOIN LATERAL (
           SELECT price FROM store_product_snapshots
           WHERE product_id = p.id ORDER BY crawled_at DESC LIMIT 1
         ) ls ON true
         WHERE p.id = $1`,
        [productId]
      );

      if (productResult.rows.length === 0) {
        return res.status(404).json({ error: 'Product not found' });
      }

      const product = productResult.rows[0];

      // Insert favorite (ON CONFLICT to handle duplicates).
      // Re-favoriting refreshes the snapshot price but deliberately keeps
      // the original created_at, so the favorite's age is preserved.
      const result = await pool.query(
        `INSERT INTO findagram_favorites
           (user_id, product_id, dispensary_id, product_name, product_brand, product_price, product_image_url)
         VALUES ($1, $2, $3, $4, $5, $6, $7)
         ON CONFLICT (user_id, product_id) DO UPDATE SET
           product_price = EXCLUDED.product_price,
           created_at = findagram_favorites.created_at
         RETURNING id`,
        [userId, productId, dispensaryId, product.name, product.brand, product.price, product.image_url]
      );

      res.status(201).json({
        success: true,
        favoriteId: result.rows[0].id,
        message: 'Product added to favorites'
      });

    } else if (domain === 'findadispo.com') {
      const { dispensaryId } = req.body;

      if (!dispensaryId) {
        return res.status(400).json({ error: 'dispensaryId required' });
      }

      // Get dispensary details for snapshot
      const dispResult = await pool.query(
        'SELECT name, dba_name, city, state FROM dispensaries WHERE id = $1',
        [dispensaryId]
      );

      if (dispResult.rows.length === 0) {
        return res.status(404).json({ error: 'Dispensary not found' });
      }

      const dispensary = dispResult.rows[0];

      // Insert favorite. ON CONFLICT makes re-favoriting idempotent while
      // preserving the original created_at. DBA name is preferred over the
      // legal name for display.
      const result = await pool.query(
        `INSERT INTO findadispo_favorites
           (user_id, dispensary_id, dispensary_name, dispensary_city, dispensary_state)
         VALUES ($1, $2, $3, $4, $5)
         ON CONFLICT (user_id, dispensary_id) DO UPDATE SET
           created_at = findadispo_favorites.created_at
         RETURNING id`,
        [userId, dispensaryId, dispensary.dba_name || dispensary.name, dispensary.city, dispensary.state]
      );

      res.status(201).json({
        success: true,
        favoriteId: result.rows[0].id,
        message: 'Dispensary added to favorites'
      });

    } else {
      res.status(400).json({ error: 'Invalid domain' });
    }
  } catch (error) {
    console.error('[Consumer Favorites] Add error:', error);
    res.status(500).json({ error: 'Failed to add favorite' });
  }
});
|
||||
|
||||
/**
|
||||
* DELETE /api/consumer/favorites/:id
|
||||
* Remove a favorite by ID
|
||||
*/
|
||||
router.delete('/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const favoriteId = parseInt(req.params.id);
|
||||
|
||||
if (isNaN(favoriteId)) {
|
||||
return res.status(400).json({ error: 'Invalid favorite ID' });
|
||||
}
|
||||
|
||||
const table = domain === 'findagram.co' ? 'findagram_favorites' : 'findadispo_favorites';
|
||||
|
||||
const result = await pool.query(
|
||||
`DELETE FROM ${table} WHERE id = $1 AND user_id = $2 RETURNING id`,
|
||||
[favoriteId, userId]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Favorite not found' });
|
||||
}
|
||||
|
||||
res.json({ success: true, message: 'Favorite removed' });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Favorites] Delete error:', error);
|
||||
res.status(500).json({ error: 'Failed to remove favorite' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* DELETE /api/consumer/favorites/product/:productId
|
||||
* Remove a product favorite by product ID (findagram only)
|
||||
*/
|
||||
router.delete('/product/:productId', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const productId = parseInt(req.params.productId);
|
||||
|
||||
if (domain !== 'findagram.co') {
|
||||
return res.status(400).json({ error: 'This endpoint is for findagram only' });
|
||||
}
|
||||
|
||||
if (isNaN(productId)) {
|
||||
return res.status(400).json({ error: 'Invalid product ID' });
|
||||
}
|
||||
|
||||
const result = await pool.query(
|
||||
'DELETE FROM findagram_favorites WHERE product_id = $1 AND user_id = $2 RETURNING id',
|
||||
[productId, userId]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Favorite not found' });
|
||||
}
|
||||
|
||||
res.json({ success: true, message: 'Product removed from favorites' });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Favorites] Delete by product error:', error);
|
||||
res.status(500).json({ error: 'Failed to remove favorite' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* DELETE /api/consumer/favorites/dispensary/:dispensaryId
|
||||
* Remove a dispensary favorite by dispensary ID (findadispo only)
|
||||
*/
|
||||
router.delete('/dispensary/:dispensaryId', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const dispensaryId = parseInt(req.params.dispensaryId);
|
||||
|
||||
if (domain !== 'findadispo.com') {
|
||||
return res.status(400).json({ error: 'This endpoint is for findadispo only' });
|
||||
}
|
||||
|
||||
if (isNaN(dispensaryId)) {
|
||||
return res.status(400).json({ error: 'Invalid dispensary ID' });
|
||||
}
|
||||
|
||||
const result = await pool.query(
|
||||
'DELETE FROM findadispo_favorites WHERE dispensary_id = $1 AND user_id = $2 RETURNING id',
|
||||
[dispensaryId, userId]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Favorite not found' });
|
||||
}
|
||||
|
||||
res.json({ success: true, message: 'Dispensary removed from favorites' });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Favorites] Delete by dispensary error:', error);
|
||||
res.status(500).json({ error: 'Failed to remove favorite' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/consumer/favorites/check/:type/:id
|
||||
* Check if an item is favorited
|
||||
*/
|
||||
router.get('/check/:type/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const { type, id } = req.params;
|
||||
const itemId = parseInt(id);
|
||||
|
||||
if (isNaN(itemId)) {
|
||||
return res.status(400).json({ error: 'Invalid ID' });
|
||||
}
|
||||
|
||||
let isFavorited = false;
|
||||
|
||||
if (type === 'product' && domain === 'findagram.co') {
|
||||
const result = await pool.query(
|
||||
'SELECT id FROM findagram_favorites WHERE user_id = $1 AND product_id = $2',
|
||||
[userId, itemId]
|
||||
);
|
||||
isFavorited = result.rows.length > 0;
|
||||
} else if (type === 'dispensary' && domain === 'findadispo.com') {
|
||||
const result = await pool.query(
|
||||
'SELECT id FROM findadispo_favorites WHERE user_id = $1 AND dispensary_id = $2',
|
||||
[userId, itemId]
|
||||
);
|
||||
isFavorited = result.rows.length > 0;
|
||||
} else {
|
||||
return res.status(400).json({ error: 'Invalid type for this domain' });
|
||||
}
|
||||
|
||||
res.json({ isFavorited });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Favorites] Check error:', error);
|
||||
res.status(500).json({ error: 'Failed to check favorite status' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
389
backend/src/routes/consumer-saved-searches.ts
Normal file
389
backend/src/routes/consumer-saved-searches.ts
Normal file
@@ -0,0 +1,389 @@
|
||||
/**
|
||||
* Consumer Saved Searches API Routes
|
||||
* Handles saved searches for findagram.co (products) and findadispo.com (dispensaries)
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { pool } from '../db/pool';
|
||||
import { authenticateConsumer } from './consumer-auth';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// All routes require authentication
|
||||
router.use(authenticateConsumer);
|
||||
|
||||
/**
 * GET /api/consumer/saved-searches
 * Get user's saved searches
 *
 * Response shape is domain-specific: findagram.co returns product-search
 * filters (category/brand/price/THC), findadispo.com returns dispensary
 * filters (rating/distance/amenities). Rows are returned newest-first.
 */
router.get('/', async (req: Request, res: Response) => {
  try {
    // Set by the authenticateConsumer middleware applied to all routes.
    const userId = (req as any).userId;
    const domain = (req as any).domain;

    if (domain === 'findagram.co') {
      const result = await pool.query(
        `SELECT * FROM findagram_saved_searches
         WHERE user_id = $1
         ORDER BY created_at DESC`,
        [userId]
      );

      // Map snake_case DB columns to the camelCase API contract.
      res.json({
        savedSearches: result.rows.map(row => ({
          id: row.id,
          name: row.name,
          query: row.query,
          category: row.category,
          brand: row.brand,
          strainType: row.strain_type,
          minPrice: row.min_price,
          maxPrice: row.max_price,
          minThc: row.min_thc,
          maxThc: row.max_thc,
          city: row.city,
          state: row.state,
          notifyOnNew: row.notify_on_new,
          notifyOnPriceDrop: row.notify_on_price_drop,
          createdAt: row.created_at,
          updatedAt: row.updated_at
        }))
      });
    } else if (domain === 'findadispo.com') {
      const result = await pool.query(
        `SELECT * FROM findadispo_saved_searches
         WHERE user_id = $1
         ORDER BY created_at DESC`,
        [userId]
      );

      res.json({
        savedSearches: result.rows.map(row => ({
          id: row.id,
          name: row.name,
          query: row.query,
          city: row.city,
          state: row.state,
          minRating: row.min_rating,
          maxDistance: row.max_distance,
          // amenities is nullable in the DB; always expose an array.
          amenities: row.amenities || [],
          notifyOnNewDispensary: row.notify_on_new_dispensary,
          notifyOnDeals: row.notify_on_deals,
          createdAt: row.created_at,
          updatedAt: row.updated_at
        }))
      });
    } else {
      res.status(400).json({ error: 'Invalid domain' });
    }
  } catch (error) {
    console.error('[Consumer Saved Searches] Get error:', error);
    res.status(500).json({ error: 'Failed to get saved searches' });
  }
});
|
||||
|
||||
/**
 * POST /api/consumer/saved-searches
 * Create a saved search
 *
 * Body is domain-specific (product filters on findagram.co, dispensary
 * filters on findadispo.com). Only `name` is required; all other filters
 * default to NULL and notification flags default to false.
 */
router.post('/', async (req: Request, res: Response) => {
  try {
    // Set by the authenticateConsumer middleware applied to all routes.
    const userId = (req as any).userId;
    const domain = (req as any).domain;

    if (domain === 'findagram.co') {
      const {
        name,
        query,
        category,
        brand,
        strainType,
        minPrice,
        maxPrice,
        minThc,
        maxThc,
        city,
        state,
        notifyOnNew = false,
        notifyOnPriceDrop = false
      } = req.body;

      if (!name) {
        return res.status(400).json({ error: 'name is required' });
      }

      // Missing/undefined filters are coerced to NULL before insert.
      const result = await pool.query(
        `INSERT INTO findagram_saved_searches
           (user_id, name, query, category, brand, strain_type, min_price, max_price,
            min_thc, max_thc, city, state, notify_on_new, notify_on_price_drop)
         VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
         RETURNING id`,
        [
          userId, name, query || null, category || null, brand || null,
          strainType || null, minPrice || null, maxPrice || null,
          minThc || null, maxThc || null, city || null, state || null,
          notifyOnNew, notifyOnPriceDrop
        ]
      );

      res.status(201).json({
        success: true,
        savedSearchId: result.rows[0].id,
        message: 'Search saved'
      });

    } else if (domain === 'findadispo.com') {
      const {
        name,
        query,
        city,
        state,
        minRating,
        maxDistance,
        amenities,
        notifyOnNewDispensary = false,
        notifyOnDeals = false
      } = req.body;

      if (!name) {
        return res.status(400).json({ error: 'name is required' });
      }

      const result = await pool.query(
        `INSERT INTO findadispo_saved_searches
           (user_id, name, query, city, state, min_rating, max_distance, amenities,
            notify_on_new_dispensary, notify_on_deals)
         VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
         RETURNING id`,
        [
          userId, name, query || null, city || null, state || null,
          minRating || null, maxDistance || null, amenities || null,
          notifyOnNewDispensary, notifyOnDeals
        ]
      );

      res.status(201).json({
        success: true,
        savedSearchId: result.rows[0].id,
        message: 'Search saved'
      });

    } else {
      res.status(400).json({ error: 'Invalid domain' });
    }
  } catch (error) {
    console.error('[Consumer Saved Searches] Create error:', error);
    res.status(500).json({ error: 'Failed to save search' });
  }
});
|
||||
|
||||
/**
 * PUT /api/consumer/saved-searches/:id
 * Update a saved search
 *
 * Partial update: every field uses COALESCE, so omitted fields (undefined ->
 * NULL parameter) keep their stored values. NOTE(review): a consequence is
 * that a field cannot be explicitly cleared back to NULL via this endpoint.
 */
router.put('/:id', async (req: Request, res: Response) => {
  try {
    const userId = (req as any).userId;
    const domain = (req as any).domain;
    const searchId = parseInt(req.params.id);

    if (isNaN(searchId)) {
      return res.status(400).json({ error: 'Invalid search ID' });
    }

    if (domain === 'findagram.co') {
      const {
        name,
        query,
        category,
        brand,
        strainType,
        minPrice,
        maxPrice,
        minThc,
        maxThc,
        city,
        state,
        notifyOnNew,
        notifyOnPriceDrop
      } = req.body;

      // WHERE includes user_id so users can only update their own searches.
      const result = await pool.query(
        `UPDATE findagram_saved_searches SET
           name = COALESCE($1, name),
           query = COALESCE($2, query),
           category = COALESCE($3, category),
           brand = COALESCE($4, brand),
           strain_type = COALESCE($5, strain_type),
           min_price = COALESCE($6, min_price),
           max_price = COALESCE($7, max_price),
           min_thc = COALESCE($8, min_thc),
           max_thc = COALESCE($9, max_thc),
           city = COALESCE($10, city),
           state = COALESCE($11, state),
           notify_on_new = COALESCE($12, notify_on_new),
           notify_on_price_drop = COALESCE($13, notify_on_price_drop),
           updated_at = NOW()
         WHERE id = $14 AND user_id = $15
         RETURNING id`,
        [
          name, query, category, brand, strainType, minPrice, maxPrice,
          minThc, maxThc, city, state, notifyOnNew, notifyOnPriceDrop,
          searchId, userId
        ]
      );

      if (result.rows.length === 0) {
        return res.status(404).json({ error: 'Saved search not found' });
      }

      res.json({ success: true, message: 'Search updated' });

    } else if (domain === 'findadispo.com') {
      const {
        name,
        query,
        city,
        state,
        minRating,
        maxDistance,
        amenities,
        notifyOnNewDispensary,
        notifyOnDeals
      } = req.body;

      // Same COALESCE pattern as the findagram branch, with dispensary fields.
      const result = await pool.query(
        `UPDATE findadispo_saved_searches SET
           name = COALESCE($1, name),
           query = COALESCE($2, query),
           city = COALESCE($3, city),
           state = COALESCE($4, state),
           min_rating = COALESCE($5, min_rating),
           max_distance = COALESCE($6, max_distance),
           amenities = COALESCE($7, amenities),
           notify_on_new_dispensary = COALESCE($8, notify_on_new_dispensary),
           notify_on_deals = COALESCE($9, notify_on_deals),
           updated_at = NOW()
         WHERE id = $10 AND user_id = $11
         RETURNING id`,
        [
          name, query, city, state, minRating, maxDistance,
          amenities, notifyOnNewDispensary, notifyOnDeals,
          searchId, userId
        ]
      );

      if (result.rows.length === 0) {
        return res.status(404).json({ error: 'Saved search not found' });
      }

      res.json({ success: true, message: 'Search updated' });

    } else {
      res.status(400).json({ error: 'Invalid domain' });
    }
  } catch (error) {
    console.error('[Consumer Saved Searches] Update error:', error);
    res.status(500).json({ error: 'Failed to update search' });
  }
});
|
||||
|
||||
/**
|
||||
* DELETE /api/consumer/saved-searches/:id
|
||||
* Delete a saved search
|
||||
*/
|
||||
router.delete('/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const searchId = parseInt(req.params.id);
|
||||
|
||||
if (isNaN(searchId)) {
|
||||
return res.status(400).json({ error: 'Invalid search ID' });
|
||||
}
|
||||
|
||||
const table = domain === 'findagram.co' ? 'findagram_saved_searches' : 'findadispo_saved_searches';
|
||||
|
||||
const result = await pool.query(
|
||||
`DELETE FROM ${table} WHERE id = $1 AND user_id = $2 RETURNING id`,
|
||||
[searchId, userId]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Saved search not found' });
|
||||
}
|
||||
|
||||
res.json({ success: true, message: 'Search deleted' });
|
||||
} catch (error) {
|
||||
console.error('[Consumer Saved Searches] Delete error:', error);
|
||||
res.status(500).json({ error: 'Failed to delete search' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/consumer/saved-searches/:id/run
|
||||
* Execute a saved search and return results
|
||||
* This builds the search URL/params that the frontend can use
|
||||
*/
|
||||
router.post('/:id/run', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const userId = (req as any).userId;
|
||||
const domain = (req as any).domain;
|
||||
const searchId = parseInt(req.params.id);
|
||||
|
||||
if (isNaN(searchId)) {
|
||||
return res.status(400).json({ error: 'Invalid search ID' });
|
||||
}
|
||||
|
||||
const table = domain === 'findagram.co' ? 'findagram_saved_searches' : 'findadispo_saved_searches';
|
||||
|
||||
const result = await pool.query(
|
||||
`SELECT * FROM ${table} WHERE id = $1 AND user_id = $2`,
|
||||
[searchId, userId]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Saved search not found' });
|
||||
}
|
||||
|
||||
const search = result.rows[0];
|
||||
|
||||
// Build search parameters for frontend to use
|
||||
if (domain === 'findagram.co') {
|
||||
const params: Record<string, any> = {};
|
||||
if (search.query) params.q = search.query;
|
||||
if (search.category) params.category = search.category;
|
||||
if (search.brand) params.brand = search.brand;
|
||||
if (search.strain_type) params.strainType = search.strain_type;
|
||||
if (search.min_price) params.minPrice = search.min_price;
|
||||
if (search.max_price) params.maxPrice = search.max_price;
|
||||
if (search.min_thc) params.minThc = search.min_thc;
|
||||
if (search.max_thc) params.maxThc = search.max_thc;
|
||||
if (search.city) params.city = search.city;
|
||||
if (search.state) params.state = search.state;
|
||||
|
||||
res.json({
|
||||
searchParams: params,
|
||||
searchUrl: `/products?${new URLSearchParams(params as any).toString()}`
|
||||
});
|
||||
} else {
|
||||
const params: Record<string, any> = {};
|
||||
if (search.query) params.q = search.query;
|
||||
if (search.city) params.city = search.city;
|
||||
if (search.state) params.state = search.state;
|
||||
if (search.min_rating) params.minRating = search.min_rating;
|
||||
if (search.max_distance) params.maxDistance = search.max_distance;
|
||||
if (search.amenities?.length) params.amenities = search.amenities.join(',');
|
||||
|
||||
res.json({
|
||||
searchParams: params,
|
||||
searchUrl: `/?${new URLSearchParams(params as any).toString()}`
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[Consumer Saved Searches] Run error:', error);
|
||||
res.status(500).json({ error: 'Failed to run search' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
@@ -6,48 +6,57 @@ const router = Router();
|
||||
router.use(authMiddleware);
|
||||
|
||||
// Get dashboard stats - uses consolidated dutchie-az DB
|
||||
// OPTIMIZED: Combined 4 sequential queries into 1 using CTEs
|
||||
router.get('/stats', async (req, res) => {
|
||||
try {
|
||||
// Store stats from dispensaries table in consolidated DB
|
||||
const dispensariesResult = await azQuery(`
|
||||
// All stats in a single query using CTEs
|
||||
const result = await azQuery(`
|
||||
WITH dispensary_stats AS (
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(*) FILTER (WHERE menu_type IS NOT NULL AND menu_type != 'unknown') as active,
|
||||
COUNT(*) FILTER (WHERE platform_dispensary_id IS NOT NULL) as with_platform_id,
|
||||
COUNT(*) FILTER (WHERE menu_url IS NOT NULL) as with_menu_url,
|
||||
MIN(last_crawled_at) as oldest_crawl,
|
||||
MAX(last_crawled_at) as latest_crawl
|
||||
FROM dispensaries
|
||||
),
|
||||
product_stats AS (
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'in_stock') as in_stock,
|
||||
COUNT(*) FILTER (WHERE primary_image_url IS NOT NULL) as with_images,
|
||||
COUNT(DISTINCT brand_name) FILTER (WHERE brand_name IS NOT NULL AND brand_name != '') as unique_brands,
|
||||
COUNT(DISTINCT dispensary_id) as dispensaries_with_products,
|
||||
COUNT(*) FILTER (WHERE created_at >= NOW() - INTERVAL '24 hours') as new_products_24h
|
||||
FROM dutchie_products
|
||||
)
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(*) FILTER (WHERE menu_type IS NOT NULL AND menu_type != 'unknown') as active,
|
||||
COUNT(*) FILTER (WHERE platform_dispensary_id IS NOT NULL) as with_platform_id,
|
||||
COUNT(*) FILTER (WHERE menu_url IS NOT NULL) as with_menu_url,
|
||||
MIN(last_crawled_at) as oldest_crawl,
|
||||
MAX(last_crawled_at) as latest_crawl
|
||||
FROM dispensaries
|
||||
ds.total as store_total, ds.active as store_active,
|
||||
ds.with_platform_id as store_with_platform_id, ds.with_menu_url as store_with_menu_url,
|
||||
ds.oldest_crawl, ds.latest_crawl,
|
||||
ps.total as product_total, ps.in_stock as product_in_stock,
|
||||
ps.with_images as product_with_images, ps.unique_brands as product_unique_brands,
|
||||
ps.dispensaries_with_products, ps.new_products_24h
|
||||
FROM dispensary_stats ds, product_stats ps
|
||||
`);
|
||||
|
||||
// Product stats from dutchie_products table
|
||||
const productsResult = await azQuery(`
|
||||
SELECT
|
||||
COUNT(*) as total,
|
||||
COUNT(*) FILTER (WHERE stock_status = 'in_stock') as in_stock,
|
||||
COUNT(*) FILTER (WHERE primary_image_url IS NOT NULL) as with_images,
|
||||
COUNT(DISTINCT brand_name) FILTER (WHERE brand_name IS NOT NULL AND brand_name != '') as unique_brands,
|
||||
COUNT(DISTINCT dispensary_id) as dispensaries_with_products
|
||||
FROM dutchie_products
|
||||
`);
|
||||
|
||||
// Brand stats from dutchie_products
|
||||
const brandResult = await azQuery(`
|
||||
SELECT COUNT(DISTINCT brand_name) as total
|
||||
FROM dutchie_products
|
||||
WHERE brand_name IS NOT NULL AND brand_name != ''
|
||||
`);
|
||||
|
||||
// Recent products added (last 24 hours)
|
||||
const recentProductsResult = await azQuery(`
|
||||
SELECT COUNT(*) as new_products_24h
|
||||
FROM dutchie_products
|
||||
WHERE created_at >= NOW() - INTERVAL '24 hours'
|
||||
`);
|
||||
|
||||
// Combine results
|
||||
const storeStats = dispensariesResult.rows[0];
|
||||
const productStats = productsResult.rows[0];
|
||||
const stats = result.rows[0] || {};
|
||||
const storeStats = {
|
||||
total: stats.store_total,
|
||||
active: stats.store_active,
|
||||
with_platform_id: stats.store_with_platform_id,
|
||||
with_menu_url: stats.store_with_menu_url,
|
||||
oldest_crawl: stats.oldest_crawl,
|
||||
latest_crawl: stats.latest_crawl
|
||||
};
|
||||
const productStats = {
|
||||
total: stats.product_total,
|
||||
in_stock: stats.product_in_stock,
|
||||
with_images: stats.product_with_images,
|
||||
unique_brands: stats.product_unique_brands,
|
||||
dispensaries_with_products: stats.dispensaries_with_products
|
||||
};
|
||||
|
||||
res.json({
|
||||
stores: {
|
||||
@@ -66,11 +75,11 @@ router.get('/stats', async (req, res) => {
|
||||
dispensaries_with_products: parseInt(productStats.dispensaries_with_products) || 0
|
||||
},
|
||||
brands: {
|
||||
total: parseInt(brandResult.rows[0].total) || 0
|
||||
total: parseInt(productStats.unique_brands) || 0 // Same as unique_brands from product stats
|
||||
},
|
||||
campaigns: { total: 0, active: 0 }, // Legacy - no longer used
|
||||
clicks: { clicks_24h: 0 }, // Legacy - no longer used
|
||||
recent: recentProductsResult.rows[0]
|
||||
recent: { new_products_24h: parseInt(stats.new_products_24h) || 0 }
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error fetching dashboard stats:', error);
|
||||
|
||||
209
backend/src/routes/events.ts
Normal file
209
backend/src/routes/events.ts
Normal file
@@ -0,0 +1,209 @@
|
||||
/**
|
||||
* Events API Routes - Product click tracking
|
||||
*
|
||||
* Tracks user interactions with products for analytics and campaign measurement.
|
||||
*
|
||||
* Endpoints:
|
||||
* POST /api/events/product-click - Record a product click event
|
||||
* GET /api/events/product-clicks - Get product click events (admin)
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { pool } from '../db/pool';
|
||||
import { authMiddleware, optionalAuthMiddleware } from '../auth/middleware';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Valid action types accepted by POST /api/events/product-click.
const VALID_ACTIONS = ['view', 'open_store', 'open_product', 'compare', 'other'];

// Request body accepted by POST /api/events/product-click.
interface ProductClickEventPayload {
  product_id: string;    // required — product the user interacted with
  store_id?: string;
  brand_id?: string;
  campaign_id?: string;  // attribution for campaign measurement
  action: 'view' | 'open_store' | 'open_product' | 'compare' | 'other';
  source: string;        // required — logical origin of the event
  page_type?: string; // Page where event occurred (e.g., StoreDetailPage, BrandsIntelligence)
  url_path?: string; // URL path for debugging
  occurred_at?: string;  // client event time; server time is used if absent
}
|
||||
|
||||
/**
|
||||
* POST /api/events/product-click
|
||||
* Record a product click event
|
||||
*
|
||||
* Fire-and-forget from frontend - returns quickly with minimal validation
|
||||
*/
|
||||
router.post('/product-click', optionalAuthMiddleware, async (req: Request, res: Response) => {
|
||||
try {
|
||||
const payload: ProductClickEventPayload = req.body;
|
||||
|
||||
// Basic validation
|
||||
if (!payload.product_id || typeof payload.product_id !== 'string') {
|
||||
return res.status(400).json({ status: 'error', error: 'product_id is required' });
|
||||
}
|
||||
|
||||
if (!payload.action || !VALID_ACTIONS.includes(payload.action)) {
|
||||
return res.status(400).json({
|
||||
status: 'error',
|
||||
error: `action must be one of: ${VALID_ACTIONS.join(', ')}`
|
||||
});
|
||||
}
|
||||
|
||||
if (!payload.source || typeof payload.source !== 'string') {
|
||||
return res.status(400).json({ status: 'error', error: 'source is required' });
|
||||
}
|
||||
|
||||
// Get user ID from auth context if available
|
||||
const userId = (req as any).user?.id || null;
|
||||
|
||||
// Get IP and user agent from request
|
||||
const ipAddress = req.ip || req.headers['x-forwarded-for'] || null;
|
||||
const userAgent = req.headers['user-agent'] || null;
|
||||
|
||||
// Parse occurred_at or use current time
|
||||
const occurredAt = payload.occurred_at ? new Date(payload.occurred_at) : new Date();
|
||||
|
||||
// Detect device type from user agent (simple heuristic)
|
||||
let deviceType = 'desktop';
|
||||
if (userAgent) {
|
||||
const ua = userAgent.toLowerCase();
|
||||
if (/mobile|android|iphone|ipad|ipod|blackberry|windows phone/i.test(ua)) {
|
||||
deviceType = /ipad|tablet/i.test(ua) ? 'tablet' : 'mobile';
|
||||
}
|
||||
}
|
||||
|
||||
// Insert the event with enhanced fields
|
||||
await pool.query(
|
||||
`INSERT INTO product_click_events
|
||||
(product_id, store_id, brand_id, campaign_id, action, source, user_id, ip_address, user_agent, occurred_at, event_type, page_type, url_path, device_type)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)`,
|
||||
[
|
||||
payload.product_id,
|
||||
payload.store_id || null,
|
||||
payload.brand_id || null,
|
||||
payload.campaign_id || null,
|
||||
payload.action,
|
||||
payload.source,
|
||||
userId,
|
||||
ipAddress,
|
||||
userAgent,
|
||||
occurredAt,
|
||||
'product_click', // event_type
|
||||
payload.page_type || null,
|
||||
payload.url_path || null,
|
||||
deviceType
|
||||
]
|
||||
);
|
||||
|
||||
res.json({ status: 'ok' });
|
||||
} catch (error: any) {
|
||||
console.error('[Events] Error recording product click:', error.message);
|
||||
// Still return ok to not break frontend - events are non-critical
|
||||
res.json({ status: 'ok' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/events/product-clicks
|
||||
* Get product click events (admin only)
|
||||
*
|
||||
* Query params:
|
||||
* - product_id: Filter by product
|
||||
* - store_id: Filter by store
|
||||
* - brand_id: Filter by brand
|
||||
* - campaign_id: Filter by campaign
|
||||
* - action: Filter by action type
|
||||
* - source: Filter by source
|
||||
* - from: Start date (ISO)
|
||||
* - to: End date (ISO)
|
||||
* - limit: Max results (default 100)
|
||||
* - offset: Pagination offset
|
||||
*/
|
||||
router.get('/product-clicks', authMiddleware, async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
product_id,
|
||||
store_id,
|
||||
brand_id,
|
||||
campaign_id,
|
||||
action,
|
||||
source,
|
||||
from,
|
||||
to,
|
||||
limit = '100',
|
||||
offset = '0'
|
||||
} = req.query;
|
||||
|
||||
const conditions: string[] = [];
|
||||
const params: any[] = [];
|
||||
let paramIndex = 1;
|
||||
|
||||
if (product_id) {
|
||||
conditions.push(`product_id = $${paramIndex++}`);
|
||||
params.push(product_id);
|
||||
}
|
||||
if (store_id) {
|
||||
conditions.push(`store_id = $${paramIndex++}`);
|
||||
params.push(store_id);
|
||||
}
|
||||
if (brand_id) {
|
||||
conditions.push(`brand_id = $${paramIndex++}`);
|
||||
params.push(brand_id);
|
||||
}
|
||||
if (campaign_id) {
|
||||
conditions.push(`campaign_id = $${paramIndex++}`);
|
||||
params.push(campaign_id);
|
||||
}
|
||||
if (action) {
|
||||
conditions.push(`action = $${paramIndex++}`);
|
||||
params.push(action);
|
||||
}
|
||||
if (source) {
|
||||
conditions.push(`source = $${paramIndex++}`);
|
||||
params.push(source);
|
||||
}
|
||||
if (from) {
|
||||
conditions.push(`occurred_at >= $${paramIndex++}`);
|
||||
params.push(new Date(from as string));
|
||||
}
|
||||
if (to) {
|
||||
conditions.push(`occurred_at <= $${paramIndex++}`);
|
||||
params.push(new Date(to as string));
|
||||
}
|
||||
|
||||
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
|
||||
|
||||
// Get total count
|
||||
const countResult = await pool.query(
|
||||
`SELECT COUNT(*) as total FROM product_click_events ${whereClause}`,
|
||||
params
|
||||
);
|
||||
const total = parseInt(countResult.rows[0].total, 10);
|
||||
|
||||
// Get events
|
||||
params.push(parseInt(limit as string, 10));
|
||||
params.push(parseInt(offset as string, 10));
|
||||
|
||||
const result = await pool.query(
|
||||
`SELECT * FROM product_click_events
|
||||
${whereClause}
|
||||
ORDER BY occurred_at DESC
|
||||
LIMIT $${paramIndex++} OFFSET $${paramIndex}`,
|
||||
params
|
||||
);
|
||||
|
||||
res.json({
|
||||
events: result.rows,
|
||||
total,
|
||||
limit: parseInt(limit as string, 10),
|
||||
offset: parseInt(offset as string, 10)
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[Events] Error fetching product clicks:', error.message);
|
||||
res.status(500).json({ error: 'Failed to fetch product click events' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
455
backend/src/routes/health.ts
Normal file
455
backend/src/routes/health.ts
Normal file
@@ -0,0 +1,455 @@
|
||||
/**
|
||||
* Health Check Routes
|
||||
*
|
||||
* Comprehensive health endpoints for monitoring API, DB, Redis, Workers, Crawls, and Analytics.
|
||||
*
|
||||
* Endpoints:
|
||||
* GET /api/health - Quick API health check
|
||||
* GET /api/health/db - Postgres health
|
||||
* GET /api/health/redis - Redis health
|
||||
* GET /api/health/workers - Queue and worker status
|
||||
* GET /api/health/crawls - Crawl activity summary
|
||||
* GET /api/health/analytics - Analytics/aggregates status
|
||||
* GET /api/health/full - Aggregated view of all subsystems
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { getPool, healthCheck as dbHealthCheck } from '../dutchie-az/db/connection';
|
||||
import { getRedis } from '../lib/redis';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Read package version
|
||||
let packageVersion = '1.0.0';
|
||||
try {
|
||||
const packagePath = path.join(__dirname, '../../package.json');
|
||||
if (fs.existsSync(packagePath)) {
|
||||
const pkg = JSON.parse(fs.readFileSync(packagePath, 'utf8'));
|
||||
packageVersion = pkg.version || '1.0.0';
|
||||
}
|
||||
} catch {
|
||||
// Ignore errors reading package.json
|
||||
}
|
||||
|
||||
// Store server start time for uptime calculation
|
||||
const serverStartTime = Date.now();
|
||||
|
||||
// Types
|
||||
interface HealthStatus {
|
||||
status: 'ok' | 'degraded' | 'error' | 'stale';
|
||||
}
|
||||
|
||||
interface ApiHealth extends HealthStatus {
|
||||
uptime: number;
|
||||
timestamp: string;
|
||||
version: string;
|
||||
}
|
||||
|
||||
interface DbHealth extends HealthStatus {
|
||||
connected: boolean;
|
||||
latency_ms: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
interface RedisHealth extends HealthStatus {
|
||||
connected: boolean;
|
||||
latency_ms: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
interface QueueInfo {
|
||||
name: string;
|
||||
waiting: number;
|
||||
active: number;
|
||||
completed: number;
|
||||
failed: number;
|
||||
paused: boolean;
|
||||
}
|
||||
|
||||
interface WorkerInfo {
|
||||
id: string;
|
||||
queue: string;
|
||||
status: string;
|
||||
last_heartbeat?: string;
|
||||
}
|
||||
|
||||
interface WorkersHealth extends HealthStatus {
|
||||
queues: QueueInfo[];
|
||||
workers: WorkerInfo[];
|
||||
}
|
||||
|
||||
interface CrawlsHealth extends HealthStatus {
|
||||
last_run: string | null;
|
||||
runs_last_24h: number;
|
||||
stores_with_recent_crawl: number;
|
||||
stores_total: number;
|
||||
stale_stores: number;
|
||||
}
|
||||
|
||||
interface AnalyticsHealth extends HealthStatus {
|
||||
last_aggregate: string | null;
|
||||
daily_runs_last_7d: number;
|
||||
missing_days: number;
|
||||
}
|
||||
|
||||
interface FullHealth extends HealthStatus {
|
||||
api: ApiHealth;
|
||||
db: DbHealth;
|
||||
redis: RedisHealth;
|
||||
workers: WorkersHealth;
|
||||
crawls: CrawlsHealth;
|
||||
analytics: AnalyticsHealth;
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Helper Functions
|
||||
// ============================================================
|
||||
|
||||
async function getApiHealth(): Promise<ApiHealth> {
|
||||
return {
|
||||
status: 'ok',
|
||||
uptime: Math.floor((Date.now() - serverStartTime) / 1000),
|
||||
timestamp: new Date().toISOString(),
|
||||
version: packageVersion,
|
||||
};
|
||||
}
|
||||
|
||||
async function getDbHealth(): Promise<DbHealth> {
|
||||
const start = Date.now();
|
||||
try {
|
||||
const pool = getPool();
|
||||
await pool.query('SELECT 1');
|
||||
return {
|
||||
status: 'ok',
|
||||
connected: true,
|
||||
latency_ms: Date.now() - start,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return {
|
||||
status: 'error',
|
||||
connected: false,
|
||||
latency_ms: Date.now() - start,
|
||||
error: err.message || 'Database connection failed',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function getRedisHealth(): Promise<RedisHealth> {
|
||||
const start = Date.now();
|
||||
|
||||
// Check if Redis is configured
|
||||
if (!process.env.REDIS_URL && !process.env.REDIS_HOST) {
|
||||
return {
|
||||
status: 'ok', // Redis is optional
|
||||
connected: false,
|
||||
latency_ms: 0,
|
||||
error: 'Redis not configured',
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const redis = getRedis();
|
||||
// Use a timeout to prevent hanging
|
||||
const pingPromise = redis.ping();
|
||||
const timeoutPromise = new Promise<never>((_, reject) =>
|
||||
setTimeout(() => reject(new Error('Redis ping timeout')), 3000)
|
||||
);
|
||||
|
||||
await Promise.race([pingPromise, timeoutPromise]);
|
||||
return {
|
||||
status: 'ok',
|
||||
connected: true,
|
||||
latency_ms: Date.now() - start,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return {
|
||||
status: 'degraded',
|
||||
connected: false,
|
||||
latency_ms: Date.now() - start,
|
||||
error: err.message || 'Redis ping failed',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function getWorkersHealth(): Promise<WorkersHealth> {
|
||||
try {
|
||||
const pool = getPool();
|
||||
|
||||
// Get queue stats from v_queue_stats view or equivalent
|
||||
const queueStatsResult = await pool.query(`
|
||||
SELECT
|
||||
job_type as name,
|
||||
COUNT(*) FILTER (WHERE status = 'pending') as waiting,
|
||||
COUNT(*) FILTER (WHERE status = 'running') as active,
|
||||
COUNT(*) FILTER (WHERE status = 'success') as completed,
|
||||
COUNT(*) FILTER (WHERE status IN ('error', 'failed')) as failed,
|
||||
false as paused
|
||||
FROM dispensary_crawl_jobs
|
||||
WHERE created_at > NOW() - INTERVAL '7 days'
|
||||
GROUP BY job_type
|
||||
`);
|
||||
|
||||
const queues: QueueInfo[] = queueStatsResult.rows.map((row: any) => ({
|
||||
name: row.name || 'unknown',
|
||||
waiting: parseInt(row.waiting) || 0,
|
||||
active: parseInt(row.active) || 0,
|
||||
completed: parseInt(row.completed) || 0,
|
||||
failed: parseInt(row.failed) || 0,
|
||||
paused: row.paused || false,
|
||||
}));
|
||||
|
||||
// Get active workers from job_schedules or active heartbeats
|
||||
const workersResult = await pool.query(`
|
||||
SELECT
|
||||
COALESCE(job_config->>'worker_name', job_name) as id,
|
||||
job_name as queue,
|
||||
CASE WHEN enabled THEN 'connected' ELSE 'disconnected' END as status,
|
||||
last_run_at as last_heartbeat
|
||||
FROM job_schedules
|
||||
WHERE enabled = true
|
||||
ORDER BY last_run_at DESC NULLS LAST
|
||||
LIMIT 20
|
||||
`);
|
||||
|
||||
const workers: WorkerInfo[] = workersResult.rows.map((row: any) => ({
|
||||
id: row.id,
|
||||
queue: row.queue,
|
||||
status: row.status,
|
||||
last_heartbeat: row.last_heartbeat?.toISOString() || undefined,
|
||||
}));
|
||||
|
||||
// Determine overall status
|
||||
const hasActiveWorkers = workers.length > 0;
|
||||
const hasFailedJobs = queues.some((q) => q.failed > 0);
|
||||
const hasStuckJobs = queues.some((q) => q.active > 5); // Arbitrary threshold
|
||||
|
||||
let status: 'ok' | 'degraded' | 'error' = 'ok';
|
||||
if (!hasActiveWorkers) {
|
||||
status = 'degraded';
|
||||
} else if (hasFailedJobs || hasStuckJobs) {
|
||||
status = 'degraded';
|
||||
}
|
||||
|
||||
return {
|
||||
status,
|
||||
queues,
|
||||
workers,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return {
|
||||
status: 'error',
|
||||
queues: [],
|
||||
workers: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function getCrawlsHealth(): Promise<CrawlsHealth> {
|
||||
try {
|
||||
const pool = getPool();
|
||||
|
||||
// Get crawl statistics
|
||||
const statsResult = await pool.query(`
|
||||
SELECT
|
||||
(SELECT MAX(completed_at) FROM dispensary_crawl_jobs WHERE status = 'success') as last_run,
|
||||
(SELECT COUNT(*) FROM dispensary_crawl_jobs WHERE status = 'success' AND completed_at > NOW() - INTERVAL '24 hours') as runs_24h,
|
||||
(SELECT COUNT(*) FROM dispensaries WHERE last_crawl_at > NOW() - INTERVAL '24 hours') as stores_recent,
|
||||
(SELECT COUNT(*) FROM dispensaries WHERE menu_type IS NOT NULL AND platform_dispensary_id IS NOT NULL) as stores_total,
|
||||
(SELECT COUNT(*) FROM dispensaries WHERE menu_type = 'dutchie' AND platform_dispensary_id IS NOT NULL AND (last_crawl_at IS NULL OR last_crawl_at < NOW() - INTERVAL '24 hours')) as stores_stale
|
||||
`);
|
||||
|
||||
const stats = statsResult.rows[0] || {};
|
||||
const storesTotal = parseInt(stats.stores_total) || 0;
|
||||
const storesRecent = parseInt(stats.stores_recent) || 0;
|
||||
const staleStores = parseInt(stats.stores_stale) || 0;
|
||||
|
||||
// Calculate freshness percentage
|
||||
const freshPercent = storesTotal > 0 ? (storesRecent / storesTotal) * 100 : 0;
|
||||
|
||||
let status: 'ok' | 'degraded' | 'stale' | 'error' = 'ok';
|
||||
if (freshPercent >= 90) {
|
||||
status = 'ok';
|
||||
} else if (freshPercent >= 50) {
|
||||
status = 'degraded';
|
||||
} else {
|
||||
status = 'stale';
|
||||
}
|
||||
|
||||
return {
|
||||
status,
|
||||
last_run: stats.last_run?.toISOString() || null,
|
||||
runs_last_24h: parseInt(stats.runs_24h) || 0,
|
||||
stores_with_recent_crawl: storesRecent,
|
||||
stores_total: storesTotal,
|
||||
stale_stores: staleStores,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return {
|
||||
status: 'error',
|
||||
last_run: null,
|
||||
runs_last_24h: 0,
|
||||
stores_with_recent_crawl: 0,
|
||||
stores_total: 0,
|
||||
stale_stores: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function getAnalyticsHealth(): Promise<AnalyticsHealth> {
|
||||
try {
|
||||
const pool = getPool();
|
||||
|
||||
// Check analytics/aggregate job runs
|
||||
const statsResult = await pool.query(`
|
||||
SELECT
|
||||
(SELECT MAX(completed_at) FROM job_run_logs WHERE job_name LIKE '%analytics%' AND status = 'success') as last_aggregate,
|
||||
(SELECT COUNT(DISTINCT DATE(started_at)) FROM job_run_logs WHERE job_name LIKE '%analytics%' AND status = 'success' AND started_at > NOW() - INTERVAL '7 days') as runs_7d
|
||||
`);
|
||||
|
||||
const stats = statsResult.rows[0] || {};
|
||||
const runsLast7d = parseInt(stats.runs_7d) || 0;
|
||||
const missingDays = Math.max(0, 7 - runsLast7d);
|
||||
|
||||
let status: 'ok' | 'degraded' | 'stale' | 'error' = 'ok';
|
||||
if (missingDays === 0) {
|
||||
status = 'ok';
|
||||
} else if (missingDays <= 2) {
|
||||
status = 'degraded';
|
||||
} else {
|
||||
status = 'stale';
|
||||
}
|
||||
|
||||
return {
|
||||
status,
|
||||
last_aggregate: stats.last_aggregate?.toISOString() || null,
|
||||
daily_runs_last_7d: runsLast7d,
|
||||
missing_days: missingDays,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return {
|
||||
status: 'error',
|
||||
last_aggregate: null,
|
||||
daily_runs_last_7d: 0,
|
||||
missing_days: 7,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function determineOverallStatus(
|
||||
api: ApiHealth,
|
||||
db: DbHealth,
|
||||
redis: RedisHealth,
|
||||
workers: WorkersHealth,
|
||||
crawls: CrawlsHealth,
|
||||
analytics: AnalyticsHealth
|
||||
): 'ok' | 'degraded' | 'error' {
|
||||
const statuses = [api.status, db.status, redis.status, workers.status, crawls.status, analytics.status];
|
||||
|
||||
if (statuses.includes('error')) {
|
||||
return 'error';
|
||||
}
|
||||
if (statuses.includes('degraded') || statuses.includes('stale')) {
|
||||
return 'degraded';
|
||||
}
|
||||
return 'ok';
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Routes
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/health - Quick API health check (no auth required)
|
||||
*/
|
||||
router.get('/', async (_req: Request, res: Response) => {
|
||||
const health = await getApiHealth();
|
||||
res.json(health);
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/health/db - Postgres health
|
||||
*/
|
||||
router.get('/db', async (_req: Request, res: Response) => {
|
||||
const health = await getDbHealth();
|
||||
const statusCode = health.status === 'ok' ? 200 : 503;
|
||||
res.status(statusCode).json(health);
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/health/redis - Redis health
|
||||
*/
|
||||
router.get('/redis', async (_req: Request, res: Response) => {
|
||||
const health = await getRedisHealth();
|
||||
const statusCode = health.status === 'ok' ? 200 : health.status === 'degraded' ? 200 : 503;
|
||||
res.status(statusCode).json(health);
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/health/workers - Queue and worker status
|
||||
*/
|
||||
router.get('/workers', async (_req: Request, res: Response) => {
|
||||
const health = await getWorkersHealth();
|
||||
const statusCode = health.status === 'ok' ? 200 : health.status === 'degraded' ? 200 : 503;
|
||||
res.status(statusCode).json(health);
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/health/crawls - Crawl activity summary
|
||||
*/
|
||||
router.get('/crawls', async (_req: Request, res: Response) => {
|
||||
const health = await getCrawlsHealth();
|
||||
const statusCode = health.status === 'ok' ? 200 : health.status === 'degraded' ? 200 : 503;
|
||||
res.status(statusCode).json(health);
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/health/analytics - Analytics/aggregates status
|
||||
*/
|
||||
router.get('/analytics', async (_req: Request, res: Response) => {
|
||||
const health = await getAnalyticsHealth();
|
||||
const statusCode = health.status === 'ok' ? 200 : health.status === 'degraded' ? 200 : 503;
|
||||
res.status(statusCode).json(health);
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/health/full - Aggregated view of all subsystems
|
||||
*/
|
||||
router.get('/full', async (_req: Request, res: Response) => {
|
||||
const [api, db, redis, workers, crawls, analytics] = await Promise.all([
|
||||
getApiHealth(),
|
||||
getDbHealth(),
|
||||
getRedisHealth(),
|
||||
getWorkersHealth(),
|
||||
getCrawlsHealth(),
|
||||
getAnalyticsHealth(),
|
||||
]);
|
||||
|
||||
const overallStatus = determineOverallStatus(api, db, redis, workers, crawls, analytics);
|
||||
|
||||
const fullHealth: FullHealth = {
|
||||
status: overallStatus,
|
||||
api,
|
||||
db,
|
||||
redis,
|
||||
workers,
|
||||
crawls,
|
||||
analytics,
|
||||
};
|
||||
|
||||
const statusCode = overallStatus === 'ok' ? 200 : overallStatus === 'degraded' ? 200 : 503;
|
||||
res.status(statusCode).json(fullHealth);
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
// Export helper functions for reuse in other modules
|
||||
export {
|
||||
getApiHealth,
|
||||
getDbHealth,
|
||||
getRedisHealth,
|
||||
getWorkersHealth,
|
||||
getCrawlsHealth,
|
||||
getAnalyticsHealth,
|
||||
};
|
||||
@@ -269,13 +269,12 @@ router.get('/dispensaries/:id/profile', async (req: Request, res: Response) => {
|
||||
dcp.dispensary_id,
|
||||
dcp.profile_key,
|
||||
dcp.profile_name,
|
||||
dcp.platform,
|
||||
dcp.crawler_type,
|
||||
dcp.version,
|
||||
dcp.status,
|
||||
dcp.config,
|
||||
dcp.enabled,
|
||||
dcp.sandbox_attempt_count,
|
||||
dcp.next_retry_at,
|
||||
dcp.sandbox_attempts,
|
||||
dcp.created_at,
|
||||
dcp.updated_at,
|
||||
d.name as dispensary_name,
|
||||
@@ -318,13 +317,12 @@ router.get('/dispensaries/:id/profile', async (req: Request, res: Response) => {
|
||||
id: profile.id,
|
||||
profileKey: profile.profile_key,
|
||||
profileName: profile.profile_name,
|
||||
platform: profile.platform,
|
||||
crawlerType: profile.crawler_type,
|
||||
version: profile.version,
|
||||
status: profile.status || profile.config?.status || 'unknown',
|
||||
config: profile.config,
|
||||
enabled: profile.enabled,
|
||||
sandboxAttemptCount: profile.sandbox_attempt_count,
|
||||
nextRetryAt: profile.next_retry_at,
|
||||
sandboxAttempts: profile.sandbox_attempts || [],
|
||||
createdAt: profile.created_at,
|
||||
updatedAt: profile.updated_at,
|
||||
},
|
||||
@@ -349,7 +347,7 @@ router.get('/dispensaries/:id/crawler-module', async (req: Request, res: Respons
|
||||
|
||||
// Get the profile key for this dispensary
|
||||
const { rows } = await pool.query(`
|
||||
SELECT profile_key, platform
|
||||
SELECT profile_key, crawler_type
|
||||
FROM dispensary_crawler_profiles
|
||||
WHERE dispensary_id = $1 AND enabled = true
|
||||
ORDER BY updated_at DESC
|
||||
@@ -364,14 +362,14 @@ router.get('/dispensaries/:id/crawler-module', async (req: Request, res: Respons
|
||||
}
|
||||
|
||||
const profileKey = rows[0].profile_key;
|
||||
const platform = rows[0].platform || 'dutchie';
|
||||
const crawlerType = rows[0].crawler_type || 'dutchie';
|
||||
|
||||
// Construct file path
|
||||
const modulePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'crawlers',
|
||||
platform,
|
||||
crawlerType,
|
||||
'stores',
|
||||
`${profileKey}.ts`
|
||||
);
|
||||
@@ -381,7 +379,7 @@ router.get('/dispensaries/:id/crawler-module', async (req: Request, res: Respons
|
||||
return res.status(404).json({
|
||||
error: `Crawler module file not found: ${profileKey}.ts`,
|
||||
hasModule: false,
|
||||
expectedPath: `crawlers/${platform}/stores/${profileKey}.ts`,
|
||||
expectedPath: `crawlers/${crawlerType}/stores/${profileKey}.ts`,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -391,9 +389,9 @@ router.get('/dispensaries/:id/crawler-module', async (req: Request, res: Respons
|
||||
res.json({
|
||||
hasModule: true,
|
||||
profileKey,
|
||||
platform,
|
||||
crawlerType,
|
||||
fileName: `${profileKey}.ts`,
|
||||
filePath: `crawlers/${platform}/stores/${profileKey}.ts`,
|
||||
filePath: `crawlers/${crawlerType}/stores/${profileKey}.ts`,
|
||||
content,
|
||||
lines: content.split('\n').length,
|
||||
});
|
||||
|
||||
@@ -301,10 +301,19 @@ function getScopedDispensaryId(req: PublicApiRequest): { dispensaryId: number |
|
||||
* Query params:
|
||||
* - category: Filter by product type (e.g., 'flower', 'edible')
|
||||
* - brand: Filter by brand name
|
||||
* - strain_type: Filter by strain type (indica, sativa, hybrid)
|
||||
* - min_price: Minimum price filter (in dollars)
|
||||
* - max_price: Maximum price filter (in dollars)
|
||||
* - min_thc: Minimum THC percentage filter
|
||||
* - max_thc: Maximum THC percentage filter
|
||||
* - on_special: Only return products on special (true/false)
|
||||
* - search: Search by name or brand
|
||||
* - in_stock_only: Only return in-stock products (default: false)
|
||||
* - limit: Max products to return (default: 100, max: 500)
|
||||
* - offset: Pagination offset (default: 0)
|
||||
* - dispensary_id: (internal keys only) Filter by specific dispensary
|
||||
* - sort_by: Sort field (name, price, thc, updated) (default: name)
|
||||
* - sort_dir: Sort direction (asc, desc) (default: asc)
|
||||
*/
|
||||
router.get('/products', async (req: PublicApiRequest, res: Response) => {
|
||||
try {
|
||||
@@ -322,9 +331,18 @@ router.get('/products', async (req: PublicApiRequest, res: Response) => {
|
||||
const {
|
||||
category,
|
||||
brand,
|
||||
strain_type,
|
||||
min_price,
|
||||
max_price,
|
||||
min_thc,
|
||||
max_thc,
|
||||
on_special,
|
||||
search,
|
||||
in_stock_only = 'false',
|
||||
limit = '100',
|
||||
offset = '0'
|
||||
offset = '0',
|
||||
sort_by = 'name',
|
||||
sort_dir = 'asc'
|
||||
} = req.query;
|
||||
|
||||
// Build query
|
||||
@@ -364,12 +382,63 @@ router.get('/products', async (req: PublicApiRequest, res: Response) => {
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Filter by strain type (indica, sativa, hybrid)
|
||||
if (strain_type) {
|
||||
whereClause += ` AND LOWER(p.strain_type) = LOWER($${paramIndex})`;
|
||||
params.push(strain_type);
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Filter by THC range
|
||||
if (min_thc) {
|
||||
whereClause += ` AND CAST(NULLIF(p.thc, '') AS NUMERIC) >= $${paramIndex}`;
|
||||
params.push(parseFloat(min_thc as string));
|
||||
paramIndex++;
|
||||
}
|
||||
if (max_thc) {
|
||||
whereClause += ` AND CAST(NULLIF(p.thc, '') AS NUMERIC) <= $${paramIndex}`;
|
||||
params.push(parseFloat(max_thc as string));
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Filter by on special
|
||||
if (on_special === 'true' || on_special === '1') {
|
||||
whereClause += ` AND s.special = TRUE`;
|
||||
}
|
||||
|
||||
// Search by name or brand
|
||||
if (search) {
|
||||
whereClause += ` AND (LOWER(p.name) LIKE LOWER($${paramIndex}) OR LOWER(p.brand_name) LIKE LOWER($${paramIndex}))`;
|
||||
params.push(`%${search}%`);
|
||||
paramIndex++;
|
||||
}
|
||||
|
||||
// Enforce limits
|
||||
const limitNum = Math.min(parseInt(limit as string, 10) || 100, 500);
|
||||
const offsetNum = parseInt(offset as string, 10) || 0;
|
||||
|
||||
// Build ORDER BY clause
|
||||
const sortDirection = sort_dir === 'desc' ? 'DESC' : 'ASC';
|
||||
let orderBy = 'p.name ASC';
|
||||
switch (sort_by) {
|
||||
case 'price':
|
||||
orderBy = `s.rec_min_price_cents ${sortDirection} NULLS LAST`;
|
||||
break;
|
||||
case 'thc':
|
||||
orderBy = `CAST(NULLIF(p.thc, '') AS NUMERIC) ${sortDirection} NULLS LAST`;
|
||||
break;
|
||||
case 'updated':
|
||||
orderBy = `p.updated_at ${sortDirection}`;
|
||||
break;
|
||||
case 'name':
|
||||
default:
|
||||
orderBy = `p.name ${sortDirection}`;
|
||||
}
|
||||
|
||||
params.push(limitNum, offsetNum);
|
||||
|
||||
// Query products with latest snapshot data
|
||||
// Note: Price filters use HAVING clause since they reference the snapshot subquery
|
||||
const { rows: products } = await dutchieAzQuery(`
|
||||
SELECT
|
||||
p.id,
|
||||
@@ -406,13 +475,24 @@ router.get('/products', async (req: PublicApiRequest, res: Response) => {
|
||||
LIMIT 1
|
||||
) s ON true
|
||||
${whereClause}
|
||||
ORDER BY p.name ASC
|
||||
${min_price ? `AND (s.rec_min_price_cents / 100.0) >= ${parseFloat(min_price as string)}` : ''}
|
||||
${max_price ? `AND (s.rec_min_price_cents / 100.0) <= ${parseFloat(max_price as string)}` : ''}
|
||||
ORDER BY ${orderBy}
|
||||
LIMIT $${paramIndex} OFFSET $${paramIndex + 1}
|
||||
`, params);
|
||||
|
||||
// Get total count for pagination
|
||||
// Get total count for pagination (include price filters if specified)
|
||||
const { rows: countRows } = await dutchieAzQuery(`
|
||||
SELECT COUNT(*) as total FROM dutchie_products p ${whereClause}
|
||||
SELECT COUNT(*) as total FROM dutchie_products p
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT rec_min_price_cents, special FROM dutchie_product_snapshots
|
||||
WHERE dutchie_product_id = p.id
|
||||
ORDER BY crawled_at DESC
|
||||
LIMIT 1
|
||||
) s ON true
|
||||
${whereClause}
|
||||
${min_price ? `AND (s.rec_min_price_cents / 100.0) >= ${parseFloat(min_price as string)}` : ''}
|
||||
${max_price ? `AND (s.rec_min_price_cents / 100.0) <= ${parseFloat(max_price as string)}` : ''}
|
||||
`, params.slice(0, -2));
|
||||
|
||||
// Transform products to backward-compatible format
|
||||
|
||||
238
backend/src/routes/seo.ts
Normal file
238
backend/src/routes/seo.ts
Normal file
@@ -0,0 +1,238 @@
|
||||
/**
|
||||
* SEO API Routes - Content generation and management for CannaiQ marketing pages
|
||||
*
|
||||
* All content returned by these endpoints is sanitized to ensure
|
||||
* enterprise-safe phrasing with no forbidden terminology.
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { getPool } from '../db/pool';
|
||||
import { authMiddleware } from '../auth/middleware';
|
||||
import { ContentValidator } from '../utils/ContentValidator';
|
||||
import { generateSeoPageWithClaude } from '../services/seoGenerator';
|
||||
|
||||
const router = Router();
|
||||
|
||||
/**
|
||||
* GET /api/seo/page - Get SEO page content by slug (public, sanitized)
|
||||
*/
|
||||
router.get('/page', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { slug } = req.query;
|
||||
if (!slug || typeof slug !== 'string') {
|
||||
return res.status(400).json({ error: 'slug query parameter required' });
|
||||
}
|
||||
|
||||
const pool = getPool();
|
||||
const result = await pool.query(
|
||||
`SELECT id, slug, type, meta_title, meta_description, status, updated_at
|
||||
FROM seo_pages WHERE slug = $1 AND status = 'live'`,
|
||||
[slug]
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Page not found' });
|
||||
}
|
||||
|
||||
const page = result.rows[0];
|
||||
// Always sanitize content before returning (safety net)
|
||||
const content = {
|
||||
metaTitle: page.meta_title,
|
||||
metaDescription: page.meta_description,
|
||||
};
|
||||
const sanitizedContent = ContentValidator.sanitizeContent(content);
|
||||
|
||||
res.json({
|
||||
id: page.id,
|
||||
slug: page.slug,
|
||||
type: page.type,
|
||||
content: sanitizedContent,
|
||||
updatedAt: page.updated_at,
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[SEO] Error fetching page:', error.message);
|
||||
res.status(500).json({ error: 'Failed to fetch SEO page' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/seo/page - Create/update SEO page (admin, auto-sanitizes)
|
||||
*/
|
||||
router.post('/page', authMiddleware, async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { slug, type, metaTitle, metaDescription, status = 'draft' } = req.body;
|
||||
if (!slug || !type) {
|
||||
return res.status(400).json({ error: 'slug and type required' });
|
||||
}
|
||||
|
||||
// Validate and sanitize content
|
||||
const content = { metaTitle, metaDescription };
|
||||
const validation = ContentValidator.validate(content);
|
||||
if (!validation.valid) {
|
||||
console.warn(`[SEO] Forbidden terms sanitized for ${slug}:`, validation.forbiddenTerms);
|
||||
}
|
||||
|
||||
const sanitized = validation.sanitized as { metaTitle?: string; metaDescription?: string };
|
||||
const pool = getPool();
|
||||
|
||||
// Always store sanitized content
|
||||
const result = await pool.query(
|
||||
`INSERT INTO seo_pages (slug, type, page_key, meta_title, meta_description, status, updated_at)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, NOW())
|
||||
ON CONFLICT (slug) DO UPDATE SET
|
||||
type = EXCLUDED.type, meta_title = EXCLUDED.meta_title,
|
||||
meta_description = EXCLUDED.meta_description,
|
||||
status = EXCLUDED.status, updated_at = NOW()
|
||||
RETURNING id, slug, type, status, updated_at`,
|
||||
[slug, type, slug, sanitized.metaTitle || null, sanitized.metaDescription || null, status]
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
page: result.rows[0],
|
||||
sanitized: !validation.valid,
|
||||
forbiddenTermsRemoved: validation.forbiddenTerms,
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[SEO] Error saving page:', error.message);
|
||||
res.status(500).json({ error: 'Failed to save SEO page' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/seo/validate - Validate content for forbidden terms
|
||||
*/
|
||||
router.post('/validate', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { content } = req.body;
|
||||
if (!content) {
|
||||
return res.status(400).json({ error: 'content is required' });
|
||||
}
|
||||
|
||||
const validation = ContentValidator.validate(content);
|
||||
res.json({
|
||||
valid: validation.valid,
|
||||
forbiddenTerms: validation.forbiddenTerms,
|
||||
sanitized: validation.sanitized,
|
||||
approvedPhrases: ContentValidator.getApprovedPhrases(),
|
||||
});
|
||||
} catch (error: any) {
|
||||
res.status(500).json({ error: 'Failed to validate content' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/seo/state/:stateCode - State SEO data with metrics
|
||||
*/
|
||||
router.get('/state/:stateCode', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { stateCode } = req.params;
|
||||
const code = stateCode.toUpperCase();
|
||||
const pool = getPool();
|
||||
|
||||
const metricsResult = await pool.query(`
|
||||
SELECT COUNT(DISTINCT d.id) as dispensary_count,
|
||||
COUNT(DISTINCT p.id) as product_count,
|
||||
COUNT(DISTINCT p.brand_name) as brand_count
|
||||
FROM dispensaries d
|
||||
LEFT JOIN dutchie_products p ON p.dispensary_id = d.id
|
||||
WHERE d.state = $1`, [code]);
|
||||
|
||||
const brandsResult = await pool.query(`
|
||||
SELECT brand_name, COUNT(*) as product_count
|
||||
FROM dutchie_products p JOIN dispensaries d ON p.dispensary_id = d.id
|
||||
WHERE d.state = $1 AND p.brand_name IS NOT NULL
|
||||
GROUP BY brand_name ORDER BY product_count DESC LIMIT 10`, [code]);
|
||||
|
||||
const metrics = metricsResult.rows[0];
|
||||
const response = ContentValidator.sanitizeContent({
|
||||
stateCode: code,
|
||||
metrics: {
|
||||
dispensaryCount: parseInt(metrics.dispensary_count, 10) || 0,
|
||||
productCount: parseInt(metrics.product_count, 10) || 0,
|
||||
brandCount: parseInt(metrics.brand_count, 10) || 0,
|
||||
},
|
||||
topBrands: brandsResult.rows.map(r => ({
|
||||
name: r.brand_name,
|
||||
productCount: parseInt(r.product_count, 10),
|
||||
})),
|
||||
});
|
||||
|
||||
res.json(response);
|
||||
} catch (error: any) {
|
||||
console.error('[SEO] Error fetching state data:', error.message);
|
||||
res.status(500).json({ error: 'Failed to fetch state SEO data' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/seo/pages/:id/generate - Generate SEO content for a page
|
||||
*/
|
||||
router.post('/pages/:id/generate', authMiddleware, async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { id } = req.params;
|
||||
const pageId = parseInt(id, 10);
|
||||
if (isNaN(pageId)) {
|
||||
return res.status(400).json({ error: 'Invalid page ID' });
|
||||
}
|
||||
|
||||
const content = await generateSeoPageWithClaude(pageId);
|
||||
res.json({ success: true, content });
|
||||
} catch (error: any) {
|
||||
console.error('[SEO] Error generating page:', error.message);
|
||||
res.status(500).json({ error: error.message || 'Failed to generate SEO content' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/seo/public/content - Get full SEO page content by slug (public)
|
||||
*/
|
||||
router.get('/public/content', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { slug } = req.query;
|
||||
if (!slug || typeof slug !== 'string') {
|
||||
return res.status(400).json({ error: 'slug query parameter required' });
|
||||
}
|
||||
|
||||
const pool = getPool();
|
||||
|
||||
// Find page and content
|
||||
const result = await pool.query(`
|
||||
SELECT p.id, p.slug, p.type, p.status,
|
||||
c.blocks, c.meta_title, c.meta_description, c.h1,
|
||||
c.canonical_url, c.og_title, c.og_description, c.og_image_url
|
||||
FROM seo_pages p
|
||||
LEFT JOIN seo_page_contents c ON c.page_id = p.id
|
||||
WHERE p.slug = $1 AND p.status = 'live'
|
||||
`, [slug]);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: 'Page not found or not published' });
|
||||
}
|
||||
|
||||
const row = result.rows[0];
|
||||
const sanitized = ContentValidator.sanitizeContent({
|
||||
meta: {
|
||||
title: row.meta_title,
|
||||
description: row.meta_description,
|
||||
h1: row.h1,
|
||||
canonicalUrl: row.canonical_url,
|
||||
ogTitle: row.og_title,
|
||||
ogDescription: row.og_description,
|
||||
ogImageUrl: row.og_image_url
|
||||
},
|
||||
blocks: row.blocks || []
|
||||
});
|
||||
|
||||
res.json({
|
||||
slug: row.slug,
|
||||
type: row.type,
|
||||
...sanitized
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[SEO] Error fetching public content:', error.message);
|
||||
res.status(500).json({ error: 'Failed to fetch SEO content' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
622
backend/src/routes/workers.ts
Normal file
622
backend/src/routes/workers.ts
Normal file
@@ -0,0 +1,622 @@
|
||||
/**
|
||||
* Workers API Routes
|
||||
*
|
||||
* Provider-agnostic worker management and job monitoring.
|
||||
* Replaces legacy /api/dutchie-az/admin/schedules and /api/dutchie-az/monitor/* routes.
|
||||
*
|
||||
* Endpoints:
|
||||
* GET /api/workers - List all workers/schedules
|
||||
* GET /api/workers/active - List currently active workers
|
||||
* GET /api/workers/schedule - Get all job schedules
|
||||
* GET /api/workers/:workerName - Get specific worker details
|
||||
* GET /api/workers/:workerName/scope - Get worker's scope (states, etc.)
|
||||
* GET /api/workers/:workerName/stats - Get worker statistics
|
||||
* GET /api/workers/:workerName/logs - Get worker's recent logs
|
||||
* POST /api/workers/:workerName/trigger - Trigger worker manually
|
||||
*
|
||||
* GET /api/monitor/jobs - Get recent job history
|
||||
* GET /api/monitor/active-jobs - Get currently running jobs
|
||||
* GET /api/monitor/summary - Get monitoring summary
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { getPool } from '../dutchie-az/db/connection';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// ============================================================
|
||||
// WORKER TYPES
|
||||
// ============================================================
|
||||
|
||||
interface Worker {
|
||||
id: number;
|
||||
worker_name: string;
|
||||
run_role: string;
|
||||
scope: string[];
|
||||
description: string;
|
||||
enabled: boolean;
|
||||
base_interval_minutes: number;
|
||||
jitter_minutes: number;
|
||||
next_run_at: string | null;
|
||||
last_run_at: string | null;
|
||||
last_status: string | null;
|
||||
last_seen: string | null;
|
||||
visibility_lost: number;
|
||||
visibility_restored: number;
|
||||
}
|
||||
|
||||
interface JobLog {
|
||||
id: number;
|
||||
worker_name: string;
|
||||
run_role: string;
|
||||
job_name: string;
|
||||
status: string;
|
||||
started_at: string;
|
||||
completed_at: string | null;
|
||||
duration_seconds: number | null;
|
||||
items_processed: number;
|
||||
items_succeeded: number;
|
||||
items_failed: number;
|
||||
error_message: string | null;
|
||||
scope: string[];
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// HELPERS
|
||||
// ============================================================
|
||||
|
||||
function parseScope(jobConfig: any): string[] {
|
||||
if (!jobConfig) return [];
|
||||
if (jobConfig.scope) return Array.isArray(jobConfig.scope) ? jobConfig.scope : [jobConfig.scope];
|
||||
if (jobConfig.states) return Array.isArray(jobConfig.states) ? jobConfig.states : [jobConfig.states];
|
||||
return [];
|
||||
}
|
||||
|
||||
function extractWorkerName(jobName: string, jobConfig: any): string {
|
||||
// Priority: explicit worker_name > job_config.worker_name > derive from job_name
|
||||
if (jobConfig?.worker_name) return jobConfig.worker_name;
|
||||
|
||||
// Extract from job_name like "dutchie_az_product_crawl" -> "ProductCrawl"
|
||||
const parts = jobName.replace(/^(dutchie_)?az_?/i, '').split('_');
|
||||
return parts.map(p => p.charAt(0).toUpperCase() + p.slice(1).toLowerCase()).join('');
|
||||
}
|
||||
|
||||
function extractRunRole(jobName: string, jobConfig: any): string {
|
||||
if (jobConfig?.run_role) return jobConfig.run_role;
|
||||
|
||||
// Map job names to roles
|
||||
const roleMap: Record<string, string> = {
|
||||
'menu_detection': 'StoreDiscovery',
|
||||
'menu_detection_single': 'StoreDiscovery',
|
||||
'dutchie_product_crawl': 'ProductSync',
|
||||
'product_crawl': 'ProductSync',
|
||||
'analytics_refresh': 'Analytics',
|
||||
'id_resolution': 'IdResolution',
|
||||
};
|
||||
|
||||
for (const [key, role] of Object.entries(roleMap)) {
|
||||
if (jobName.toLowerCase().includes(key.toLowerCase())) {
|
||||
return role;
|
||||
}
|
||||
}
|
||||
|
||||
return 'General';
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// WORKERS ROUTES
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/workers - List all workers/schedules
|
||||
*/
|
||||
// List every schedule as a Worker DTO, enabled ones first.
router.get('/', async (_req: Request, res: Response) => {
  try {
    const pool = getPool();
    const { rows } = await pool.query(`
      SELECT
        id,
        job_name,
        description,
        enabled,
        base_interval_minutes,
        jitter_minutes,
        next_run_at,
        last_run_at,
        last_status,
        job_config
      FROM job_schedules
      ORDER BY enabled DESC, last_run_at DESC NULLS LAST
    `);

    // Map raw schedule rows into the Worker shape the admin UI expects.
    const workers: Worker[] = rows.map((row: any) => ({
      id: row.id,
      worker_name: extractWorkerName(row.job_name, row.job_config),
      run_role: extractRunRole(row.job_name, row.job_config),
      scope: parseScope(row.job_config),
      description: row.description || row.job_name,
      enabled: row.enabled,
      base_interval_minutes: row.base_interval_minutes,
      jitter_minutes: row.jitter_minutes,
      next_run_at: row.next_run_at?.toISOString() || null,
      last_run_at: row.last_run_at?.toISOString() || null,
      last_status: row.last_status,
      // last_seen mirrors last_run_at; no separate heartbeat column here.
      last_seen: row.last_run_at?.toISOString() || null,
      // Visibility counters are not tracked yet; always reported as 0.
      visibility_lost: 0,
      visibility_restored: 0,
    }));

    res.json({ success: true, workers });
  } catch (error: any) {
    console.error('[Workers] Error listing workers:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/workers/active - List currently active workers
|
||||
*/
|
||||
// List workers that currently hold a running crawl job.
router.get('/active', async (_req: Request, res: Response) => {
  try {
    const pool = getPool();
    // DISTINCT ON (claimed_by) + ORDER BY ... started_at DESC keeps only
    // the most recently started running job per worker.
    const { rows } = await pool.query(`
      SELECT DISTINCT ON (claimed_by)
        claimed_by as worker_id,
        worker_hostname,
        job_type,
        started_at,
        last_heartbeat_at
      FROM dispensary_crawl_jobs
      WHERE status = 'running'
        AND claimed_by IS NOT NULL
      ORDER BY claimed_by, started_at DESC
    `);

    const activeWorkers = rows.map((row: any) => ({
      worker_id: row.worker_id,
      hostname: row.worker_hostname,
      current_job_type: row.job_type,
      started_at: row.started_at?.toISOString(),
      last_heartbeat: row.last_heartbeat_at?.toISOString(),
      run_role: extractRunRole(row.job_type, null),
    }));

    res.json({ success: true, active_workers: activeWorkers, count: activeWorkers.length });
  } catch (error: any) {
    console.error('[Workers] Error getting active workers:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/workers/schedule - Get all job schedules (alias for /)
|
||||
*/
|
||||
// Raw schedule listing, ordered by soonest next run.
// Unlike GET /, this returns unmapped job_schedules rows.
router.get('/schedule', async (req: Request, res: Response) => {
  // Delegate to main workers endpoint
  // NOTE(review): getPool() is called outside the try block — if it
  // throws, the error bypasses the JSON error handler below.
  const pool = getPool();
  try {
    const { rows } = await pool.query(`
      SELECT
        id,
        job_name,
        description,
        enabled,
        base_interval_minutes,
        jitter_minutes,
        next_run_at,
        last_run_at,
        last_status,
        job_config
      FROM job_schedules
      ORDER BY next_run_at ASC NULLS LAST
    `);

    res.json({ success: true, schedules: rows });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/workers/:workerIdOrName - Get specific worker details
|
||||
*/
|
||||
// Look up one worker by numeric schedule id or fuzzy job_name match.
// NOTE(review): this param route is registered before /jobs, /active-jobs
// and /summary below on the SAME router, so those paths resolve here
// ("jobs" becomes :workerIdOrName). Confirm the monitor endpoints are
// mounted on a separate router/prefix, otherwise they are unreachable.
router.get('/:workerIdOrName', async (req: Request, res: Response) => {
  try {
    const { workerIdOrName } = req.params;
    const pool = getPool();

    // Try to find by ID or job_name.
    // parseInt(...) || 0 maps non-numeric params to id 0 — assumes no
    // schedule uses id 0; the ILIKE wildcard match then decides.
    const { rows } = await pool.query(`
      SELECT
        id,
        job_name,
        description,
        enabled,
        base_interval_minutes,
        jitter_minutes,
        next_run_at,
        last_run_at,
        last_status,
        job_config
      FROM job_schedules
      WHERE id = $1::int OR job_name ILIKE $2
      LIMIT 1
    `, [parseInt(workerIdOrName) || 0, `%${workerIdOrName}%`]);

    if (rows.length === 0) {
      return res.status(404).json({ success: false, error: 'Worker not found' });
    }

    const row = rows[0];
    // Same row -> Worker mapping as GET /.
    const worker: Worker = {
      id: row.id,
      worker_name: extractWorkerName(row.job_name, row.job_config),
      run_role: extractRunRole(row.job_name, row.job_config),
      scope: parseScope(row.job_config),
      description: row.description || row.job_name,
      enabled: row.enabled,
      base_interval_minutes: row.base_interval_minutes,
      jitter_minutes: row.jitter_minutes,
      next_run_at: row.next_run_at?.toISOString() || null,
      last_run_at: row.last_run_at?.toISOString() || null,
      last_status: row.last_status,
      last_seen: row.last_run_at?.toISOString() || null,
      visibility_lost: 0,
      visibility_restored: 0,
    };

    res.json({ success: true, worker });
  } catch (error: any) {
    console.error('[Workers] Error getting worker:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/workers/:workerIdOrName/scope - Get worker's scope
|
||||
*/
|
||||
// Return just the normalized scope array for one worker.
router.get('/:workerIdOrName/scope', async (req: Request, res: Response) => {
  try {
    const { workerIdOrName } = req.params;
    const pool = getPool();

    // Same id-or-fuzzy-name lookup as the detail route.
    const { rows } = await pool.query(`
      SELECT job_config
      FROM job_schedules
      WHERE id = $1::int OR job_name ILIKE $2
      LIMIT 1
    `, [parseInt(workerIdOrName) || 0, `%${workerIdOrName}%`]);

    if (rows.length === 0) {
      return res.status(404).json({ success: false, error: 'Worker not found' });
    }

    const scope = parseScope(rows[0].job_config);
    res.json({ success: true, scope });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/workers/:workerIdOrName/stats - Get worker statistics
|
||||
*/
|
||||
// 7-day aggregate run statistics for one worker's schedule.
router.get('/:workerIdOrName/stats', async (req: Request, res: Response) => {
  try {
    const { workerIdOrName } = req.params;
    const pool = getPool();

    // Get schedule info (id-or-fuzzy-name lookup).
    const scheduleResult = await pool.query(`
      SELECT id, job_name FROM job_schedules
      WHERE id = $1::int OR job_name ILIKE $2
      LIMIT 1
    `, [parseInt(workerIdOrName) || 0, `%${workerIdOrName}%`]);

    if (scheduleResult.rows.length === 0) {
      return res.status(404).json({ success: false, error: 'Worker not found' });
    }

    const scheduleId = scheduleResult.rows[0].id;

    // Get stats over the trailing 7 days.
    // 'partial' runs are counted as failures alongside 'error'.
    const statsResult = await pool.query(`
      SELECT
        COUNT(*) FILTER (WHERE status = 'success') as success_count,
        COUNT(*) FILTER (WHERE status IN ('error', 'partial')) as failure_count,
        COUNT(*) as total_runs,
        AVG(EXTRACT(EPOCH FROM (completed_at - started_at))) as avg_duration_seconds,
        SUM(items_processed) as total_items_processed,
        MAX(completed_at) as last_completed
      FROM job_run_logs
      WHERE schedule_id = $1
        AND started_at > NOW() - INTERVAL '7 days'
    `, [scheduleId]);

    const stats = statsResult.rows[0];
    // Aggregates come back as strings/NULL from the driver; coerce with
    // parseInt/parseFloat and default everything to 0.
    res.json({
      success: true,
      stats: {
        success_count: parseInt(stats.success_count) || 0,
        failure_count: parseInt(stats.failure_count) || 0,
        total_runs: parseInt(stats.total_runs) || 0,
        avg_duration_seconds: parseFloat(stats.avg_duration_seconds) || 0,
        total_items_processed: parseInt(stats.total_items_processed) || 0,
        last_completed: stats.last_completed?.toISOString() || null,
      }
    });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/workers/:workerIdOrName/logs - Get worker's recent logs
|
||||
*/
|
||||
// Recent run logs for one worker's schedule (default 20, via ?limit=).
router.get('/:workerIdOrName/logs', async (req: Request, res: Response) => {
  try {
    const { workerIdOrName } = req.params;
    const limit = parseInt(req.query.limit as string) || 20;
    const pool = getPool();

    // Get schedule info (id-or-fuzzy-name lookup).
    const scheduleResult = await pool.query(`
      SELECT id, job_name, job_config FROM job_schedules
      WHERE id = $1::int OR job_name ILIKE $2
      LIMIT 1
    `, [parseInt(workerIdOrName) || 0, `%${workerIdOrName}%`]);

    if (scheduleResult.rows.length === 0) {
      return res.status(404).json({ success: false, error: 'Worker not found' });
    }

    const schedule = scheduleResult.rows[0];

    const { rows } = await pool.query(`
      SELECT
        id,
        job_name,
        status,
        started_at,
        completed_at,
        EXTRACT(EPOCH FROM (completed_at - started_at)) as duration_seconds,
        items_processed,
        items_succeeded,
        items_failed,
        error_message,
        metadata
      FROM job_run_logs
      WHERE schedule_id = $1
      ORDER BY started_at DESC
      LIMIT $2
    `, [schedule.id, limit]);

    // worker_name/run_role/scope come from the parent schedule, not from
    // the individual log row.
    const logs: JobLog[] = rows.map((row: any) => ({
      id: row.id,
      worker_name: extractWorkerName(schedule.job_name, schedule.job_config),
      run_role: extractRunRole(schedule.job_name, schedule.job_config),
      job_name: row.job_name,
      status: row.status,
      started_at: row.started_at?.toISOString(),
      completed_at: row.completed_at?.toISOString() || null,
      // duration is NULL for still-running rows (completed_at is NULL).
      duration_seconds: row.duration_seconds ? Math.round(row.duration_seconds) : null,
      items_processed: row.items_processed || 0,
      items_succeeded: row.items_succeeded || 0,
      items_failed: row.items_failed || 0,
      error_message: row.error_message,
      scope: parseScope(schedule.job_config),
    }));

    res.json({ success: true, logs });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* POST /api/workers/:workerIdOrName/trigger - Trigger worker manually
|
||||
*/
|
||||
// Manually trigger a worker by pulling its next_run_at up to NOW().
// Relies on the scheduler polling job_schedules; nothing runs inline here.
router.post('/:workerIdOrName/trigger', async (req: Request, res: Response) => {
  try {
    const { workerIdOrName } = req.params;
    const pool = getPool();

    // Get schedule info (id-or-fuzzy-name lookup).
    const scheduleResult = await pool.query(`
      SELECT id, job_name FROM job_schedules
      WHERE id = $1::int OR job_name ILIKE $2
      LIMIT 1
    `, [parseInt(workerIdOrName) || 0, `%${workerIdOrName}%`]);

    if (scheduleResult.rows.length === 0) {
      return res.status(404).json({ success: false, error: 'Worker not found' });
    }

    const scheduleId = scheduleResult.rows[0].id;

    // Set next_run_at to now to trigger immediately
    await pool.query(`
      UPDATE job_schedules
      SET next_run_at = NOW()
      WHERE id = $1
    `, [scheduleId]);

    res.json({ success: true, message: 'Worker triggered', schedule_id: scheduleId });
  } catch (error: any) {
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
// ============================================================
|
||||
// MONITOR ROUTES (for /api/monitor prefix)
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/monitor/jobs - Get recent job history
|
||||
*/
|
||||
// Recent job history, optionally filtered by ?status=, limited by ?limit=.
// NOTE(review): registered AFTER '/:workerIdOrName' on the same router —
// GET /jobs will match that param route first unless this router is also
// mounted at a separate /api/monitor prefix; verify the mounting.
router.get('/jobs', async (req: Request, res: Response) => {
  try {
    const limit = parseInt(req.query.limit as string) || 50;
    const status = req.query.status as string | undefined;
    const pool = getPool();

    // Dynamic but fully parameterized query ($n placeholders only).
    let query = `
      SELECT
        j.id,
        j.job_name,
        j.status,
        j.started_at,
        j.completed_at,
        EXTRACT(EPOCH FROM (j.completed_at - j.started_at)) as duration_seconds,
        j.items_processed,
        j.items_succeeded,
        j.items_failed,
        j.error_message,
        j.metadata,
        s.job_config
      FROM job_run_logs j
      LEFT JOIN job_schedules s ON j.schedule_id = s.id
      WHERE 1=1
    `;
    const params: any[] = [];

    if (status) {
      params.push(status);
      query += ` AND j.status = $${params.length}`;
    }

    params.push(limit);
    query += ` ORDER BY j.started_at DESC LIMIT $${params.length}`;

    const { rows } = await pool.query(query, params);

    const jobs: JobLog[] = rows.map((row: any) => ({
      id: row.id,
      worker_name: extractWorkerName(row.job_name, row.job_config),
      run_role: extractRunRole(row.job_name, row.job_config),
      job_name: row.job_name,
      status: row.status,
      started_at: row.started_at?.toISOString(),
      completed_at: row.completed_at?.toISOString() || null,
      duration_seconds: row.duration_seconds ? Math.round(row.duration_seconds) : null,
      items_processed: row.items_processed || 0,
      items_succeeded: row.items_succeeded || 0,
      items_failed: row.items_failed || 0,
      error_message: row.error_message,
      scope: parseScope(row.job_config),
    }));

    res.json({ success: true, jobs, count: jobs.length });
  } catch (error: any) {
    console.error('[Monitor] Error getting jobs:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/monitor/active-jobs - Get currently running jobs
|
||||
*/
|
||||
// All currently running crawl jobs, newest first.
// NOTE(review): registered AFTER '/:workerIdOrName' on the same router —
// this path is shadowed by the param route; verify the mount prefix.
router.get('/active-jobs', async (req: Request, res: Response) => {
  try {
    const pool = getPool();

    const { rows } = await pool.query(`
      SELECT
        id,
        dispensary_id,
        job_type,
        status,
        worker_hostname,
        started_at,
        last_heartbeat_at,
        products_found,
        error_message,
        metadata
      FROM dispensary_crawl_jobs
      WHERE status = 'running'
      ORDER BY started_at DESC
    `);

    const activeJobs = rows.map((row: any) => ({
      id: row.id,
      dispensary_id: row.dispensary_id,
      job_type: row.job_type,
      worker_name: extractWorkerName(row.job_type, null),
      run_role: extractRunRole(row.job_type, null),
      status: row.status,
      hostname: row.worker_hostname,
      started_at: row.started_at?.toISOString(),
      last_heartbeat: row.last_heartbeat_at?.toISOString(),
      products_found: row.products_found || 0,
      error_message: row.error_message,
    }));

    res.json({ success: true, active_jobs: activeJobs, count: activeJobs.length });
  } catch (error: any) {
    console.error('[Monitor] Error getting active jobs:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/monitor/summary - Get monitoring summary
|
||||
*/
|
||||
// Dashboard rollup: schedule counts, 24h run outcomes, active crawls.
// NOTE(review): registered AFTER '/:workerIdOrName' on the same router —
// this path is shadowed by the param route; verify the mount prefix.
router.get('/summary', async (req: Request, res: Response) => {
  try {
    const pool = getPool();

    // Get summary stats — three independent queries run concurrently.
    const [scheduleStats, jobStats, activeJobs] = await Promise.all([
      pool.query(`
        SELECT
          COUNT(*) as total_schedules,
          COUNT(*) FILTER (WHERE enabled = true) as enabled_schedules,
          COUNT(*) FILTER (WHERE last_status = 'success') as last_success,
          COUNT(*) FILTER (WHERE last_status IN ('error', 'partial')) as last_failed
        FROM job_schedules
      `),
      pool.query(`
        SELECT
          COUNT(*) as total_runs,
          COUNT(*) FILTER (WHERE status = 'success') as success_count,
          COUNT(*) FILTER (WHERE status IN ('error', 'partial')) as failure_count,
          COUNT(*) FILTER (WHERE status = 'running') as running_count
        FROM job_run_logs
        WHERE started_at > NOW() - INTERVAL '24 hours'
      `),
      pool.query(`
        SELECT COUNT(*) as active_count
        FROM dispensary_crawl_jobs
        WHERE status = 'running'
      `)
    ]);

    const schedules = scheduleStats.rows[0];
    const jobs = jobStats.rows[0];
    const active = activeJobs.rows[0];

    // COUNT(*) comes back as a string from the driver; coerce to int.
    res.json({
      success: true,
      summary: {
        schedules: {
          total: parseInt(schedules.total_schedules) || 0,
          enabled: parseInt(schedules.enabled_schedules) || 0,
          last_success: parseInt(schedules.last_success) || 0,
          last_failed: parseInt(schedules.last_failed) || 0,
        },
        jobs_24h: {
          total: parseInt(jobs.total_runs) || 0,
          success: parseInt(jobs.success_count) || 0,
          failed: parseInt(jobs.failure_count) || 0,
          running: parseInt(jobs.running_count) || 0,
        },
        active_crawl_jobs: parseInt(active.active_count) || 0,
      }
    });
  } catch (error: any) {
    console.error('[Monitor] Error getting summary:', error);
    res.status(500).json({ success: false, error: error.message });
  }
});
|
||||
|
||||
export default router;
|
||||
200
backend/src/services/seoGenerator.ts
Normal file
200
backend/src/services/seoGenerator.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
/**
|
||||
* SEO Page Generator Service
|
||||
*
|
||||
* Generates SEO content for pages using structured prompts,
|
||||
* sanitizes output, and stores in seo_page_contents.
|
||||
*/
|
||||
|
||||
import { getPool } from '../db/pool';
|
||||
|
||||
const pool = getPool();
|
||||
import { ContentValidator } from '../utils/ContentValidator';
|
||||
|
||||
interface SeoPage {
|
||||
id: number;
|
||||
type: string;
|
||||
slug: string;
|
||||
page_key: string;
|
||||
primary_keyword: string | null;
|
||||
}
|
||||
|
||||
interface GeneratedSeoPayload {
|
||||
title: string;
|
||||
metaDescription: string;
|
||||
h1: string;
|
||||
blocks: any[];
|
||||
}
|
||||
|
||||
interface SeoPageContent {
|
||||
id: number;
|
||||
pageId: number;
|
||||
blocks: any[];
|
||||
metaTitle: string;
|
||||
metaDescription: string;
|
||||
h1: string;
|
||||
}
|
||||
|
||||
interface GenerationSpec {
|
||||
type: string;
|
||||
slug: string;
|
||||
primaryKeyword: string;
|
||||
metrics?: any;
|
||||
topBrands?: any[];
|
||||
competitorName?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build generation spec based on page type
|
||||
*/
|
||||
/**
 * Build generation spec based on page type.
 *
 * For 'state' pages, enriches the spec with live metrics from
 * mv_state_metrics and the top 6 brands by product count.
 * For 'competitor_alternative' pages, derives the competitor name from
 * the last slug segment. Other types get only the base spec.
 */
async function buildSeoGenerationSpec(page: SeoPage): Promise<GenerationSpec> {
  const spec: GenerationSpec = {
    type: page.type,
    slug: page.slug,
    primaryKeyword: page.primary_keyword || `${page.type} cannabis data`
  };

  if (page.type === 'state') {
    // Fetch state metrics.
    // Assumes page_key looks like "states/az" — TODO confirm format.
    const stateCode = page.page_key.replace('states/', '').toUpperCase();
    const metricsResult = await pool.query(`
      SELECT store_count, total_products, unique_brands, state_name
      FROM mv_state_metrics WHERE state = $1
    `, [stateCode]);

    if (metricsResult.rows[0]) {
      spec.metrics = {
        dispensaryCount: parseInt(metricsResult.rows[0].store_count) || 0,
        productCount: parseInt(metricsResult.rows[0].total_products) || 0,
        brandCount: parseInt(metricsResult.rows[0].unique_brands) || 0,
        stateName: metricsResult.rows[0].state_name || stateCode
      };
    }

    // Fetch top brands
    const brandsResult = await pool.query(`
      SELECT brand_name, COUNT(*) as count
      FROM dutchie_products p
      JOIN dispensaries d ON p.dispensary_id = d.id
      WHERE d.state = $1 AND p.brand_name IS NOT NULL
      GROUP BY brand_name ORDER BY count DESC LIMIT 6
    `, [stateCode]);
    spec.topBrands = brandsResult.rows;
  }

  if (page.type === 'competitor_alternative') {
    // Last slug segment, hyphens to spaces (e.g. "vs/acme-corp" -> "acme corp").
    spec.competitorName = page.slug.split('/').pop()?.replace(/-/g, ' ') || 'competitor';
  }

  return spec;
}
|
||||
|
||||
/**
|
||||
* Generate SEO content based on spec (uses templates, not actual Claude API)
|
||||
*/
|
||||
/**
 * Generate SEO content based on spec (uses templates, not actual Claude API).
 *
 * State pages with metrics get a full five-block layout (hero, stats,
 * intro, topBrands, cta); everything else falls back to a generic
 * two-block hero+cta template.
 */
function generateSeoContent(spec: GenerationSpec): GeneratedSeoPayload {
  // Template-based generation (production would call Claude API)
  if (spec.type === 'state' && spec.metrics) {
    const { stateName, dispensaryCount, productCount, brandCount } = spec.metrics;
    // NOTE(review): toLocaleString() output depends on the server locale —
    // confirm that is acceptable for generated public copy.
    return {
      title: `${stateName} Cannabis Market Data | CannaIQ`,
      metaDescription: `Real-time ${stateName} cannabis market intelligence. Monitor ${dispensaryCount}+ dispensaries, ${productCount.toLocaleString()}+ products, and ${brandCount}+ brands with continuously refreshed data.`,
      h1: `${stateName} Cannabis Market Intelligence`,
      blocks: [
        {
          type: 'hero',
          headline: `${stateName} Cannabis Market Data`,
          subheadline: `Continuously refreshed pricing, product, and brand data from ${dispensaryCount}+ ${stateName} dispensaries.`,
          ctaPrimary: { text: 'Get Market Data', href: `/demo?state=${spec.slug.split('/').pop()}` }
        },
        {
          type: 'stats',
          headline: 'Market Snapshot',
          items: [
            { value: dispensaryCount.toString(), label: 'Dispensaries', description: 'Monitored in real-time' },
            { value: productCount.toLocaleString(), label: 'Products', description: 'With live pricing' },
            { value: brandCount.toString(), label: 'Brands', description: 'Tracked across retailers' }
          ]
        },
        {
          type: 'intro',
          content: `CannaIQ monitors ${dispensaryCount} dispensaries, ${productCount.toLocaleString()} products, and ${brandCount} brands in ${stateName}, with listings and availability continuously updated throughout the day.`
        },
        {
          type: 'topBrands',
          headline: `Top ${stateName} Cannabis Brands`,
          brands: spec.topBrands?.slice(0, 6).map(b => ({ name: b.brand_name, productCount: parseInt(b.count) })) || []
        },
        {
          type: 'cta',
          headline: `Get ${stateName} Market Intelligence`,
          subheadline: 'Access real-time pricing, brand distribution, and competitive insights.',
          ctaPrimary: { text: 'Request Demo', href: '/demo' }
        }
      ]
    };
  }

  // Generic fallback for non-state pages or states without metrics.
  return {
    title: `${spec.primaryKeyword} | CannaIQ`,
    metaDescription: `Cannabis market intelligence for ${spec.primaryKeyword}. Real-time data continuously refreshed.`,
    h1: spec.primaryKeyword,
    blocks: [
      { type: 'hero', headline: spec.primaryKeyword, subheadline: 'Real-time cannabis market data', ctaPrimary: { text: 'Learn More', href: '/demo' } },
      { type: 'cta', headline: 'Get Started', subheadline: 'Request a demo today.', ctaPrimary: { text: 'Request Demo', href: '/demo' } }
    ]
  };
}
|
||||
|
||||
/**
|
||||
* Main generation pipeline
|
||||
*/
|
||||
/**
 * Main generation pipeline: load page -> build spec -> generate ->
 * sanitize -> upsert content -> mark page live.
 *
 * NOTE(review): despite the name, this implementation is template-based
 * (see generateSeoContent) and makes no Claude API call — confirm the
 * name is intentional as a forward-looking interface.
 *
 * @param pageId - seo_pages.id to (re)generate content for.
 * @returns The upserted content row.
 * @throws Error when no seo_pages row exists for pageId.
 */
export async function generateSeoPageWithClaude(pageId: number): Promise<SeoPageContent> {
  // 1. Load page
  const pageResult = await pool.query(
    'SELECT id, type, slug, page_key, primary_keyword FROM seo_pages WHERE id = $1',
    [pageId]
  );
  if (pageResult.rows.length === 0) {
    throw new Error(`SEO page not found: ${pageId}`);
  }
  const page = pageResult.rows[0] as SeoPage;

  // 2. Build spec
  const spec = await buildSeoGenerationSpec(page);

  // 3. Generate content
  const payload = generateSeoContent(spec);

  // 4. Sanitize (removes forbidden terms)
  const sanitized = ContentValidator.sanitizeContent(payload) as GeneratedSeoPayload;

  // 5. Upsert content (one content row per page via ON CONFLICT (page_id))
  const result = await pool.query(`
    INSERT INTO seo_page_contents (page_id, blocks, meta_title, meta_description, h1, updated_at)
    VALUES ($1, $2, $3, $4, $5, NOW())
    ON CONFLICT (page_id) DO UPDATE SET
      blocks = EXCLUDED.blocks,
      meta_title = EXCLUDED.meta_title,
      meta_description = EXCLUDED.meta_description,
      h1 = EXCLUDED.h1,
      updated_at = NOW()
    RETURNING id, page_id, blocks, meta_title, meta_description, h1
  `, [pageId, JSON.stringify(sanitized.blocks), sanitized.title, sanitized.metaDescription, sanitized.h1]);

  // 6. Update page status — generation always publishes the page.
  await pool.query(
    `UPDATE seo_pages SET status = 'live', last_generated_at = NOW() WHERE id = $1`,
    [pageId]
  );

  const row = result.rows[0];
  return {
    id: row.id,
    pageId: row.page_id,
    blocks: row.blocks,
    metaTitle: row.meta_title,
    metaDescription: row.meta_description,
    h1: row.h1
  };
}
|
||||
179
backend/src/utils/ContentValidator.ts
Normal file
179
backend/src/utils/ContentValidator.ts
Normal file
@@ -0,0 +1,179 @@
|
||||
/**
|
||||
* ContentValidator - Redaction and Sanitization for Public Content
|
||||
*
|
||||
* Ensures all public-facing content (SEO pages, homepage, API responses)
|
||||
* uses approved enterprise-safe phrasing and never exposes internal
|
||||
* data collection mechanics.
|
||||
*
|
||||
* Source of truth for forbidden/approved terminology.
|
||||
*/
|
||||
|
||||
// Forbidden terms that must NEVER appear in public content.
// Joined with '|' into FORBIDDEN_PATTERN below, so entries must not
// contain regex metacharacters (currently none do).
const FORBIDDEN_TERMS = [
  'crawl', 'crawled', 'crawler', 'crawling',
  'scrape', 'scraped', 'scraper', 'scraping',
  'snapshot', 'snapshots',
  'ingestion', 'pipeline', 'ETL',
  'worker', 'workers',
  'bot', 'bots',
  'extraction', 'harvesting', 'polling',
  'data collector', 'collectors',
] as const;
|
||||
|
||||
// Approved replacement phrases.
// NOTE(review): not referenced anywhere in this file — presumably kept
// as editorial guidance or consumed by another module; verify before
// removing.
const APPROVED_PHRASES = [
  'continuously refreshed',
  'updated daily',
  'up-to-date data',
  'current availability',
  'latest listings',
  'real-time market insights',
  'real-time cannabis market intelligence',
  'continuously monitored markets',
  'fresh, reliable market information',
] as const;
|
||||
|
||||
// Context-aware replacements (pattern -> replacement), applied in order
// by sanitizeString. Patterns use word boundaries and the 'i' flag, so
// replacements are case-insensitive whole-word substitutions.
const REPLACEMENTS: Array<{ pattern: RegExp; replacement: string }> = [
  // Data collection verbs → neutral phrasing
  { pattern: /\b(crawl|scrape)s?\b/gi, replacement: 'refresh' },
  { pattern: /\b(crawled|scraped)\b/gi, replacement: 'updated' },
  { pattern: /\b(crawling|scraping)\b/gi, replacement: 'refreshing' },
  { pattern: /\b(crawler|scraper)s?\b/gi, replacement: 'data service' },

  // Technical terms → business terms
  { pattern: /\bsnapshots?\b/gi, replacement: 'market data' },
  { pattern: /\bingestion\b/gi, replacement: 'data processing' },
  { pattern: /\bpipeline\b/gi, replacement: 'data flow' },
  { pattern: /\bETL\b/g, replacement: 'data processing' },
  { pattern: /\bworkers?\b/gi, replacement: 'services' },
  { pattern: /\bbots?\b/gi, replacement: 'automated systems' },
  { pattern: /\bextraction\b/gi, replacement: 'data gathering' },
  { pattern: /\bharvesting\b/gi, replacement: 'collecting' },
  { pattern: /\bpolling\b/gi, replacement: 'monitoring' },
  { pattern: /\bdata collectors?\b/gi, replacement: 'data services' },
];
|
||||
|
||||
// Build regex for detection: whole-word, case-insensitive match of any
// forbidden term.
// NOTE(review): the 'g' flag makes RegExp.test() stateful (lastIndex
// persists across calls) — callers must reset lastIndex or results can
// alternate on repeated checks of the same string.
const FORBIDDEN_PATTERN = new RegExp(
  `\\b(${FORBIDDEN_TERMS.join('|')})\\b`,
  'gi'
);
|
||||
|
||||
export class ContentValidator {
|
||||
/**
|
||||
* Check if text contains any forbidden terms
|
||||
*/
|
||||
static hasForbiddenTerms(text: string): boolean {
|
||||
if (typeof text !== 'string') return false;
|
||||
return FORBIDDEN_PATTERN.test(text);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of forbidden terms found in text
|
||||
*/
|
||||
static findForbiddenTerms(text: string): string[] {
|
||||
if (typeof text !== 'string') return [];
|
||||
const matches = text.match(FORBIDDEN_PATTERN);
|
||||
return matches ? Array.from(new Set(matches.map(m => m.toLowerCase()))) : [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize a string by replacing forbidden terms
|
||||
*/
|
||||
static sanitizeString(text: string): string {
|
||||
if (typeof text !== 'string') return text;
|
||||
|
||||
let sanitized = text;
|
||||
for (const { pattern, replacement } of REPLACEMENTS) {
|
||||
sanitized = sanitized.replace(pattern, replacement);
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively sanitize content (strings, objects, arrays)
|
||||
* Preserves structure while sanitizing all string values
|
||||
*/
|
||||
static sanitizeContent<T>(content: T): T {
|
||||
if (content === null || content === undefined) {
|
||||
return content;
|
||||
}
|
||||
|
||||
if (typeof content === 'string') {
|
||||
return this.sanitizeString(content) as T;
|
||||
}
|
||||
|
||||
if (Array.isArray(content)) {
|
||||
return content.map(item => this.sanitizeContent(item)) as T;
|
||||
}
|
||||
|
||||
if (typeof content === 'object') {
|
||||
const sanitized: Record<string, unknown> = {};
|
||||
for (const [key, value] of Object.entries(content)) {
|
||||
sanitized[key] = this.sanitizeContent(value);
|
||||
}
|
||||
return sanitized as T;
|
||||
}
|
||||
|
||||
// Numbers, booleans, etc. pass through unchanged
|
||||
return content;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate content and throw if forbidden terms found
|
||||
* Use for build-time / deploy-time validation
|
||||
*/
|
||||
static validateOrThrow(content: unknown, context?: string): void {
|
||||
const text = typeof content === 'string'
|
||||
? content
|
||||
: JSON.stringify(content);
|
||||
|
||||
const forbidden = this.findForbiddenTerms(text);
|
||||
if (forbidden.length > 0) {
|
||||
const ctx = context ? ` in ${context}` : '';
|
||||
throw new Error(
|
||||
`Forbidden terms found${ctx}: ${forbidden.join(', ')}. ` +
|
||||
`Public content must use approved enterprise-safe phrasing.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate and return validation result (non-throwing)
|
||||
*/
|
||||
static validate(content: unknown): {
|
||||
valid: boolean;
|
||||
forbiddenTerms: string[];
|
||||
sanitized: unknown;
|
||||
} {
|
||||
const text = typeof content === 'string'
|
||||
? content
|
||||
: JSON.stringify(content);
|
||||
|
||||
const forbiddenTerms = this.findForbiddenTerms(text);
|
||||
const sanitized = this.sanitizeContent(content);
|
||||
|
||||
return {
|
||||
valid: forbiddenTerms.length === 0,
|
||||
forbiddenTerms,
|
||||
sanitized,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get approved phrases for reference
|
||||
*/
|
||||
static getApprovedPhrases(): readonly string[] {
|
||||
return APPROVED_PHRASES;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get forbidden terms for reference
|
||||
*/
|
||||
static getForbiddenTerms(): readonly string[] {
|
||||
return FORBIDDEN_TERMS;
|
||||
}
|
||||
}
|
||||
|
||||
export default ContentValidator;
|
||||
backend/src/utils/HomepageValidator.ts — new file, 115 lines (@@ -0,0 +1,115 @@)
|
||||
/**
|
||||
* HomepageValidator - Content validation for homepage and marketing pages
|
||||
*
|
||||
* Validates that homepage content adheres to enterprise-safe phrasing
|
||||
* and contains no forbidden terminology.
|
||||
*/
|
||||
|
||||
import { ContentValidator } from './ContentValidator';
|
||||
|
||||
/**
 * Shape of structured homepage / marketing-page content.
 * All sections are optional; arbitrary extra sections are permitted via the
 * index signature and are validated/sanitized recursively like the rest.
 */
export interface HomepageContent {
  // Hero banner copy and call-to-action button labels.
  hero?: {
    headline?: string;
    subheadline?: string;
    ctaPrimary?: string;
    ctaSecondary?: string;
  };
  // Feature cards: short title plus longer description.
  features?: Array<{
    title: string;
    description: string;
  }>;
  // Headline statistics as label/value string pairs.
  stats?: Array<{
    label: string;
    value: string;
  }>;
  // Customer quotes with attribution.
  testimonials?: Array<{
    quote: string;
    author: string;
  }>;
  // Frequently-asked-question entries.
  faq?: Array<{
    question: string;
    answer: string;
  }>;
  // Any additional page sections not modeled above.
  [key: string]: unknown;
}
|
||||
|
||||
/**
 * Result of a non-throwing homepage content check.
 */
export interface ValidationResult {
  // true when no forbidden terms were detected.
  valid: boolean;
  // Lowercased forbidden terms found in the content (empty when valid).
  forbiddenTerms: string[];
  // Advisory notes about borderline phrasing; they do not affect `valid`.
  warnings: string[];
}
|
||||
|
||||
/**
 * Validate homepage content for forbidden terms.
 * Throws an error if forbidden terms are found (for build/deploy).
 *
 * @param content - Structured homepage content to inspect (scanned via its
 *   JSON form, so nested values and keys are all covered).
 * @throws Error listing the forbidden terms, with context "homepage content".
 */
export function validateHomepageContent(content: HomepageContent): void {
  ContentValidator.validateOrThrow(content, 'homepage content');
}
|
||||
|
||||
/**
|
||||
* Validate homepage content (non-throwing)
|
||||
* Returns validation result with details
|
||||
*/
|
||||
export function checkHomepageContent(content: HomepageContent): ValidationResult {
|
||||
const result = ContentValidator.validate(content);
|
||||
|
||||
const warnings: string[] = [];
|
||||
|
||||
// Check for potential issues even if technically valid
|
||||
const jsonContent = JSON.stringify(content).toLowerCase();
|
||||
|
||||
// Warn about terms that might be close to forbidden
|
||||
if (jsonContent.includes('data') && jsonContent.includes('collect')) {
|
||||
warnings.push('Content mentions "data" and "collect" - ensure context is appropriate');
|
||||
}
|
||||
|
||||
if (jsonContent.includes('automat')) {
|
||||
warnings.push('Content mentions automation - verify phrasing is enterprise-appropriate');
|
||||
}
|
||||
|
||||
return {
|
||||
valid: result.valid,
|
||||
forbiddenTerms: result.forbiddenTerms,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Sanitize homepage content.
 * Returns cleaned content with forbidden terms replaced by approved
 * phrasing; the input is rebuilt recursively, not mutated in place.
 *
 * @param content - Homepage content to clean.
 * @returns Cleaned content with the same structure as the input.
 */
export function sanitizeHomepageContent<T extends HomepageContent>(content: T): T {
  return ContentValidator.sanitizeContent(content);
}
|
||||
|
||||
/**
|
||||
* Validate and sanitize in one call
|
||||
* Logs warnings but returns sanitized content
|
||||
*/
|
||||
export function processHomepageContent<T extends HomepageContent>(
|
||||
content: T,
|
||||
options?: { logWarnings?: boolean }
|
||||
): T {
|
||||
const check = checkHomepageContent(content);
|
||||
|
||||
if (options?.logWarnings && check.warnings.length > 0) {
|
||||
console.warn('[HomepageValidator] Warnings:', check.warnings);
|
||||
}
|
||||
|
||||
if (!check.valid) {
|
||||
console.warn(
|
||||
'[HomepageValidator] Forbidden terms found and sanitized:',
|
||||
check.forbiddenTerms
|
||||
);
|
||||
}
|
||||
|
||||
return sanitizeHomepageContent(content);
|
||||
}
|
||||
|
||||
// Default export bundles the validator helpers for consumers that prefer a
// single namespace-style import over the individual named exports.
export default {
  validateHomepageContent,
  checkHomepageContent,
  sanitizeHomepageContent,
  processHomepageContent,
};
|
||||
backend/src/utils/__tests__/ContentValidator.test.ts — new file, 124 lines (@@ -0,0 +1,124 @@)
|
||||
/**
|
||||
* ContentValidator Unit Tests
|
||||
*/
|
||||
|
||||
import { ContentValidator } from '../ContentValidator';
|
||||
|
||||
// Exercises the full public surface of ContentValidator: detection,
// term listing, string/deep sanitization, and both validation entry points.
describe('ContentValidator', () => {
  describe('hasForbiddenTerms', () => {
    // NOTE(review): FORBIDDEN_PATTERN is a /g regex, so RegExp.test() is
    // stateful via lastIndex; the outcome of these back-to-back calls can
    // depend on assertion order unless the implementation resets lastIndex.
    it('detects forbidden terms', () => {
      expect(ContentValidator.hasForbiddenTerms('We crawl dispensary data')).toBe(true);
      expect(ContentValidator.hasForbiddenTerms('Data is scraped daily')).toBe(true);
      expect(ContentValidator.hasForbiddenTerms('Our crawler runs hourly')).toBe(true);
      expect(ContentValidator.hasForbiddenTerms('ETL pipeline processes data')).toBe(true);
      expect(ContentValidator.hasForbiddenTerms('Workers harvest data')).toBe(true);
    });

    it('returns false for clean text', () => {
      expect(ContentValidator.hasForbiddenTerms('Real-time market insights')).toBe(false);
      expect(ContentValidator.hasForbiddenTerms('Continuously refreshed data')).toBe(false);
      expect(ContentValidator.hasForbiddenTerms('Latest cannabis listings')).toBe(false);
    });
  });

  describe('findForbiddenTerms', () => {
    // Expects de-duplicated, lowercased terms.
    it('returns all forbidden terms found', () => {
      const result = ContentValidator.findForbiddenTerms(
        'Our crawler scrapes dispensary snapshots using a pipeline'
      );
      expect(result).toContain('crawler');
      expect(result).toContain('scrapes');
      expect(result).toContain('snapshots');
      expect(result).toContain('pipeline');
    });
  });

  describe('sanitizeString', () => {
    // Pins specific REPLACEMENTS mappings (crawled→updated, scraper→data
    // service, snapshots→market data).
    it('replaces forbidden terms', () => {
      expect(ContentValidator.sanitizeString('Data is crawled daily'))
        .toBe('Data is updated daily');
      expect(ContentValidator.sanitizeString('Our scraper runs'))
        .toBe('Our data service runs');
      expect(ContentValidator.sanitizeString('View snapshots'))
        .toBe('View market data');
    });

    it('preserves clean text', () => {
      const clean = 'Real-time cannabis market intelligence';
      expect(ContentValidator.sanitizeString(clean)).toBe(clean);
    });
  });

  describe('sanitizeContent', () => {
    // Structure (keys, array order) must survive; only string values change.
    it('sanitizes nested objects', () => {
      const dirty = {
        title: 'Our Crawler',
        blocks: [
          { type: 'text', content: 'Data is scraped hourly' },
          { type: 'stats', items: ['100 snapshots per day'] },
        ],
      };

      const result = ContentValidator.sanitizeContent(dirty);

      // NOTE(review): replacements are lowercase, so 'Our Crawler' becomes
      // 'Our data service' (capitalization of the original term is lost).
      expect(result.title).toBe('Our data service');
      expect(result.blocks[0].content).toBe('Data is updated hourly');
      expect(result.blocks[1].items[0]).toBe('100 market data per day');
    });

    it('handles null and undefined', () => {
      expect(ContentValidator.sanitizeContent(null)).toBe(null);
      expect(ContentValidator.sanitizeContent(undefined)).toBe(undefined);
    });

    it('preserves non-string values', () => {
      const input = { count: 42, active: true, data: null };
      const result = ContentValidator.sanitizeContent(input);
      expect(result.count).toBe(42);
      expect(result.active).toBe(true);
      expect(result.data).toBe(null);
    });
  });

  describe('validate', () => {
    it('returns valid=true for clean content', () => {
      const result = ContentValidator.validate({
        title: 'Real-time Market Data',
        description: 'Continuously refreshed cannabis insights',
      });
      expect(result.valid).toBe(true);
      expect(result.forbiddenTerms).toHaveLength(0);
    });

    it('returns valid=false with forbidden terms list', () => {
      const result = ContentValidator.validate({
        title: 'Our Crawler',
        description: 'Scraping dispensary data',
      });
      expect(result.valid).toBe(false);
      expect(result.forbiddenTerms).toContain('crawler');
      expect(result.forbiddenTerms).toContain('scraping');
    });

    it('returns sanitized version', () => {
      const result = ContentValidator.validate({
        title: 'Crawler Status',
      });
      // Lowercase replacement at sentence start is the current (intended?)
      // behavior — this assertion pins it.
      expect(result.sanitized).toEqual({ title: 'data service Status' });
    });
  });

  describe('validateOrThrow', () => {
    it('throws on forbidden terms', () => {
      expect(() => {
        ContentValidator.validateOrThrow({ text: 'Our crawler' }, 'test content');
      }).toThrow(/Forbidden terms found in test content/);
    });

    it('does not throw on clean content', () => {
      expect(() => {
        ContentValidator.validateOrThrow({ text: 'Real-time data' });
      }).not.toThrow();
    });
  });
});
|
||||
Reference in New Issue
Block a user