Compare commits
12 Commits
feat/ui-po
...
feat/query
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
daab0ae9b2 | ||
|
|
2ed088b4d8 | ||
|
|
d3c49fa246 | ||
|
|
52cb5014fd | ||
|
|
50654be910 | ||
|
|
cdab71a1ee | ||
|
|
a35976b9e9 | ||
|
|
c68210c485 | ||
|
|
f2864bd2ad | ||
|
|
eca9e85242 | ||
|
|
e1c67dcee5 | ||
|
|
34c8a8cc67 |
@@ -69,6 +69,7 @@ steps:
|
||||
|
||||
# ===========================================
|
||||
# MASTER DEPLOY: Parallel Docker builds
|
||||
# NOTE: cache_from/cache_to removed due to plugin bug splitting on commas
|
||||
# ===========================================
|
||||
docker-backend:
|
||||
image: woodpeckerci/plugin-docker-buildx
|
||||
@@ -86,10 +87,6 @@ steps:
|
||||
from_secret: registry_password
|
||||
platforms: linux/amd64
|
||||
provenance: false
|
||||
cache_from:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/dispensary-scraper:cache"
|
||||
cache_to:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/dispensary-scraper:cache,mode=max"
|
||||
build_args:
|
||||
APP_BUILD_VERSION: ${CI_COMMIT_SHA:0:8}
|
||||
APP_GIT_SHA: ${CI_COMMIT_SHA}
|
||||
@@ -116,10 +113,6 @@ steps:
|
||||
from_secret: registry_password
|
||||
platforms: linux/amd64
|
||||
provenance: false
|
||||
cache_from:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/cannaiq-frontend:cache"
|
||||
cache_to:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/cannaiq-frontend:cache,mode=max"
|
||||
depends_on: []
|
||||
when:
|
||||
branch: master
|
||||
@@ -141,10 +134,6 @@ steps:
|
||||
from_secret: registry_password
|
||||
platforms: linux/amd64
|
||||
provenance: false
|
||||
cache_from:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/findadispo-frontend:cache"
|
||||
cache_to:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/findadispo-frontend:cache,mode=max"
|
||||
depends_on: []
|
||||
when:
|
||||
branch: master
|
||||
@@ -166,10 +155,6 @@ steps:
|
||||
from_secret: registry_password
|
||||
platforms: linux/amd64
|
||||
provenance: false
|
||||
cache_from:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/findagram-frontend:cache"
|
||||
cache_to:
|
||||
- "type=registry,ref=code.cannabrands.app/creationshop/findagram-frontend:cache,mode=max"
|
||||
depends_on: []
|
||||
when:
|
||||
branch: master
|
||||
|
||||
218
backend/docs/CODEBASE_MAP.md
Normal file
218
backend/docs/CODEBASE_MAP.md
Normal file
@@ -0,0 +1,218 @@
|
||||
# CannaiQ Backend Codebase Map
|
||||
|
||||
**Last Updated:** 2025-12-12
|
||||
**Purpose:** Help Claude and developers understand which code is current vs deprecated
|
||||
|
||||
---
|
||||
|
||||
## Quick Reference: What to Use
|
||||
|
||||
### For Crawling/Scraping
|
||||
| Task | Use This | NOT This |
|
||||
|------|----------|----------|
|
||||
| Fetch products | `src/tasks/handlers/payload-fetch.ts` | `src/hydration/*` |
|
||||
| Process products | `src/tasks/handlers/product-refresh.ts` | `src/scraper-v2/*` |
|
||||
| GraphQL client | `src/platforms/dutchie/client.ts` | `src/dutchie-az/services/graphql-client.ts` |
|
||||
| Worker system | `src/tasks/task-worker.ts` | `src/dutchie-az/services/worker.ts` |
|
||||
|
||||
### For Database
|
||||
| Task | Use This | NOT This |
|
||||
|------|----------|----------|
|
||||
| Get DB pool | `src/db/pool.ts` | `src/dutchie-az/db/connection.ts` |
|
||||
| Run migrations | `src/db/migrate.ts` (CLI only) | Never import at runtime |
|
||||
| Query products | `store_products` table | `products`, `dutchie_products` |
|
||||
| Query stores | `dispensaries` table | `stores` table |
|
||||
|
||||
### For Discovery
|
||||
| Task | Use This |
|
||||
|------|----------|
|
||||
| Discover stores | `src/discovery/*.ts` |
|
||||
| Run discovery | `npx tsx src/scripts/run-discovery.ts` |
|
||||
|
||||
---
|
||||
|
||||
## Directory Status
|
||||
|
||||
### ACTIVE DIRECTORIES (Use These)
|
||||
|
||||
```
|
||||
src/
|
||||
├── auth/ # JWT/session auth, middleware
|
||||
├── db/ # Database pool, migrations
|
||||
├── discovery/ # Dutchie store discovery pipeline
|
||||
├── middleware/ # Express middleware
|
||||
├── multi-state/ # Multi-state query support
|
||||
├── platforms/ # Platform-specific clients (Dutchie, Jane, etc)
|
||||
│ └── dutchie/ # THE Dutchie client - use this one
|
||||
├── routes/ # Express API routes
|
||||
├── services/ # Core services (logger, scheduler, etc)
|
||||
├── tasks/ # Task system (workers, handlers, scheduler)
|
||||
│ └── handlers/ # Task handlers (payload_fetch, product_refresh, etc)
|
||||
├── types/ # TypeScript types
|
||||
└── utils/ # Utilities (storage, image processing)
|
||||
```
|
||||
|
||||
### DEPRECATED DIRECTORIES (DO NOT USE)
|
||||
|
||||
```
|
||||
src/
|
||||
├── hydration/ # DEPRECATED - Old pipeline approach
|
||||
├── scraper-v2/ # DEPRECATED - Old scraper engine
|
||||
├── canonical-hydration/# DEPRECATED - Merged into tasks/handlers
|
||||
├── dutchie-az/ # PARTIAL - Some parts deprecated, some active
|
||||
│ ├── db/ # DEPRECATED - Use src/db/pool.ts
|
||||
│ └── services/ # PARTIAL - worker.ts still runs, graphql-client.ts deprecated
|
||||
├── portals/ # FUTURE - Not yet implemented
|
||||
├── seo/ # PARTIAL - Settings work, templates WIP
|
||||
└── system/ # DEPRECATED - Old orchestration system
|
||||
```
|
||||
|
||||
### DEPRECATED FILES (DO NOT USE)
|
||||
|
||||
```
|
||||
src/dutchie-az/db/connection.ts # Use src/db/pool.ts instead
|
||||
src/dutchie-az/services/graphql-client.ts # Use src/platforms/dutchie/client.ts
|
||||
src/hydration/*.ts # Entire directory deprecated
|
||||
src/scraper-v2/*.ts # Entire directory deprecated
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Key Files Reference
|
||||
|
||||
### Entry Points
|
||||
| File | Purpose | Status |
|
||||
|------|---------|--------|
|
||||
| `src/index.ts` | Main Express server | ACTIVE |
|
||||
| `src/dutchie-az/services/worker.ts` | Worker process entry | ACTIVE |
|
||||
| `src/tasks/task-worker.ts` | Task worker (new system) | ACTIVE |
|
||||
|
||||
### Dutchie Integration
|
||||
| File | Purpose | Status |
|
||||
|------|---------|--------|
|
||||
| `src/platforms/dutchie/client.ts` | GraphQL client, hashes, curl | **PRIMARY** |
|
||||
| `src/platforms/dutchie/queries.ts` | High-level query functions | ACTIVE |
|
||||
| `src/platforms/dutchie/index.ts` | Re-exports | ACTIVE |
|
||||
|
||||
### Task Handlers
|
||||
| File | Purpose | Status |
|
||||
|------|---------|--------|
|
||||
| `src/tasks/handlers/payload-fetch.ts` | Fetch products from Dutchie | **PRIMARY** |
|
||||
| `src/tasks/handlers/product-refresh.ts` | Process payload into DB | **PRIMARY** |
|
||||
| `src/tasks/handlers/menu-detection.ts` | Detect menu type | ACTIVE |
|
||||
| `src/tasks/handlers/id-resolution.ts` | Resolve platform IDs | ACTIVE |
|
||||
| `src/tasks/handlers/image-download.ts` | Download product images | ACTIVE |
|
||||
|
||||
### Database
|
||||
| File | Purpose | Status |
|
||||
|------|---------|--------|
|
||||
| `src/db/pool.ts` | Canonical DB pool | **PRIMARY** |
|
||||
| `src/db/migrate.ts` | Migration runner (CLI only) | CLI ONLY |
|
||||
| `src/db/auto-migrate.ts` | Auto-run migrations on startup | ACTIVE |
|
||||
|
||||
### Configuration
|
||||
| File | Purpose | Status |
|
||||
|------|---------|--------|
|
||||
| `.env` | Environment variables | ACTIVE |
|
||||
| `package.json` | Dependencies | ACTIVE |
|
||||
| `tsconfig.json` | TypeScript config | ACTIVE |
|
||||
|
||||
---
|
||||
|
||||
## GraphQL Hashes (CRITICAL)
|
||||
|
||||
The correct hashes are in `src/platforms/dutchie/client.ts`:
|
||||
|
||||
```typescript
|
||||
export const GRAPHQL_HASHES = {
|
||||
FilteredProducts: 'ee29c060826dc41c527e470e9ae502c9b2c169720faa0a9f5d25e1b9a530a4a0',
|
||||
GetAddressBasedDispensaryData: '13461f73abf7268770dfd05fe7e10c523084b2bb916a929c08efe3d87531977b',
|
||||
ConsumerDispensaries: '0a5bfa6ca1d64ae47bcccb7c8077c87147cbc4e6982c17ceec97a2a4948b311b',
|
||||
GetAllCitiesByState: 'ae547a0466ace5a48f91e55bf6699eacd87e3a42841560f0c0eabed5a0a920e6',
|
||||
};
|
||||
```
|
||||
|
||||
**ALWAYS** use `Status: 'Active'` for FilteredProducts (not `null` or `'All'`).
|
||||
|
||||
---
|
||||
|
||||
## Scripts Reference
|
||||
|
||||
### Useful Scripts (in `src/scripts/`)
|
||||
| Script | Purpose |
|
||||
|--------|---------|
|
||||
| `run-discovery.ts` | Run Dutchie discovery |
|
||||
| `crawl-single-store.ts` | Test crawl a single store |
|
||||
| `test-dutchie-graphql.ts` | Test GraphQL queries |
|
||||
|
||||
### One-Off Scripts (probably don't need)
|
||||
| Script | Purpose |
|
||||
|--------|---------|
|
||||
| `harmonize-az-dispensaries.ts` | One-time data cleanup |
|
||||
| `bootstrap-stores-for-dispensaries.ts` | One-time migration |
|
||||
| `backfill-*.ts` | Historical backfill scripts |
|
||||
|
||||
---
|
||||
|
||||
## API Routes
|
||||
|
||||
### Active Routes (in `src/routes/`)
|
||||
| Route File | Mount Point | Purpose |
|
||||
|------------|-------------|---------|
|
||||
| `auth.ts` | `/api/auth` | Login/logout/session |
|
||||
| `stores.ts` | `/api/stores` | Store CRUD |
|
||||
| `dashboard.ts` | `/api/dashboard` | Dashboard stats |
|
||||
| `workers.ts` | `/api/workers` | Worker monitoring |
|
||||
| `pipeline.ts` | `/api/pipeline` | Crawl triggers |
|
||||
| `discovery.ts` | `/api/discovery` | Discovery management |
|
||||
| `analytics.ts` | `/api/analytics` | Analytics queries |
|
||||
| `wordpress.ts` | `/api/v1/wordpress` | WordPress plugin API |
|
||||
|
||||
---
|
||||
|
||||
## Documentation Files
|
||||
|
||||
### Current Docs (in `backend/docs/`)
|
||||
| Doc | Purpose | Currency |
|
||||
|-----|---------|----------|
|
||||
| `TASK_WORKFLOW_2024-12-10.md` | Task system architecture | CURRENT |
|
||||
| `WORKER_TASK_ARCHITECTURE.md` | Worker/task design | CURRENT |
|
||||
| `CRAWL_PIPELINE.md` | Crawl pipeline overview | CURRENT |
|
||||
| `ORGANIC_SCRAPING_GUIDE.md` | Browser-based scraping | CURRENT |
|
||||
| `CODEBASE_MAP.md` | This file | CURRENT |
|
||||
| `ANALYTICS_V2_EXAMPLES.md` | Analytics API examples | CURRENT |
|
||||
| `BRAND_INTELLIGENCE_API.md` | Brand API docs | CURRENT |
|
||||
|
||||
### Root Docs
|
||||
| Doc | Purpose | Currency |
|
||||
|-----|---------|----------|
|
||||
| `CLAUDE.md` | Claude instructions | **PRIMARY** |
|
||||
| `README.md` | Project overview | NEEDS UPDATE |
|
||||
|
||||
---
|
||||
|
||||
## Common Mistakes to Avoid
|
||||
|
||||
1. **Don't use `src/hydration/`** - It's an old approach that was superseded by the task system
|
||||
|
||||
2. **Don't use `src/dutchie-az/db/connection.ts`** - Use `src/db/pool.ts` instead
|
||||
|
||||
3. **Don't import `src/db/migrate.ts` at runtime** - It will crash. Only use for CLI migrations.
|
||||
|
||||
4. **Don't query `stores` table** - It's empty. Use `dispensaries`.
|
||||
|
||||
5. **Don't query `products` table** - It's empty. Use `store_products`.
|
||||
|
||||
6. **Don't use wrong GraphQL hash** - Always get hash from `GRAPHQL_HASHES` in client.ts
|
||||
|
||||
7. **Don't use `Status: null`** - It returns 0 products. Use `Status: 'Active'`.
|
||||
|
||||
---
|
||||
|
||||
## When in Doubt
|
||||
|
||||
1. Check if the file is imported in `src/index.ts` - if not, it may be deprecated
|
||||
2. Check the last modified date - older files may be stale
|
||||
3. Look for `DEPRECATED` comments in the code
|
||||
4. Ask: "Is there a newer version of this in `src/tasks/` or `src/platforms/`?"
|
||||
5. Read the relevant doc in `docs/` before modifying code
|
||||
343
backend/docs/QUERY_API.md
Normal file
343
backend/docs/QUERY_API.md
Normal file
@@ -0,0 +1,343 @@
|
||||
# CannaiQ Query API
|
||||
|
||||
Query raw crawl payload data with flexible filters, sorting, and aggregation.
|
||||
|
||||
## Base URL
|
||||
|
||||
```
|
||||
https://cannaiq.co/api/payloads
|
||||
```
|
||||
|
||||
## Authentication
|
||||
|
||||
Include your API key in the header:
|
||||
```
|
||||
X-API-Key: your-api-key
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Endpoints
|
||||
|
||||
### 1. Query Products
|
||||
|
||||
Filter and search products from a store's latest crawl data.
|
||||
|
||||
```
|
||||
GET /api/payloads/store/{dispensaryId}/query
|
||||
```
|
||||
|
||||
#### Query Parameters
|
||||
|
||||
| Parameter | Type | Description |
|
||||
|-----------|------|-------------|
|
||||
| `brand` | string | Filter by brand name (partial match) |
|
||||
| `category` | string | Filter by category (flower, vape, edible, etc.) |
|
||||
| `subcategory` | string | Filter by subcategory |
|
||||
| `strain_type` | string | Filter by strain (indica, sativa, hybrid, cbd) |
|
||||
| `in_stock` | boolean | Filter by stock status (true/false) |
|
||||
| `price_min` | number | Minimum price |
|
||||
| `price_max` | number | Maximum price |
|
||||
| `thc_min` | number | Minimum THC percentage |
|
||||
| `thc_max` | number | Maximum THC percentage |
|
||||
| `search` | string | Search product name (partial match) |
|
||||
| `fields` | string | Comma-separated fields to return |
|
||||
| `limit` | number | Max results (default 100, max 1000) |
|
||||
| `offset` | number | Skip results for pagination |
|
||||
| `sort` | string | Sort by: name, price, thc, brand |
|
||||
| `order` | string | Sort order: asc, desc |
|
||||
|
||||
#### Available Fields
|
||||
|
||||
When using `fields` parameter, you can request:
|
||||
- `id` - Product ID
|
||||
- `name` - Product name
|
||||
- `brand` - Brand name
|
||||
- `category` - Product category
|
||||
- `subcategory` - Product subcategory
|
||||
- `strain_type` - Indica/Sativa/Hybrid/CBD
|
||||
- `price` - Current price
|
||||
- `price_med` - Medical price
|
||||
- `price_rec` - Recreational price
|
||||
- `thc` - THC percentage
|
||||
- `cbd` - CBD percentage
|
||||
- `weight` - Product weight/size
|
||||
- `status` - Stock status
|
||||
- `in_stock` - Boolean in-stock flag
|
||||
- `image_url` - Product image
|
||||
- `description` - Product description
|
||||
|
||||
#### Examples
|
||||
|
||||
**Get all flower products under $40:**
|
||||
```
|
||||
GET /api/payloads/store/112/query?category=flower&price_max=40
|
||||
```
|
||||
|
||||
**Search for "Blue Dream" with high THC:**
|
||||
```
|
||||
GET /api/payloads/store/112/query?search=blue+dream&thc_min=20
|
||||
```
|
||||
|
||||
**Get only name and price for Alien Labs products:**
|
||||
```
|
||||
GET /api/payloads/store/112/query?brand=Alien+Labs&fields=name,price,thc
|
||||
```
|
||||
|
||||
**Get top 10 highest THC products:**
|
||||
```
|
||||
GET /api/payloads/store/112/query?sort=thc&order=desc&limit=10
|
||||
```
|
||||
|
||||
**Paginate through in-stock products:**
|
||||
```
|
||||
GET /api/payloads/store/112/query?in_stock=true&limit=50&offset=0
|
||||
GET /api/payloads/store/112/query?in_stock=true&limit=50&offset=50
|
||||
```
|
||||
|
||||
#### Response
|
||||
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"dispensaryId": 112,
|
||||
"payloadId": 45,
|
||||
"fetchedAt": "2025-12-11T10:30:00Z",
|
||||
"query": {
|
||||
"filters": {
|
||||
"brand": "Alien Labs",
|
||||
"category": null,
|
||||
"price_max": null
|
||||
},
|
||||
"sort": "price",
|
||||
"order": "asc",
|
||||
"limit": 100,
|
||||
"offset": 0
|
||||
},
|
||||
"pagination": {
|
||||
"total": 15,
|
||||
"returned": 15,
|
||||
"limit": 100,
|
||||
"offset": 0,
|
||||
"has_more": false
|
||||
},
|
||||
"products": [
|
||||
{
|
||||
"id": "507f1f77bcf86cd799439011",
|
||||
"name": "Alien Labs - Baklava 3.5g",
|
||||
"brand": "Alien Labs",
|
||||
"category": "flower",
|
||||
"strain_type": "hybrid",
|
||||
"price": 55,
|
||||
"thc": "28.5",
|
||||
"in_stock": true
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 2. Aggregate Data
|
||||
|
||||
Group products and calculate metrics.
|
||||
|
||||
```
|
||||
GET /api/payloads/store/{dispensaryId}/aggregate
|
||||
```
|
||||
|
||||
#### Query Parameters
|
||||
|
||||
| Parameter | Type | Description |
|
||||
|-----------|------|-------------|
|
||||
| `group_by` | string | **Required.** Field to group by: brand, category, subcategory, strain_type |
|
||||
| `metrics` | string | Comma-separated metrics (default: count) |
|
||||
|
||||
#### Available Metrics
|
||||
|
||||
- `count` - Number of products
|
||||
- `avg_price` - Average price
|
||||
- `min_price` - Lowest price
|
||||
- `max_price` - Highest price
|
||||
- `avg_thc` - Average THC percentage
|
||||
- `in_stock_count` - Number of in-stock products
|
||||
|
||||
#### Examples
|
||||
|
||||
**Count products by brand:**
|
||||
```
|
||||
GET /api/payloads/store/112/aggregate?group_by=brand
|
||||
```
|
||||
|
||||
**Get price stats by category:**
|
||||
```
|
||||
GET /api/payloads/store/112/aggregate?group_by=category&metrics=count,avg_price,min_price,max_price
|
||||
```
|
||||
|
||||
**Get THC averages by strain type:**
|
||||
```
|
||||
GET /api/payloads/store/112/aggregate?group_by=strain_type&metrics=count,avg_thc
|
||||
```
|
||||
|
||||
**Brand analysis with stock info:**
|
||||
```
|
||||
GET /api/payloads/store/112/aggregate?group_by=brand&metrics=count,avg_price,in_stock_count
|
||||
```
|
||||
|
||||
#### Response
|
||||
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"dispensaryId": 112,
|
||||
"payloadId": 45,
|
||||
"fetchedAt": "2025-12-11T10:30:00Z",
|
||||
"groupBy": "brand",
|
||||
"metrics": ["count", "avg_price"],
|
||||
"totalProducts": 450,
|
||||
"groupCount": 85,
|
||||
"aggregations": [
|
||||
{
|
||||
"brand": "Alien Labs",
|
||||
"count": 15,
|
||||
"avg_price": 52.33
|
||||
},
|
||||
{
|
||||
"brand": "Connected",
|
||||
"count": 12,
|
||||
"avg_price": 48.50
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 3. Compare Stores (Price Comparison)
|
||||
|
||||
Query the same data from multiple stores and compare in your app:
|
||||
|
||||
```javascript
|
||||
// Get flower prices from Store A
|
||||
const storeA = await fetch('/api/payloads/store/112/query?category=flower&fields=name,brand,price');
|
||||
|
||||
// Get flower prices from Store B
|
||||
const storeB = await fetch('/api/payloads/store/115/query?category=flower&fields=name,brand,price');
|
||||
|
||||
// Compare in your app
|
||||
const dataA = await storeA.json();
|
||||
const dataB = await storeB.json();
|
||||
|
||||
// Find matching products and compare prices
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 4. Price History
|
||||
|
||||
For historical price data, use the snapshots endpoint:
|
||||
|
||||
```
|
||||
GET /api/v1/products/{productId}/history?days=30
|
||||
```
|
||||
|
||||
Or compare payloads over time:
|
||||
|
||||
```
|
||||
GET /api/payloads/store/{dispensaryId}/diff?from={payloadId1}&to={payloadId2}
|
||||
```
|
||||
|
||||
The diff endpoint shows:
|
||||
- Products added
|
||||
- Products removed
|
||||
- Price changes
|
||||
- Stock changes
|
||||
|
||||
---
|
||||
|
||||
### 5. List Stores
|
||||
|
||||
Get available dispensaries to query:
|
||||
|
||||
```
|
||||
GET /api/stores
|
||||
```
|
||||
|
||||
Returns all stores with their IDs, names, and locations.
|
||||
|
||||
---
|
||||
|
||||
## Use Cases
|
||||
|
||||
### Price Comparison App
|
||||
|
||||
```javascript
|
||||
// 1. Get stores in Arizona
|
||||
const stores = await fetch('/api/stores?state=AZ').then(r => r.json());
|
||||
|
||||
// 2. Query flower prices from each store
|
||||
const prices = await Promise.all(
|
||||
stores.map(store =>
|
||||
fetch(`/api/payloads/store/${store.id}/query?category=flower&fields=name,brand,price`)
|
||||
.then(r => r.json())
|
||||
)
|
||||
);
|
||||
|
||||
// 3. Build comparison matrix in your app
|
||||
```
|
||||
|
||||
### Brand Analytics Dashboard
|
||||
|
||||
```javascript
|
||||
// Get brand presence across stores
|
||||
const brandData = await Promise.all(
|
||||
storeIds.map(id =>
|
||||
fetch(`/api/payloads/store/${id}/aggregate?group_by=brand&metrics=count,avg_price`)
|
||||
.then(r => r.json())
|
||||
)
|
||||
);
|
||||
|
||||
// Aggregate brand presence across all stores
|
||||
```
|
||||
|
||||
### Deal Finder
|
||||
|
||||
```javascript
|
||||
// Find high-THC flower under $30
|
||||
const deals = await fetch(
|
||||
'/api/payloads/store/112/query?category=flower&price_max=30&thc_min=20&in_stock=true&sort=thc&order=desc'
|
||||
).then(r => r.json());
|
||||
```
|
||||
|
||||
### Inventory Tracker
|
||||
|
||||
```javascript
|
||||
// Get products that went out of stock
|
||||
const diff = await fetch('/api/payloads/store/112/diff').then(r => r.json());
|
||||
|
||||
const outOfStock = diff.details.stockChanges.filter(
|
||||
p => p.newStatus !== 'Active'
|
||||
);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Rate Limits
|
||||
|
||||
- Default: 100 requests/minute per API key
|
||||
- Contact support for higher limits
|
||||
|
||||
## Error Responses
|
||||
|
||||
```json
|
||||
{
|
||||
"success": false,
|
||||
"error": "Error message here"
|
||||
}
|
||||
```
|
||||
|
||||
Common errors:
|
||||
- `404` - Store or payload not found
|
||||
- `400` - Missing required parameter
|
||||
- `401` - Invalid or missing API key
|
||||
- `429` - Rate limit exceeded
|
||||
297
backend/docs/_archive/ORGANIC_SCRAPING_GUIDE.md
Normal file
297
backend/docs/_archive/ORGANIC_SCRAPING_GUIDE.md
Normal file
@@ -0,0 +1,297 @@
|
||||
# Organic Browser-Based Scraping Guide
|
||||
|
||||
**Last Updated:** 2025-12-12
|
||||
**Status:** Production-ready proof of concept
|
||||
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
This document describes the "organic" browser-based approach to scraping Dutchie dispensary menus. Unlike direct curl/axios requests, this method uses a real browser session to make API calls, making requests appear natural and reducing detection risk.
|
||||
|
||||
---
|
||||
|
||||
## Why Organic Scraping?
|
||||
|
||||
| Approach | Detection Risk | Speed | Complexity |
|
||||
|----------|---------------|-------|------------|
|
||||
| Direct curl | Higher | Fast | Low |
|
||||
| curl-impersonate | Medium | Fast | Medium |
|
||||
| **Browser-based (organic)** | **Lowest** | Slower | Higher |
|
||||
|
||||
Direct curl requests can be fingerprinted via:
|
||||
- TLS fingerprint (cipher suites, extensions)
|
||||
- Header order and values
|
||||
- Missing cookies/session data
|
||||
- Request patterns
|
||||
|
||||
Browser-based requests inherit:
|
||||
- Real Chrome TLS fingerprint
|
||||
- Session cookies from page visit
|
||||
- Natural header order
|
||||
- JavaScript execution environment
|
||||
|
||||
---
|
||||
|
||||
## Implementation
|
||||
|
||||
### Dependencies
|
||||
|
||||
```bash
|
||||
npm install puppeteer puppeteer-extra puppeteer-extra-plugin-stealth
|
||||
```
|
||||
|
||||
### Core Script: `test-intercept.js`
|
||||
|
||||
Located at: `backend/test-intercept.js`
|
||||
|
||||
```javascript
|
||||
const puppeteer = require('puppeteer-extra');
|
||||
const StealthPlugin = require('puppeteer-extra-plugin-stealth');
|
||||
const fs = require('fs');
|
||||
|
||||
puppeteer.use(StealthPlugin());
|
||||
|
||||
async function capturePayload(config) {
|
||||
const { dispensaryId, platformId, cName, outputPath } = config;
|
||||
|
||||
const browser = await puppeteer.launch({
|
||||
headless: 'new',
|
||||
args: ['--no-sandbox', '--disable-setuid-sandbox']
|
||||
});
|
||||
|
||||
const page = await browser.newPage();
|
||||
|
||||
// STEP 1: Establish session by visiting the menu
|
||||
const embedUrl = `https://dutchie.com/embedded-menu/${cName}?menuType=rec`;
|
||||
await page.goto(embedUrl, { waitUntil: 'networkidle2', timeout: 60000 });
|
||||
|
||||
// STEP 2: Fetch ALL products using GraphQL from browser context
|
||||
const result = await page.evaluate(async (platformId) => {
|
||||
const allProducts = [];
|
||||
let pageNum = 0;
|
||||
const perPage = 100;
|
||||
let totalCount = 0;
|
||||
const sessionId = 'browser-session-' + Date.now();
|
||||
|
||||
while (pageNum < 30) {
|
||||
const variables = {
|
||||
includeEnterpriseSpecials: false,
|
||||
productsFilter: {
|
||||
dispensaryId: platformId,
|
||||
pricingType: 'rec',
|
||||
Status: 'Active', // CRITICAL: Must be 'Active', not null
|
||||
types: [],
|
||||
useCache: true,
|
||||
isDefaultSort: true,
|
||||
sortBy: 'popularSortIdx',
|
||||
sortDirection: 1,
|
||||
bypassOnlineThresholds: true,
|
||||
isKioskMenu: false,
|
||||
removeProductsBelowOptionThresholds: false,
|
||||
},
|
||||
page: pageNum,
|
||||
perPage: perPage,
|
||||
};
|
||||
|
||||
const extensions = {
|
||||
persistedQuery: {
|
||||
version: 1,
|
||||
sha256Hash: 'ee29c060826dc41c527e470e9ae502c9b2c169720faa0a9f5d25e1b9a530a4a0'
|
||||
}
|
||||
};
|
||||
|
||||
const qs = new URLSearchParams({
|
||||
operationName: 'FilteredProducts',
|
||||
variables: JSON.stringify(variables),
|
||||
extensions: JSON.stringify(extensions)
|
||||
});
|
||||
|
||||
const response = await fetch(`https://dutchie.com/api-3/graphql?${qs}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'content-type': 'application/json',
|
||||
'x-dutchie-session': sessionId,
|
||||
'apollographql-client-name': 'Marketplace (production)',
|
||||
},
|
||||
credentials: 'include'
|
||||
});
|
||||
|
||||
const json = await response.json();
|
||||
const data = json?.data?.filteredProducts;
|
||||
if (!data?.products) break;
|
||||
|
||||
allProducts.push(...data.products);
|
||||
if (pageNum === 0) totalCount = data.queryInfo?.totalCount || 0;
|
||||
if (allProducts.length >= totalCount) break;
|
||||
|
||||
pageNum++;
|
||||
await new Promise(r => setTimeout(r, 200)); // Polite delay
|
||||
}
|
||||
|
||||
return { products: allProducts, totalCount };
|
||||
}, platformId);
|
||||
|
||||
await browser.close();
|
||||
|
||||
// STEP 3: Save payload
|
||||
const payload = {
|
||||
dispensaryId,
|
||||
platformId,
|
||||
cName,
|
||||
fetchedAt: new Date().toISOString(),
|
||||
productCount: result.products.length,
|
||||
products: result.products,
|
||||
};
|
||||
|
||||
fs.writeFileSync(outputPath, JSON.stringify(payload, null, 2));
|
||||
return payload;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Critical Parameters
|
||||
|
||||
### GraphQL Hash (FilteredProducts)
|
||||
|
||||
```
|
||||
ee29c060826dc41c527e470e9ae502c9b2c169720faa0a9f5d25e1b9a530a4a0
|
||||
```
|
||||
|
||||
**WARNING:** Using the wrong hash returns HTTP 400.
|
||||
|
||||
### Status Parameter
|
||||
|
||||
| Value | Result |
|
||||
|-------|--------|
|
||||
| `'Active'` | Returns in-stock products (1019 in test) |
|
||||
| `null` | Returns 0 products |
|
||||
| `'All'` | Returns HTTP 400 |
|
||||
|
||||
**ALWAYS use `Status: 'Active'`**
|
||||
|
||||
### Required Headers
|
||||
|
||||
```javascript
|
||||
{
|
||||
'Accept': 'application/json',
|
||||
'content-type': 'application/json',
|
||||
'x-dutchie-session': 'unique-session-id',
|
||||
'apollographql-client-name': 'Marketplace (production)',
|
||||
}
|
||||
```
|
||||
|
||||
### Endpoint
|
||||
|
||||
```
|
||||
https://dutchie.com/api-3/graphql
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance Benchmarks
|
||||
|
||||
Test store: AZ-Deeply-Rooted (1019 products)
|
||||
|
||||
| Metric | Value |
|
||||
|--------|-------|
|
||||
| Total products | 1019 |
|
||||
| Time | 18.5 seconds |
|
||||
| Payload size | 11.8 MB |
|
||||
| Pages fetched | 11 (100 per page) |
|
||||
| Success rate | 100% |
|
||||
|
||||
---
|
||||
|
||||
## Payload Format
|
||||
|
||||
The output matches the existing `payload-fetch.ts` handler format:
|
||||
|
||||
```json
|
||||
{
|
||||
"dispensaryId": 123,
|
||||
"platformId": "6405ef617056e8014d79101b",
|
||||
"cName": "AZ-Deeply-Rooted",
|
||||
"fetchedAt": "2025-12-12T05:05:19.837Z",
|
||||
"productCount": 1019,
|
||||
"products": [
|
||||
{
|
||||
"id": "6927508db4851262f629a869",
|
||||
"Name": "Product Name",
|
||||
"brand": { "name": "Brand Name", ... },
|
||||
"type": "Flower",
|
||||
"THC": "25%",
|
||||
"Prices": [...],
|
||||
"Options": [...],
|
||||
...
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Integration Points
|
||||
|
||||
### As a Task Handler
|
||||
|
||||
The organic approach can be integrated as an alternative to curl-based fetching:
|
||||
|
||||
```typescript
|
||||
// In src/tasks/handlers/organic-payload-fetch.ts
|
||||
export async function handleOrganicPayloadFetch(ctx: TaskContext): Promise<TaskResult> {
|
||||
// Use puppeteer-based capture
|
||||
// Save to same payload storage
|
||||
// Queue product_refresh task
|
||||
}
|
||||
```
|
||||
|
||||
### Worker Configuration
|
||||
|
||||
Add to job_schedules:
|
||||
```sql
|
||||
INSERT INTO job_schedules (name, role, cron_expression)
|
||||
VALUES ('organic_product_crawl', 'organic_payload_fetch', '0 */6 * * *');
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### HTTP 400 Bad Request
|
||||
- Check hash is correct: `ee29c060...`
|
||||
- Verify Status is `'Active'` (string, not null)
|
||||
|
||||
### 0 Products Returned
|
||||
- Status was likely `null` or `'All'` - use `'Active'`
|
||||
- Check platformId is valid MongoDB ObjectId
|
||||
|
||||
### Session Not Established
|
||||
- Increase timeout on initial page.goto()
|
||||
- Check cName is valid (matches embedded-menu URL)
|
||||
|
||||
### Detection/Blocking
|
||||
- StealthPlugin should handle most cases
|
||||
- Add random delays between pages
|
||||
- Use headless: 'new' (not true/false)
|
||||
|
||||
---
|
||||
|
||||
## Files Reference
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `backend/test-intercept.js` | Proof of concept script |
|
||||
| `backend/src/platforms/dutchie/client.ts` | GraphQL hashes, curl implementation |
|
||||
| `backend/src/tasks/handlers/payload-fetch.ts` | Current curl-based handler |
|
||||
| `backend/src/utils/payload-storage.ts` | Payload save/load utilities |
|
||||
|
||||
---
|
||||
|
||||
## See Also
|
||||
|
||||
- `DUTCHIE_CRAWL_WORKFLOW.md` - Full crawl pipeline documentation
|
||||
- `TASK_WORKFLOW_2024-12-10.md` - Task system architecture
|
||||
- `CLAUDE.md` - Project rules and constraints
|
||||
25
backend/docs/_archive/README.md
Normal file
25
backend/docs/_archive/README.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# ARCHIVED DOCUMENTATION
|
||||
|
||||
**WARNING: These docs may be outdated or inaccurate.**
|
||||
|
||||
The code has evolved significantly. These docs are kept for historical reference only.
|
||||
|
||||
## What to Use Instead
|
||||
|
||||
**The single source of truth is:**
|
||||
- `CLAUDE.md` (root) - Essential rules and quick reference
|
||||
- `docs/CODEBASE_MAP.md` - Current file/directory reference
|
||||
|
||||
## Why Archive?
|
||||
|
||||
These docs were written during development iterations and may reference:
|
||||
- Old file paths that no longer exist
|
||||
- Deprecated approaches (hydration, scraper-v2)
|
||||
- APIs that have changed
|
||||
- Database schemas that evolved
|
||||
|
||||
## If You Need Details
|
||||
|
||||
1. First check CODEBASE_MAP.md for current file locations
|
||||
2. Then read the actual source code
|
||||
3. Only use archive docs as a last resort for historical context
|
||||
253
backend/migrations/084_dual_transport_preflight.sql
Normal file
253
backend/migrations/084_dual_transport_preflight.sql
Normal file
@@ -0,0 +1,253 @@
|
||||
-- Migration 084: Dual Transport Preflight System
|
||||
-- Workers run both curl and http (Puppeteer) preflights on startup
|
||||
-- Tasks can require a specific transport method
|
||||
|
||||
-- ===================================================================
|
||||
-- PART 1: Add preflight columns to worker_registry
|
||||
-- ===================================================================
|
||||
|
||||
-- Preflight status for curl/axios transport (proxy-based)
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_curl_status VARCHAR(20) DEFAULT 'pending';
|
||||
|
||||
-- Preflight status for http/Puppeteer transport (browser-based)
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_http_status VARCHAR(20) DEFAULT 'pending';
|
||||
|
||||
-- Timestamps for when each preflight completed
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_curl_at TIMESTAMPTZ;
|
||||
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_http_at TIMESTAMPTZ;
|
||||
|
||||
-- Error messages for failed preflights
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_curl_error TEXT;
|
||||
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_http_error TEXT;
|
||||
|
||||
-- Response time for successful preflights (ms)
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_curl_ms INTEGER;
|
||||
|
||||
ALTER TABLE worker_registry
|
||||
ADD COLUMN IF NOT EXISTS preflight_http_ms INTEGER;
|
||||
|
||||
-- Constraints for preflight status values
|
||||
ALTER TABLE worker_registry
|
||||
DROP CONSTRAINT IF EXISTS valid_preflight_curl_status;
|
||||
|
||||
ALTER TABLE worker_registry
|
||||
ADD CONSTRAINT valid_preflight_curl_status
|
||||
CHECK (preflight_curl_status IN ('pending', 'passed', 'failed', 'skipped'));
|
||||
|
||||
ALTER TABLE worker_registry
|
||||
DROP CONSTRAINT IF EXISTS valid_preflight_http_status;
|
||||
|
||||
ALTER TABLE worker_registry
|
||||
ADD CONSTRAINT valid_preflight_http_status
|
||||
CHECK (preflight_http_status IN ('pending', 'passed', 'failed', 'skipped'));
|
||||
|
||||
-- ===================================================================
|
||||
-- PART 2: Add method column to worker_tasks
|
||||
-- ===================================================================
|
||||
|
||||
-- Transport method requirement for the task
|
||||
-- NULL = no preference (any worker can claim)
|
||||
-- 'curl' = requires curl/axios transport (proxy-based, fast)
|
||||
-- 'http' = requires http/Puppeteer transport (browser-based, anti-detect)
|
||||
ALTER TABLE worker_tasks
|
||||
ADD COLUMN IF NOT EXISTS method VARCHAR(10);
|
||||
|
||||
-- Constraint for valid method values
|
||||
ALTER TABLE worker_tasks
|
||||
DROP CONSTRAINT IF EXISTS valid_task_method;
|
||||
|
||||
ALTER TABLE worker_tasks
|
||||
ADD CONSTRAINT valid_task_method
|
||||
CHECK (method IS NULL OR method IN ('curl', 'http'));
|
||||
|
||||
-- Index for method-based task claiming
|
||||
CREATE INDEX IF NOT EXISTS idx_worker_tasks_method
|
||||
ON worker_tasks(method)
|
||||
WHERE status = 'pending';
|
||||
|
||||
-- Set default method for all existing pending tasks to 'http'
|
||||
-- ALL current tasks require Puppeteer/browser-based transport
|
||||
UPDATE worker_tasks
|
||||
SET method = 'http'
|
||||
WHERE method IS NULL;
|
||||
|
||||
-- ===================================================================
|
||||
-- PART 3: Update claim_task function for method compatibility
|
||||
-- ===================================================================
|
||||
|
||||
CREATE OR REPLACE FUNCTION claim_task(
|
||||
p_role VARCHAR(50),
|
||||
p_worker_id VARCHAR(100),
|
||||
p_curl_passed BOOLEAN DEFAULT TRUE,
|
||||
p_http_passed BOOLEAN DEFAULT FALSE
|
||||
) RETURNS worker_tasks AS $$
|
||||
DECLARE
|
||||
claimed_task worker_tasks;
|
||||
BEGIN
|
||||
UPDATE worker_tasks
|
||||
SET
|
||||
status = 'claimed',
|
||||
worker_id = p_worker_id,
|
||||
claimed_at = NOW(),
|
||||
updated_at = NOW()
|
||||
WHERE id = (
|
||||
SELECT id FROM worker_tasks
|
||||
WHERE role = p_role
|
||||
AND status = 'pending'
|
||||
AND (scheduled_for IS NULL OR scheduled_for <= NOW())
|
||||
-- Method compatibility: worker must have passed the required preflight
|
||||
AND (
|
||||
method IS NULL -- No preference, any worker can claim
|
||||
OR (method = 'curl' AND p_curl_passed = TRUE)
|
||||
OR (method = 'http' AND p_http_passed = TRUE)
|
||||
)
|
||||
-- Exclude stores that already have an active task
|
||||
AND (dispensary_id IS NULL OR dispensary_id NOT IN (
|
||||
SELECT dispensary_id FROM worker_tasks
|
||||
WHERE status IN ('claimed', 'running')
|
||||
AND dispensary_id IS NOT NULL
|
||||
))
|
||||
ORDER BY priority DESC, created_at ASC
|
||||
LIMIT 1
|
||||
FOR UPDATE SKIP LOCKED
|
||||
)
|
||||
RETURNING * INTO claimed_task;
|
||||
|
||||
RETURN claimed_task;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- ===================================================================
|
||||
-- PART 4: Update v_active_workers view
|
||||
-- ===================================================================
|
||||
|
||||
DROP VIEW IF EXISTS v_active_workers;
|
||||
|
||||
CREATE VIEW v_active_workers AS
|
||||
SELECT
|
||||
wr.id,
|
||||
wr.worker_id,
|
||||
wr.friendly_name,
|
||||
wr.role,
|
||||
wr.status,
|
||||
wr.pod_name,
|
||||
wr.hostname,
|
||||
wr.started_at,
|
||||
wr.last_heartbeat_at,
|
||||
wr.last_task_at,
|
||||
wr.tasks_completed,
|
||||
wr.tasks_failed,
|
||||
wr.current_task_id,
|
||||
-- Preflight status
|
||||
wr.preflight_curl_status,
|
||||
wr.preflight_http_status,
|
||||
wr.preflight_curl_at,
|
||||
wr.preflight_http_at,
|
||||
wr.preflight_curl_error,
|
||||
wr.preflight_http_error,
|
||||
wr.preflight_curl_ms,
|
||||
wr.preflight_http_ms,
|
||||
-- Computed fields
|
||||
EXTRACT(EPOCH FROM (NOW() - wr.last_heartbeat_at)) as seconds_since_heartbeat,
|
||||
CASE
|
||||
WHEN wr.status = 'offline' THEN 'offline'
|
||||
WHEN wr.last_heartbeat_at < NOW() - INTERVAL '2 minutes' THEN 'stale'
|
||||
WHEN wr.current_task_id IS NOT NULL THEN 'busy'
|
||||
ELSE 'ready'
|
||||
END as health_status,
|
||||
-- Capability flags (can this worker handle curl/http tasks?)
|
||||
(wr.preflight_curl_status = 'passed') as can_curl,
|
||||
(wr.preflight_http_status = 'passed') as can_http
|
||||
FROM worker_registry wr
|
||||
WHERE wr.status != 'terminated'
|
||||
ORDER BY wr.status = 'active' DESC, wr.last_heartbeat_at DESC;
|
||||
|
||||
-- ===================================================================
|
||||
-- PART 5: View for task queue with method info
|
||||
-- ===================================================================
|
||||
|
||||
DROP VIEW IF EXISTS v_task_history;
|
||||
|
||||
CREATE VIEW v_task_history AS
|
||||
SELECT
|
||||
t.id,
|
||||
t.role,
|
||||
t.dispensary_id,
|
||||
d.name as dispensary_name,
|
||||
t.platform,
|
||||
t.status,
|
||||
t.priority,
|
||||
t.method,
|
||||
t.worker_id,
|
||||
t.scheduled_for,
|
||||
t.claimed_at,
|
||||
t.started_at,
|
||||
t.completed_at,
|
||||
t.error_message,
|
||||
t.retry_count,
|
||||
t.created_at,
|
||||
EXTRACT(EPOCH FROM (t.completed_at - t.started_at)) as duration_sec
|
||||
FROM worker_tasks t
|
||||
LEFT JOIN dispensaries d ON d.id = t.dispensary_id
|
||||
ORDER BY t.created_at DESC;
|
||||
|
||||
-- ===================================================================
|
||||
-- PART 6: Helper function to update worker preflight status
|
||||
-- ===================================================================
|
||||
|
||||
CREATE OR REPLACE FUNCTION update_worker_preflight(
|
||||
p_worker_id VARCHAR(100),
|
||||
p_transport VARCHAR(10), -- 'curl' or 'http'
|
||||
p_status VARCHAR(20), -- 'passed', 'failed', 'skipped'
|
||||
p_response_ms INTEGER DEFAULT NULL,
|
||||
p_error TEXT DEFAULT NULL
|
||||
) RETURNS VOID AS $$
|
||||
BEGIN
|
||||
IF p_transport = 'curl' THEN
|
||||
UPDATE worker_registry
|
||||
SET
|
||||
preflight_curl_status = p_status,
|
||||
preflight_curl_at = NOW(),
|
||||
preflight_curl_ms = p_response_ms,
|
||||
preflight_curl_error = p_error,
|
||||
updated_at = NOW()
|
||||
WHERE worker_id = p_worker_id;
|
||||
ELSIF p_transport = 'http' THEN
|
||||
UPDATE worker_registry
|
||||
SET
|
||||
preflight_http_status = p_status,
|
||||
preflight_http_at = NOW(),
|
||||
preflight_http_ms = p_response_ms,
|
||||
preflight_http_error = p_error,
|
||||
updated_at = NOW()
|
||||
WHERE worker_id = p_worker_id;
|
||||
END IF;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- ===================================================================
|
||||
-- Comments
|
||||
-- ===================================================================
|
||||
|
||||
COMMENT ON COLUMN worker_registry.preflight_curl_status IS 'Status of curl/axios preflight: pending, passed, failed, skipped';
|
||||
COMMENT ON COLUMN worker_registry.preflight_http_status IS 'Status of http/Puppeteer preflight: pending, passed, failed, skipped';
|
||||
COMMENT ON COLUMN worker_registry.preflight_curl_at IS 'When curl preflight completed';
|
||||
COMMENT ON COLUMN worker_registry.preflight_http_at IS 'When http preflight completed';
|
||||
COMMENT ON COLUMN worker_registry.preflight_curl_error IS 'Error message if curl preflight failed';
|
||||
COMMENT ON COLUMN worker_registry.preflight_http_error IS 'Error message if http preflight failed';
|
||||
COMMENT ON COLUMN worker_registry.preflight_curl_ms IS 'Response time of successful curl preflight (ms)';
|
||||
COMMENT ON COLUMN worker_registry.preflight_http_ms IS 'Response time of successful http preflight (ms)';
|
||||
|
||||
COMMENT ON COLUMN worker_tasks.method IS 'Transport method required: NULL=any, curl=proxy-based, http=browser-based';
|
||||
|
||||
COMMENT ON FUNCTION claim_task IS 'Atomically claim a task, respecting method requirements and per-store locking';
|
||||
COMMENT ON FUNCTION update_worker_preflight IS 'Update a workers preflight status for a given transport';
|
||||
59
backend/migrations/085_trusted_origins.sql
Normal file
59
backend/migrations/085_trusted_origins.sql
Normal file
@@ -0,0 +1,59 @@
|
||||
-- Migration 085: Trusted Origins Management
|
||||
-- Allows admin to manage trusted IPs and domains via UI instead of hardcoded values
|
||||
|
||||
-- Trusted origins table (IPs and domains that bypass API key auth)
|
||||
CREATE TABLE IF NOT EXISTS trusted_origins (
|
||||
id SERIAL PRIMARY KEY,
|
||||
|
||||
-- Origin type: 'ip', 'domain', 'pattern'
|
||||
origin_type VARCHAR(20) NOT NULL CHECK (origin_type IN ('ip', 'domain', 'pattern')),
|
||||
|
||||
-- The actual value
|
||||
-- For ip: '127.0.0.1', '::1', '192.168.1.0/24'
|
||||
-- For domain: 'cannaiq.co', 'findadispo.com'
|
||||
-- For pattern: '^https://.*\.cannabrands\.app$' (regex)
|
||||
origin_value VARCHAR(255) NOT NULL,
|
||||
|
||||
-- Description for admin reference
|
||||
description TEXT,
|
||||
|
||||
-- Active flag
|
||||
active BOOLEAN DEFAULT true,
|
||||
|
||||
-- Audit
|
||||
created_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
created_by INTEGER REFERENCES users(id),
|
||||
updated_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
|
||||
UNIQUE(origin_type, origin_value)
|
||||
);
|
||||
|
||||
-- Index for quick lookups
|
||||
CREATE INDEX IF NOT EXISTS idx_trusted_origins_active ON trusted_origins(active) WHERE active = true;
|
||||
CREATE INDEX IF NOT EXISTS idx_trusted_origins_type ON trusted_origins(origin_type, active);
|
||||
|
||||
-- Seed with current hardcoded values
|
||||
INSERT INTO trusted_origins (origin_type, origin_value, description) VALUES
|
||||
-- Trusted IPs (localhost)
|
||||
('ip', '127.0.0.1', 'Localhost IPv4'),
|
||||
('ip', '::1', 'Localhost IPv6'),
|
||||
('ip', '::ffff:127.0.0.1', 'Localhost IPv4-mapped IPv6'),
|
||||
|
||||
-- Trusted domains
|
||||
('domain', 'cannaiq.co', 'CannaiQ production'),
|
||||
('domain', 'www.cannaiq.co', 'CannaiQ production (www)'),
|
||||
('domain', 'findadispo.com', 'FindADispo production'),
|
||||
('domain', 'www.findadispo.com', 'FindADispo production (www)'),
|
||||
('domain', 'findagram.co', 'Findagram production'),
|
||||
('domain', 'www.findagram.co', 'Findagram production (www)'),
|
||||
('domain', 'localhost:3010', 'Local backend dev'),
|
||||
('domain', 'localhost:8080', 'Local admin dev'),
|
||||
('domain', 'localhost:5173', 'Local Vite dev'),
|
||||
|
||||
-- Pattern-based (regex)
|
||||
('pattern', '^https://.*\.cannabrands\.app$', 'All cannabrands.app subdomains'),
|
||||
('pattern', '^https://.*\.cannaiq\.co$', 'All cannaiq.co subdomains')
|
||||
ON CONFLICT (origin_type, origin_value) DO NOTHING;
|
||||
|
||||
-- Add comment
|
||||
COMMENT ON TABLE trusted_origins IS 'IPs and domains that bypass API key authentication. Managed via /admin.';
|
||||
239
backend/package-lock.json
generated
239
backend/package-lock.json
generated
@@ -35,6 +35,8 @@
|
||||
"puppeteer-extra-plugin-stealth": "^2.11.2",
|
||||
"sharp": "^0.32.0",
|
||||
"socks-proxy-agent": "^8.0.2",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"swagger-ui-express": "^5.0.1",
|
||||
"user-agents": "^1.1.669",
|
||||
"uuid": "^9.0.1",
|
||||
"zod": "^3.22.4"
|
||||
@@ -47,11 +49,53 @@
|
||||
"@types/node": "^20.10.5",
|
||||
"@types/node-cron": "^3.0.11",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@types/swagger-jsdoc": "^6.0.4",
|
||||
"@types/swagger-ui-express": "^4.1.8",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"tsx": "^4.7.0",
|
||||
"typescript": "^5.3.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@apidevtools/json-schema-ref-parser": {
|
||||
"version": "9.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.2.tgz",
|
||||
"integrity": "sha512-r1w81DpR+KyRWd3f+rk6TNqMgedmAxZP5v5KWlXQWlgMUUtyEJch0DKEci1SorPMiSeM8XPl7MZ3miJ60JIpQg==",
|
||||
"dependencies": {
|
||||
"@jsdevtools/ono": "^7.1.3",
|
||||
"@types/json-schema": "^7.0.6",
|
||||
"call-me-maybe": "^1.0.1",
|
||||
"js-yaml": "^4.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@apidevtools/openapi-schemas": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@apidevtools/openapi-schemas/-/openapi-schemas-2.1.0.tgz",
|
||||
"integrity": "sha512-Zc1AlqrJlX3SlpupFGpiLi2EbteyP7fXmUOGup6/DnkRgjP9bgMM/ag+n91rsv0U1Gpz0H3VILA/o3bW7Ua6BQ==",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@apidevtools/swagger-methods": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@apidevtools/swagger-methods/-/swagger-methods-3.0.2.tgz",
|
||||
"integrity": "sha512-QAkD5kK2b1WfjDS/UQn/qQkbwF31uqRjPTrsCs5ZG9BQGAkjwvqGFjjPqAuzac/IYzpPtRzjCP1WrTuAIjMrXg=="
|
||||
},
|
||||
"node_modules/@apidevtools/swagger-parser": {
|
||||
"version": "10.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@apidevtools/swagger-parser/-/swagger-parser-10.0.3.tgz",
|
||||
"integrity": "sha512-sNiLY51vZOmSPFZA5TF35KZ2HbgYklQnTSDnkghamzLb3EkNtcQnrBQEj5AOCxHpTtXpqMCRM1CrmV2rG6nw4g==",
|
||||
"dependencies": {
|
||||
"@apidevtools/json-schema-ref-parser": "^9.0.6",
|
||||
"@apidevtools/openapi-schemas": "^2.0.4",
|
||||
"@apidevtools/swagger-methods": "^3.0.2",
|
||||
"@jsdevtools/ono": "^7.1.3",
|
||||
"call-me-maybe": "^1.0.1",
|
||||
"z-schema": "^5.0.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"openapi-types": ">=7"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/code-frame": {
|
||||
"version": "7.27.1",
|
||||
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
|
||||
@@ -494,6 +538,11 @@
|
||||
"resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.4.0.tgz",
|
||||
"integrity": "sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ=="
|
||||
},
|
||||
"node_modules/@jsdevtools/ono": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz",
|
||||
"integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="
|
||||
},
|
||||
"node_modules/@jsep-plugin/assignment": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@jsep-plugin/assignment/-/assignment-1.3.0.tgz",
|
||||
@@ -761,6 +810,12 @@
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
},
|
||||
"node_modules/@scarf/scarf": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz",
|
||||
"integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==",
|
||||
"hasInstallScript": true
|
||||
},
|
||||
"node_modules/@tootallnate/quickjs-emscripten": {
|
||||
"version": "0.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz",
|
||||
@@ -855,6 +910,11 @@
|
||||
"resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz",
|
||||
"integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="
|
||||
},
|
||||
"node_modules/@types/json-schema": {
|
||||
"version": "7.0.15",
|
||||
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
|
||||
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="
|
||||
},
|
||||
"node_modules/@types/jsonwebtoken": {
|
||||
"version": "9.0.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.10.tgz",
|
||||
@@ -960,6 +1020,22 @@
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/swagger-jsdoc": {
|
||||
"version": "6.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/swagger-jsdoc/-/swagger-jsdoc-6.0.4.tgz",
|
||||
"integrity": "sha512-W+Xw5epcOZrF/AooUM/PccNMSAFOKWZA5dasNyMujTwsBkU74njSJBpvCCJhHAJ95XRMzQrrW844Btu0uoetwQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/swagger-ui-express": {
|
||||
"version": "4.1.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/swagger-ui-express/-/swagger-ui-express-4.1.8.tgz",
|
||||
"integrity": "sha512-AhZV8/EIreHFmBV5wAs0gzJUNq9JbbSXgJLQubCC0jtIo6prnI9MIRRxnU4MZX9RB9yXxF1V4R7jtLl/Wcj31g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/express": "*",
|
||||
"@types/serve-static": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/uuid": {
|
||||
"version": "9.0.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz",
|
||||
@@ -1434,6 +1510,11 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/call-me-maybe": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.2.tgz",
|
||||
"integrity": "sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ=="
|
||||
},
|
||||
"node_modules/callsites": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
|
||||
@@ -1594,6 +1675,14 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/commander": {
|
||||
"version": "6.2.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-6.2.0.tgz",
|
||||
"integrity": "sha512-zP4jEKbe8SHzKJYQmq8Y9gYjtO/POJLgIdKgV7B9qNmABVFVc+ctqSX6iXh4mCpJfRBOabiZ2YKPg8ciDw6C+Q==",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
@@ -1863,6 +1952,17 @@
|
||||
"resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1232444.tgz",
|
||||
"integrity": "sha512-pM27vqEfxSxRkTMnF+XCmxSEb6duO5R+t8A9DEEJgy4Wz2RVanje2mmj99B6A3zv2r/qGfYlOvYznUhuokizmg=="
|
||||
},
|
||||
"node_modules/doctrine": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
|
||||
"integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
|
||||
"dependencies": {
|
||||
"esutils": "^2.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/dom-serializer": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
|
||||
@@ -3258,6 +3358,12 @@
|
||||
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
|
||||
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="
|
||||
},
|
||||
"node_modules/lodash.get": {
|
||||
"version": "4.4.2",
|
||||
"resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
|
||||
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==",
|
||||
"deprecated": "This package is deprecated. Use the optional chaining (?.) operator instead."
|
||||
},
|
||||
"node_modules/lodash.includes": {
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
|
||||
@@ -3273,6 +3379,12 @@
|
||||
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
|
||||
"integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="
|
||||
},
|
||||
"node_modules/lodash.isequal": {
|
||||
"version": "4.5.0",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
|
||||
"integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==",
|
||||
"deprecated": "This package is deprecated. Use require('node:util').isDeepStrictEqual instead."
|
||||
},
|
||||
"node_modules/lodash.isinteger": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz",
|
||||
@@ -3293,6 +3405,11 @@
|
||||
"resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz",
|
||||
"integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="
|
||||
},
|
||||
"node_modules/lodash.mergewith": {
|
||||
"version": "4.6.2",
|
||||
"resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz",
|
||||
"integrity": "sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ=="
|
||||
},
|
||||
"node_modules/lodash.once": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
|
||||
@@ -3748,6 +3865,12 @@
|
||||
"wrappy": "1"
|
||||
}
|
||||
},
|
||||
"node_modules/openapi-types": {
|
||||
"version": "12.1.3",
|
||||
"resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz",
|
||||
"integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/openid-client": {
|
||||
"version": "6.8.1",
|
||||
"resolved": "https://registry.npmjs.org/openid-client/-/openid-client-6.8.1.tgz",
|
||||
@@ -5188,6 +5311,78 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"node_modules/swagger-jsdoc": {
|
||||
"version": "6.2.8",
|
||||
"resolved": "https://registry.npmjs.org/swagger-jsdoc/-/swagger-jsdoc-6.2.8.tgz",
|
||||
"integrity": "sha512-VPvil1+JRpmJ55CgAtn8DIcpBs0bL5L3q5bVQvF4tAW/k/9JYSj7dCpaYCAv5rufe0vcCbBRQXGvzpkWjvLklQ==",
|
||||
"dependencies": {
|
||||
"commander": "6.2.0",
|
||||
"doctrine": "3.0.0",
|
||||
"glob": "7.1.6",
|
||||
"lodash.mergewith": "^4.6.2",
|
||||
"swagger-parser": "^10.0.3",
|
||||
"yaml": "2.0.0-1"
|
||||
},
|
||||
"bin": {
|
||||
"swagger-jsdoc": "bin/swagger-jsdoc.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/swagger-jsdoc/node_modules/glob": {
|
||||
"version": "7.1.6",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
|
||||
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
|
||||
"deprecated": "Glob versions prior to v9 are no longer supported",
|
||||
"dependencies": {
|
||||
"fs.realpath": "^1.0.0",
|
||||
"inflight": "^1.0.4",
|
||||
"inherits": "2",
|
||||
"minimatch": "^3.0.4",
|
||||
"once": "^1.3.0",
|
||||
"path-is-absolute": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/swagger-parser": {
|
||||
"version": "10.0.3",
|
||||
"resolved": "https://registry.npmjs.org/swagger-parser/-/swagger-parser-10.0.3.tgz",
|
||||
"integrity": "sha512-nF7oMeL4KypldrQhac8RyHerJeGPD1p2xDh900GPvc+Nk7nWP6jX2FcC7WmkinMoAmoO774+AFXcWsW8gMWEIg==",
|
||||
"dependencies": {
|
||||
"@apidevtools/swagger-parser": "10.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/swagger-ui-dist": {
|
||||
"version": "5.31.0",
|
||||
"resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-5.31.0.tgz",
|
||||
"integrity": "sha512-zSUTIck02fSga6rc0RZP3b7J7wgHXwLea8ZjgLA3Vgnb8QeOl3Wou2/j5QkzSGeoz6HusP/coYuJl33aQxQZpg==",
|
||||
"dependencies": {
|
||||
"@scarf/scarf": "=1.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/swagger-ui-express": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/swagger-ui-express/-/swagger-ui-express-5.0.1.tgz",
|
||||
"integrity": "sha512-SrNU3RiBGTLLmFU8GIJdOdanJTl4TOmT27tt3bWWHppqYmAZ6IDuEuBvMU6nZq0zLEe6b/1rACXCgLZqO6ZfrA==",
|
||||
"dependencies": {
|
||||
"swagger-ui-dist": ">=5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= v0.10.32"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"express": ">=4.0.0 || >=5.0.0-beta"
|
||||
}
|
||||
},
|
||||
"node_modules/tar": {
|
||||
"version": "6.2.1",
|
||||
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
|
||||
@@ -5406,6 +5601,14 @@
|
||||
"uuid": "dist/bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/validator": {
|
||||
"version": "13.15.23",
|
||||
"resolved": "https://registry.npmjs.org/validator/-/validator-13.15.23.tgz",
|
||||
"integrity": "sha512-4yoz1kEWqUjzi5zsPbAS/903QXSYp0UOtHsPpp7p9rHAw/W+dkInskAE386Fat3oKRROwO98d9ZB0G4cObgUyw==",
|
||||
"engines": {
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/vary": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
|
||||
@@ -5584,6 +5787,14 @@
|
||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
||||
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
|
||||
},
|
||||
"node_modules/yaml": {
|
||||
"version": "2.0.0-1",
|
||||
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.0.0-1.tgz",
|
||||
"integrity": "sha512-W7h5dEhywMKenDJh2iX/LABkbFnBxasD27oyXWDS/feDsxiw0dD5ncXdYXgkvAsXIY2MpW/ZKkr9IU30DBdMNQ==",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/yargs": {
|
||||
"version": "17.7.2",
|
||||
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
|
||||
@@ -5618,6 +5829,34 @@
|
||||
"fd-slicer": "~1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/z-schema": {
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/z-schema/-/z-schema-5.0.5.tgz",
|
||||
"integrity": "sha512-D7eujBWkLa3p2sIpJA0d1pr7es+a7m0vFAnZLlCEKq/Ij2k0MLi9Br2UPxoxdYystm5K1yeBGzub0FlYUEWj2Q==",
|
||||
"dependencies": {
|
||||
"lodash.get": "^4.4.2",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"validator": "^13.7.0"
|
||||
},
|
||||
"bin": {
|
||||
"z-schema": "bin/z-schema"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"commander": "^9.4.1"
|
||||
}
|
||||
},
|
||||
"node_modules/z-schema/node_modules/commander": {
|
||||
"version": "9.5.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz",
|
||||
"integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==",
|
||||
"optional": true,
|
||||
"engines": {
|
||||
"node": "^12.20.0 || >=14"
|
||||
}
|
||||
},
|
||||
"node_modules/zod": {
|
||||
"version": "3.25.76",
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
|
||||
@@ -49,6 +49,8 @@
|
||||
"puppeteer-extra-plugin-stealth": "^2.11.2",
|
||||
"sharp": "^0.32.0",
|
||||
"socks-proxy-agent": "^8.0.2",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"swagger-ui-express": "^5.0.1",
|
||||
"user-agents": "^1.1.669",
|
||||
"uuid": "^9.0.1",
|
||||
"zod": "^3.22.4"
|
||||
@@ -61,6 +63,8 @@
|
||||
"@types/node": "^20.10.5",
|
||||
"@types/node-cron": "^3.0.11",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@types/swagger-jsdoc": "^6.0.4",
|
||||
"@types/swagger-ui-express": "^4.1.8",
|
||||
"@types/uuid": "^9.0.7",
|
||||
"tsx": "^4.7.0",
|
||||
"typescript": "^5.3.3"
|
||||
|
||||
46
backend/src/_deprecated/DONT_USE.md
Normal file
46
backend/src/_deprecated/DONT_USE.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# DEPRECATED CODE - DO NOT USE
|
||||
|
||||
**These directories contain OLD, ABANDONED code.**
|
||||
|
||||
## What's Here
|
||||
|
||||
| Directory | What It Was | Why Deprecated |
|
||||
|-----------|-------------|----------------|
|
||||
| `hydration/` | Old pipeline for processing crawl data | Replaced by `src/tasks/handlers/` |
|
||||
| `scraper-v2/` | Old Puppeteer-based scraper engine | Replaced by curl-based `src/platforms/dutchie/client.ts` |
|
||||
| `canonical-hydration/` | Intermediate step toward canonical schema | Merged into task handlers |
|
||||
|
||||
## What to Use Instead
|
||||
|
||||
| Old (DONT USE) | New (USE THIS) |
|
||||
|----------------|----------------|
|
||||
| `hydration/normalizers/dutchie.ts` | `src/tasks/handlers/product-refresh.ts` |
|
||||
| `hydration/producer.ts` | `src/tasks/handlers/payload-fetch.ts` |
|
||||
| `scraper-v2/engine.ts` | `src/platforms/dutchie/client.ts` |
|
||||
| `scraper-v2/scheduler.ts` | `src/services/task-scheduler.ts` |
|
||||
|
||||
## Why Keep This Code?
|
||||
|
||||
- Historical reference only
|
||||
- Some patterns may be useful for debugging
|
||||
- Will be deleted once confirmed not needed
|
||||
|
||||
## Claude Instructions
|
||||
|
||||
**IF YOU ARE CLAUDE:**
|
||||
|
||||
1. NEVER import from `src/_deprecated/`
|
||||
2. NEVER reference these files as examples
|
||||
3. NEVER try to "fix" or "update" code in here
|
||||
4. If you see imports from these directories, suggest replacing them
|
||||
|
||||
**Correct imports:**
|
||||
```typescript
|
||||
// GOOD
|
||||
import { executeGraphQL } from '../platforms/dutchie/client';
|
||||
import { pool } from '../db/pool';
|
||||
|
||||
// BAD - DO NOT USE
|
||||
import { something } from '../_deprecated/hydration/...';
|
||||
import { something } from '../_deprecated/scraper-v2/...';
|
||||
```
|
||||
584
backend/src/_deprecated/system/routes/index.ts
Normal file
584
backend/src/_deprecated/system/routes/index.ts
Normal file
@@ -0,0 +1,584 @@
|
||||
/**
|
||||
* System API Routes
|
||||
*
|
||||
* Provides REST API endpoints for system monitoring and control:
|
||||
* - /api/system/sync/* - Sync orchestrator
|
||||
* - /api/system/dlq/* - Dead-letter queue
|
||||
* - /api/system/integrity/* - Integrity checks
|
||||
* - /api/system/fix/* - Auto-fix routines
|
||||
* - /api/system/alerts/* - System alerts
|
||||
* - /metrics - Prometheus metrics
|
||||
*
|
||||
* Phase 5: Full Production Sync + Monitoring
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { Pool } from 'pg';
|
||||
import {
|
||||
SyncOrchestrator,
|
||||
MetricsService,
|
||||
DLQService,
|
||||
AlertService,
|
||||
IntegrityService,
|
||||
AutoFixService,
|
||||
} from '../services';
|
||||
|
||||
export function createSystemRouter(pool: Pool): Router {
|
||||
const router = Router();
|
||||
|
||||
// Initialize services
|
||||
const metrics = new MetricsService(pool);
|
||||
const dlq = new DLQService(pool);
|
||||
const alerts = new AlertService(pool);
|
||||
const integrity = new IntegrityService(pool, alerts);
|
||||
const autoFix = new AutoFixService(pool, alerts);
|
||||
const orchestrator = new SyncOrchestrator(pool, metrics, dlq, alerts);
|
||||
|
||||
// ============================================================
|
||||
// SYNC ORCHESTRATOR ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/sync/status
|
||||
* Get current sync status
|
||||
*/
|
||||
router.get('/sync/status', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const status = await orchestrator.getStatus();
|
||||
res.json(status);
|
||||
} catch (error) {
|
||||
console.error('[System] Sync status error:', error);
|
||||
res.status(500).json({ error: 'Failed to get sync status' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/sync/run
|
||||
* Trigger a sync run
|
||||
*/
|
||||
router.post('/sync/run', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const triggeredBy = req.body.triggeredBy || 'api';
|
||||
const result = await orchestrator.runSync();
|
||||
res.json({
|
||||
success: true,
|
||||
triggeredBy,
|
||||
metrics: result,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[System] Sync run error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Sync run failed',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/sync/queue-depth
|
||||
* Get queue depth information
|
||||
*/
|
||||
router.get('/sync/queue-depth', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const depth = await orchestrator.getQueueDepth();
|
||||
res.json(depth);
|
||||
} catch (error) {
|
||||
console.error('[System] Queue depth error:', error);
|
||||
res.status(500).json({ error: 'Failed to get queue depth' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/sync/health
|
||||
* Get sync health status
|
||||
*/
|
||||
router.get('/sync/health', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const health = await orchestrator.getHealth();
|
||||
res.status(health.healthy ? 200 : 503).json(health);
|
||||
} catch (error) {
|
||||
console.error('[System] Health check error:', error);
|
||||
res.status(500).json({ healthy: false, error: 'Health check failed' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/sync/pause
|
||||
* Pause the orchestrator
|
||||
*/
|
||||
router.post('/sync/pause', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const reason = req.body.reason || 'Manual pause';
|
||||
await orchestrator.pause(reason);
|
||||
res.json({ success: true, message: 'Orchestrator paused' });
|
||||
} catch (error) {
|
||||
console.error('[System] Pause error:', error);
|
||||
res.status(500).json({ error: 'Failed to pause orchestrator' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/sync/resume
|
||||
* Resume the orchestrator
|
||||
*/
|
||||
router.post('/sync/resume', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
await orchestrator.resume();
|
||||
res.json({ success: true, message: 'Orchestrator resumed' });
|
||||
} catch (error) {
|
||||
console.error('[System] Resume error:', error);
|
||||
res.status(500).json({ error: 'Failed to resume orchestrator' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// DLQ ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq
|
||||
* List DLQ payloads
|
||||
*/
|
||||
router.get('/dlq', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const options = {
|
||||
status: req.query.status as string,
|
||||
errorType: req.query.errorType as string,
|
||||
dispensaryId: req.query.dispensaryId ? parseInt(req.query.dispensaryId as string) : undefined,
|
||||
limit: req.query.limit ? parseInt(req.query.limit as string) : 50,
|
||||
offset: req.query.offset ? parseInt(req.query.offset as string) : 0,
|
||||
};
|
||||
|
||||
const result = await dlq.listPayloads(options);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ list error:', error);
|
||||
res.status(500).json({ error: 'Failed to list DLQ payloads' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq/stats
|
||||
* Get DLQ statistics
|
||||
*/
|
||||
router.get('/dlq/stats', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const stats = await dlq.getStats();
|
||||
res.json(stats);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ stats error:', error);
|
||||
res.status(500).json({ error: 'Failed to get DLQ stats' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq/summary
|
||||
* Get DLQ summary by error type
|
||||
*/
|
||||
router.get('/dlq/summary', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const summary = await dlq.getSummary();
|
||||
res.json(summary);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ summary error:', error);
|
||||
res.status(500).json({ error: 'Failed to get DLQ summary' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq/:id
|
||||
* Get a specific DLQ payload
|
||||
*/
|
||||
router.get('/dlq/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const payload = await dlq.getPayload(req.params.id);
|
||||
if (!payload) {
|
||||
return res.status(404).json({ error: 'Payload not found' });
|
||||
}
|
||||
res.json(payload);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ get error:', error);
|
||||
res.status(500).json({ error: 'Failed to get DLQ payload' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/dlq/:id/retry
|
||||
* Retry a DLQ payload
|
||||
*/
|
||||
router.post('/dlq/:id/retry', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await dlq.retryPayload(req.params.id);
|
||||
if (result.success) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(400).json(result);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ retry error:', error);
|
||||
res.status(500).json({ error: 'Failed to retry payload' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/dlq/:id/abandon
|
||||
* Abandon a DLQ payload
|
||||
*/
|
||||
router.post('/dlq/:id/abandon', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const reason = req.body.reason || 'Manually abandoned';
|
||||
const abandonedBy = req.body.abandonedBy || 'api';
|
||||
const success = await dlq.abandonPayload(req.params.id, reason, abandonedBy);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ abandon error:', error);
|
||||
res.status(500).json({ error: 'Failed to abandon payload' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/dlq/bulk-retry
|
||||
* Bulk retry payloads by error type
|
||||
*/
|
||||
router.post('/dlq/bulk-retry', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { errorType } = req.body;
|
||||
if (!errorType) {
|
||||
return res.status(400).json({ error: 'errorType is required' });
|
||||
}
|
||||
const result = await dlq.bulkRetryByErrorType(errorType);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ bulk retry error:', error);
|
||||
res.status(500).json({ error: 'Failed to bulk retry' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// INTEGRITY CHECK ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* POST /api/system/integrity/run
|
||||
* Run all integrity checks
|
||||
*/
|
||||
router.post('/integrity/run', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const triggeredBy = req.body.triggeredBy || 'api';
|
||||
const result = await integrity.runAllChecks(triggeredBy);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] Integrity run error:', error);
|
||||
res.status(500).json({ error: 'Failed to run integrity checks' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/integrity/runs
|
||||
* Get recent integrity check runs
|
||||
*/
|
||||
router.get('/integrity/runs', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const limit = req.query.limit ? parseInt(req.query.limit as string) : 10;
|
||||
const runs = await integrity.getRecentRuns(limit);
|
||||
res.json(runs);
|
||||
} catch (error) {
|
||||
console.error('[System] Integrity runs error:', error);
|
||||
res.status(500).json({ error: 'Failed to get integrity runs' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/integrity/runs/:runId
|
||||
* Get results for a specific integrity run
|
||||
*/
|
||||
router.get('/integrity/runs/:runId', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const results = await integrity.getRunResults(req.params.runId);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('[System] Integrity run results error:', error);
|
||||
res.status(500).json({ error: 'Failed to get run results' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// AUTO-FIX ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/fix/routines
|
||||
* Get available fix routines
|
||||
*/
|
||||
router.get('/fix/routines', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const routines = autoFix.getAvailableRoutines();
|
||||
res.json(routines);
|
||||
} catch (error) {
|
||||
console.error('[System] Get routines error:', error);
|
||||
res.status(500).json({ error: 'Failed to get routines' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/fix/:routine
|
||||
* Run a fix routine
|
||||
*/
|
||||
router.post('/fix/:routine', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const routineName = req.params.routine;
|
||||
const dryRun = req.body.dryRun === true;
|
||||
const triggeredBy = req.body.triggeredBy || 'api';
|
||||
|
||||
const result = await autoFix.runRoutine(routineName as any, triggeredBy, { dryRun });
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] Fix routine error:', error);
|
||||
res.status(500).json({ error: 'Failed to run fix routine' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/fix/runs
|
||||
* Get recent fix runs
|
||||
*/
|
||||
router.get('/fix/runs', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const limit = req.query.limit ? parseInt(req.query.limit as string) : 20;
|
||||
const runs = await autoFix.getRecentRuns(limit);
|
||||
res.json(runs);
|
||||
} catch (error) {
|
||||
console.error('[System] Fix runs error:', error);
|
||||
res.status(500).json({ error: 'Failed to get fix runs' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// ALERTS ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/alerts
|
||||
* List alerts
|
||||
*/
|
||||
router.get('/alerts', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const options = {
|
||||
status: req.query.status as any,
|
||||
severity: req.query.severity as any,
|
||||
type: req.query.type as string,
|
||||
limit: req.query.limit ? parseInt(req.query.limit as string) : 50,
|
||||
offset: req.query.offset ? parseInt(req.query.offset as string) : 0,
|
||||
};
|
||||
|
||||
const result = await alerts.listAlerts(options);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] Alerts list error:', error);
|
||||
res.status(500).json({ error: 'Failed to list alerts' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/alerts/active
|
||||
* Get active alerts
|
||||
*/
|
||||
router.get('/alerts/active', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const activeAlerts = await alerts.getActiveAlerts();
|
||||
res.json(activeAlerts);
|
||||
} catch (error) {
|
||||
console.error('[System] Active alerts error:', error);
|
||||
res.status(500).json({ error: 'Failed to get active alerts' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/alerts/summary
|
||||
* Get alert summary
|
||||
*/
|
||||
router.get('/alerts/summary', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const summary = await alerts.getSummary();
|
||||
res.json(summary);
|
||||
} catch (error) {
|
||||
console.error('[System] Alerts summary error:', error);
|
||||
res.status(500).json({ error: 'Failed to get alerts summary' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/alerts/:id/acknowledge
|
||||
* Acknowledge an alert
|
||||
*/
|
||||
router.post('/alerts/:id/acknowledge', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const alertId = parseInt(req.params.id);
|
||||
const acknowledgedBy = req.body.acknowledgedBy || 'api';
|
||||
const success = await alerts.acknowledgeAlert(alertId, acknowledgedBy);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
console.error('[System] Acknowledge alert error:', error);
|
||||
res.status(500).json({ error: 'Failed to acknowledge alert' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/alerts/:id/resolve
|
||||
* Resolve an alert
|
||||
*/
|
||||
router.post('/alerts/:id/resolve', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const alertId = parseInt(req.params.id);
|
||||
const resolvedBy = req.body.resolvedBy || 'api';
|
||||
const success = await alerts.resolveAlert(alertId, resolvedBy);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
console.error('[System] Resolve alert error:', error);
|
||||
res.status(500).json({ error: 'Failed to resolve alert' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/alerts/bulk-acknowledge
|
||||
* Bulk acknowledge alerts
|
||||
*/
|
||||
router.post('/alerts/bulk-acknowledge', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { ids, acknowledgedBy } = req.body;
|
||||
if (!ids || !Array.isArray(ids)) {
|
||||
return res.status(400).json({ error: 'ids array is required' });
|
||||
}
|
||||
const count = await alerts.bulkAcknowledge(ids, acknowledgedBy || 'api');
|
||||
res.json({ acknowledged: count });
|
||||
} catch (error) {
|
||||
console.error('[System] Bulk acknowledge error:', error);
|
||||
res.status(500).json({ error: 'Failed to bulk acknowledge' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// METRICS ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/metrics
|
||||
* Get all current metrics
|
||||
*/
|
||||
router.get('/metrics', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const allMetrics = await metrics.getAllMetrics();
|
||||
res.json(allMetrics);
|
||||
} catch (error) {
|
||||
console.error('[System] Metrics error:', error);
|
||||
res.status(500).json({ error: 'Failed to get metrics' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/metrics/:name
|
||||
* Get a specific metric
|
||||
*/
|
||||
router.get('/metrics/:name', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const metric = await metrics.getMetric(req.params.name);
|
||||
if (!metric) {
|
||||
return res.status(404).json({ error: 'Metric not found' });
|
||||
}
|
||||
res.json(metric);
|
||||
} catch (error) {
|
||||
console.error('[System] Metric error:', error);
|
||||
res.status(500).json({ error: 'Failed to get metric' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/metrics/:name/history
|
||||
* Get metric time series
|
||||
*/
|
||||
router.get('/metrics/:name/history', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const hours = req.query.hours ? parseInt(req.query.hours as string) : 24;
|
||||
const history = await metrics.getMetricHistory(req.params.name, hours);
|
||||
res.json(history);
|
||||
} catch (error) {
|
||||
console.error('[System] Metric history error:', error);
|
||||
res.status(500).json({ error: 'Failed to get metric history' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/errors
|
||||
* Get error summary
|
||||
*/
|
||||
router.get('/errors', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const summary = await metrics.getErrorSummary();
|
||||
res.json(summary);
|
||||
} catch (error) {
|
||||
console.error('[System] Error summary error:', error);
|
||||
res.status(500).json({ error: 'Failed to get error summary' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/errors/recent
|
||||
* Get recent errors
|
||||
*/
|
||||
router.get('/errors/recent', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const limit = req.query.limit ? parseInt(req.query.limit as string) : 50;
|
||||
const errorType = req.query.type as string;
|
||||
const errors = await metrics.getRecentErrors(limit, errorType);
|
||||
res.json(errors);
|
||||
} catch (error) {
|
||||
console.error('[System] Recent errors error:', error);
|
||||
res.status(500).json({ error: 'Failed to get recent errors' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/errors/acknowledge
|
||||
* Acknowledge errors
|
||||
*/
|
||||
router.post('/errors/acknowledge', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { ids, acknowledgedBy } = req.body;
|
||||
if (!ids || !Array.isArray(ids)) {
|
||||
return res.status(400).json({ error: 'ids array is required' });
|
||||
}
|
||||
const count = await metrics.acknowledgeErrors(ids, acknowledgedBy || 'api');
|
||||
res.json({ acknowledged: count });
|
||||
} catch (error) {
|
||||
console.error('[System] Acknowledge errors error:', error);
|
||||
res.status(500).json({ error: 'Failed to acknowledge errors' });
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create Prometheus metrics endpoint (standalone)
|
||||
*/
|
||||
export function createPrometheusRouter(pool: Pool): Router {
|
||||
const router = Router();
|
||||
const metrics = new MetricsService(pool);
|
||||
|
||||
/**
|
||||
* GET /metrics
|
||||
* Prometheus-compatible metrics endpoint
|
||||
*/
|
||||
router.get('/', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const prometheusOutput = await metrics.getPrometheusMetrics();
|
||||
res.set('Content-Type', 'text/plain; version=0.0.4');
|
||||
res.send(prometheusOutput);
|
||||
} catch (error) {
|
||||
console.error('[Prometheus] Metrics error:', error);
|
||||
res.status(500).send('# Error generating metrics');
|
||||
}
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
@@ -7,6 +7,7 @@
|
||||
*
|
||||
* NO username/password auth in API. Use tokens only.
|
||||
*
|
||||
* Trusted origins are managed via /admin and stored in the trusted_origins table.
|
||||
* Localhost bypass: curl from 127.0.0.1 gets automatic admin access.
|
||||
*/
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
@@ -16,8 +17,8 @@ import { pool } from '../db/pool';
|
||||
|
||||
const JWT_SECRET = process.env.JWT_SECRET || 'change_this_in_production';
|
||||
|
||||
// Trusted origins that bypass auth for internal/same-origin requests
|
||||
const TRUSTED_ORIGINS = [
|
||||
// Fallback trusted origins (used if DB unavailable)
|
||||
const FALLBACK_TRUSTED_ORIGINS = [
|
||||
'https://cannaiq.co',
|
||||
'https://www.cannaiq.co',
|
||||
'https://findadispo.com',
|
||||
@@ -29,31 +30,108 @@ const TRUSTED_ORIGINS = [
|
||||
'http://localhost:5173',
|
||||
];
|
||||
|
||||
// Pattern-based trusted origins (wildcards)
|
||||
const TRUSTED_ORIGIN_PATTERNS = [
|
||||
/^https:\/\/.*\.cannabrands\.app$/, // *.cannabrands.app
|
||||
/^https:\/\/.*\.cannaiq\.co$/, // *.cannaiq.co
|
||||
const FALLBACK_TRUSTED_PATTERNS = [
|
||||
/^https:\/\/.*\.cannabrands\.app$/,
|
||||
/^https:\/\/.*\.cannaiq\.co$/,
|
||||
];
|
||||
|
||||
// Trusted IPs for internal pod-to-pod communication
|
||||
const TRUSTED_IPS = [
|
||||
const FALLBACK_TRUSTED_IPS = [
|
||||
'127.0.0.1',
|
||||
'::1',
|
||||
'::ffff:127.0.0.1',
|
||||
];
|
||||
|
||||
// Cache for DB-backed trusted origins
|
||||
let trustedOriginsCache: {
|
||||
ips: Set<string>;
|
||||
domains: Set<string>;
|
||||
patterns: RegExp[];
|
||||
loadedAt: Date;
|
||||
} | null = null;
|
||||
|
||||
/**
|
||||
* Load trusted origins from DB with caching (5 min TTL)
|
||||
*/
|
||||
async function loadTrustedOrigins(): Promise<{
|
||||
ips: Set<string>;
|
||||
domains: Set<string>;
|
||||
patterns: RegExp[];
|
||||
}> {
|
||||
// Return cached if fresh
|
||||
if (trustedOriginsCache) {
|
||||
const age = Date.now() - trustedOriginsCache.loadedAt.getTime();
|
||||
if (age < 5 * 60 * 1000) {
|
||||
return trustedOriginsCache;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await pool.query(`
|
||||
SELECT origin_type, origin_value
|
||||
FROM trusted_origins
|
||||
WHERE active = true
|
||||
`);
|
||||
|
||||
const ips = new Set<string>();
|
||||
const domains = new Set<string>();
|
||||
const patterns: RegExp[] = [];
|
||||
|
||||
for (const row of result.rows) {
|
||||
switch (row.origin_type) {
|
||||
case 'ip':
|
||||
ips.add(row.origin_value);
|
||||
break;
|
||||
case 'domain':
|
||||
// Store as full origin for comparison
|
||||
if (!row.origin_value.startsWith('http')) {
|
||||
domains.add(`https://${row.origin_value}`);
|
||||
domains.add(`http://${row.origin_value}`);
|
||||
} else {
|
||||
domains.add(row.origin_value);
|
||||
}
|
||||
break;
|
||||
case 'pattern':
|
||||
try {
|
||||
patterns.push(new RegExp(row.origin_value));
|
||||
} catch {
|
||||
console.warn(`[Auth] Invalid trusted origin pattern: ${row.origin_value}`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
trustedOriginsCache = { ips, domains, patterns, loadedAt: new Date() };
|
||||
return trustedOriginsCache;
|
||||
} catch (error) {
|
||||
// DB not available or table doesn't exist - use fallbacks
|
||||
return {
|
||||
ips: new Set(FALLBACK_TRUSTED_IPS),
|
||||
domains: new Set(FALLBACK_TRUSTED_ORIGINS),
|
||||
patterns: FALLBACK_TRUSTED_PATTERNS,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear trusted origins cache (called when admin updates origins)
|
||||
*/
|
||||
export function clearTrustedOriginsCache() {
|
||||
trustedOriginsCache = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if request is from a trusted origin/IP
|
||||
*/
|
||||
function isTrustedRequest(req: Request): boolean {
|
||||
async function isTrustedRequest(req: Request): Promise<boolean> {
|
||||
const { ips, domains, patterns } = await loadTrustedOrigins();
|
||||
|
||||
// Check origin header
|
||||
const origin = req.headers.origin;
|
||||
if (origin) {
|
||||
if (TRUSTED_ORIGINS.includes(origin)) {
|
||||
if (domains.has(origin)) {
|
||||
return true;
|
||||
}
|
||||
// Check pattern-based origins (wildcards like *.cannabrands.app)
|
||||
for (const pattern of TRUSTED_ORIGIN_PATTERNS) {
|
||||
for (const pattern of patterns) {
|
||||
if (pattern.test(origin)) {
|
||||
return true;
|
||||
}
|
||||
@@ -63,16 +141,15 @@ function isTrustedRequest(req: Request): boolean {
|
||||
// Check referer header (for same-origin requests without CORS)
|
||||
const referer = req.headers.referer;
|
||||
if (referer) {
|
||||
for (const trusted of TRUSTED_ORIGINS) {
|
||||
for (const trusted of domains) {
|
||||
if (referer.startsWith(trusted)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// Check pattern-based referers
|
||||
try {
|
||||
const refererUrl = new URL(referer);
|
||||
const refererOrigin = refererUrl.origin;
|
||||
for (const pattern of TRUSTED_ORIGIN_PATTERNS) {
|
||||
for (const pattern of patterns) {
|
||||
if (pattern.test(refererOrigin)) {
|
||||
return true;
|
||||
}
|
||||
@@ -84,7 +161,7 @@ function isTrustedRequest(req: Request): boolean {
|
||||
|
||||
// Check IP for internal requests (pod-to-pod, localhost)
|
||||
const clientIp = req.ip || req.socket.remoteAddress || '';
|
||||
if (TRUSTED_IPS.includes(clientIp)) {
|
||||
if (ips.has(clientIp)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -200,7 +277,7 @@ export async function authMiddleware(req: AuthRequest, res: Response, next: Next
|
||||
}
|
||||
|
||||
// No token provided - check trusted origins for API access (WordPress, etc.)
|
||||
if (isTrustedRequest(req)) {
|
||||
if (await isTrustedRequest(req)) {
|
||||
req.user = {
|
||||
id: 0,
|
||||
email: 'internal@system',
|
||||
|
||||
@@ -109,7 +109,7 @@ import scraperMonitorRoutes from './routes/scraper-monitor';
|
||||
import apiTokensRoutes from './routes/api-tokens';
|
||||
import apiPermissionsRoutes from './routes/api-permissions';
|
||||
import parallelScrapeRoutes from './routes/parallel-scrape';
|
||||
import crawlerSandboxRoutes from './routes/crawler-sandbox';
|
||||
// crawler-sandbox moved to _deprecated
|
||||
import versionRoutes from './routes/version';
|
||||
import deployStatusRoutes from './routes/deploy-status';
|
||||
import publicApiRoutes from './routes/public-api';
|
||||
@@ -147,6 +147,8 @@ import workerRegistryRoutes from './routes/worker-registry';
|
||||
// Per TASK_WORKFLOW_2024-12-10.md: Raw payload access API
|
||||
import payloadsRoutes from './routes/payloads';
|
||||
import k8sRoutes from './routes/k8s';
|
||||
import trustedOriginsRoutes from './routes/trusted-origins';
|
||||
|
||||
|
||||
// Mark requests from trusted domains (cannaiq.co, findagram.co, findadispo.com)
|
||||
// These domains can access the API without authentication
|
||||
@@ -187,7 +189,7 @@ app.use('/api/scraper-monitor', scraperMonitorRoutes);
|
||||
app.use('/api/api-tokens', apiTokensRoutes);
|
||||
app.use('/api/api-permissions', apiPermissionsRoutes);
|
||||
app.use('/api/parallel-scrape', parallelScrapeRoutes);
|
||||
app.use('/api/crawler-sandbox', crawlerSandboxRoutes);
|
||||
// crawler-sandbox moved to _deprecated
|
||||
app.use('/api/version', versionRoutes);
|
||||
app.use('/api/admin/deploy-status', deployStatusRoutes);
|
||||
console.log('[DeployStatus] Routes registered at /api/admin/deploy-status');
|
||||
@@ -200,6 +202,10 @@ app.use('/api/admin/orchestrator', orchestratorAdminRoutes);
|
||||
app.use('/api/admin/debug', adminDebugRoutes);
|
||||
console.log('[AdminDebug] Routes registered at /api/admin/debug');
|
||||
|
||||
// Admin routes - trusted origins management (IPs, domains that bypass auth)
|
||||
app.use('/api/admin/trusted-origins', trustedOriginsRoutes);
|
||||
console.log('[TrustedOrigins] Routes registered at /api/admin/trusted-origins');
|
||||
|
||||
// Admin routes - intelligence (brands, pricing analytics)
|
||||
app.use('/api/admin/intelligence', intelligenceRoutes);
|
||||
console.log('[Intelligence] Routes registered at /api/admin/intelligence');
|
||||
|
||||
@@ -28,8 +28,55 @@ const router = Router();
|
||||
const getDbPool = (): Pool => getPool() as unknown as Pool;
|
||||
|
||||
/**
|
||||
* GET /api/payloads
|
||||
* List payload metadata (paginated)
|
||||
* @swagger
|
||||
* /payloads:
|
||||
* get:
|
||||
* summary: List payload metadata
|
||||
* description: Returns paginated list of raw crawl payload metadata. Does not include the actual payload data.
|
||||
* tags: [Payloads]
|
||||
* parameters:
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 50
|
||||
* maximum: 100
|
||||
* description: Number of payloads to return
|
||||
* - in: query
|
||||
* name: offset
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 0
|
||||
* description: Number of payloads to skip
|
||||
* - in: query
|
||||
* name: dispensary_id
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Filter by dispensary ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: List of payload metadata
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* payloads:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/PayloadMetadata'
|
||||
* pagination:
|
||||
* type: object
|
||||
* properties:
|
||||
* limit:
|
||||
* type: integer
|
||||
* offset:
|
||||
* type: integer
|
||||
* 500:
|
||||
* description: Server error
|
||||
*/
|
||||
router.get('/', async (req: Request, res: Response) => {
|
||||
try {
|
||||
@@ -56,8 +103,35 @@ router.get('/', async (req: Request, res: Response) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/payloads/:id
|
||||
* Get payload metadata by ID
|
||||
* @swagger
|
||||
* /payloads/{id}:
|
||||
* get:
|
||||
* summary: Get payload metadata by ID
|
||||
* description: Returns metadata for a specific payload including dispensary name, size, and timestamps.
|
||||
* tags: [Payloads]
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Payload ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Payload metadata
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* payload:
|
||||
* $ref: '#/components/schemas/PayloadMetadata'
|
||||
* 404:
|
||||
* description: Payload not found
|
||||
* 500:
|
||||
* description: Server error
|
||||
*/
|
||||
router.get('/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
@@ -97,8 +171,43 @@ router.get('/:id', async (req: Request, res: Response) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/payloads/:id/data
|
||||
* Get full payload JSON (decompressed from disk)
|
||||
* @swagger
|
||||
* /payloads/{id}/data:
|
||||
* get:
|
||||
* summary: Get full payload data
|
||||
* description: Returns the complete raw crawl payload JSON, decompressed from disk. This includes all products from the crawl.
|
||||
* tags: [Payloads]
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: id
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Payload ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Full payload data
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* metadata:
|
||||
* $ref: '#/components/schemas/PayloadMetadata'
|
||||
* data:
|
||||
* type: object
|
||||
* description: Raw GraphQL response with products array
|
||||
* properties:
|
||||
* products:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* 404:
|
||||
* description: Payload not found
|
||||
* 500:
|
||||
* description: Server error
|
||||
*/
|
||||
router.get('/:id/data', async (req: Request, res: Response) => {
|
||||
try {
|
||||
@@ -123,8 +232,48 @@ router.get('/:id/data', async (req: Request, res: Response) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/payloads/store/:dispensaryId
|
||||
* List payloads for a specific store
|
||||
* @swagger
|
||||
* /payloads/store/{dispensaryId}:
|
||||
* get:
|
||||
* summary: List payloads for a store
|
||||
* description: Returns paginated list of payload metadata for a specific dispensary.
|
||||
* tags: [Payloads]
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: dispensaryId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Dispensary ID
|
||||
* - in: query
|
||||
* name: limit
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 20
|
||||
* maximum: 100
|
||||
* - in: query
|
||||
* name: offset
|
||||
* schema:
|
||||
* type: integer
|
||||
* default: 0
|
||||
* responses:
|
||||
* 200:
|
||||
* description: List of payloads for store
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* dispensaryId:
|
||||
* type: integer
|
||||
* payloads:
|
||||
* type: array
|
||||
* items:
|
||||
* $ref: '#/components/schemas/PayloadMetadata'
|
||||
* 500:
|
||||
* description: Server error
|
||||
*/
|
||||
router.get('/store/:dispensaryId', async (req: Request, res: Response) => {
|
||||
try {
|
||||
@@ -152,8 +301,42 @@ router.get('/store/:dispensaryId', async (req: Request, res: Response) => {
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/payloads/store/:dispensaryId/latest
|
||||
* Get the latest payload for a store (with full data)
|
||||
* @swagger
|
||||
* /payloads/store/{dispensaryId}/latest:
|
||||
* get:
|
||||
* summary: Get latest payload for a store
|
||||
* description: Returns the most recent raw crawl payload for a dispensary, including full product data.
|
||||
* tags: [Payloads]
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: dispensaryId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Dispensary ID
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Latest payload with full data
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* metadata:
|
||||
* $ref: '#/components/schemas/PayloadMetadata'
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* products:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* 404:
|
||||
* description: No payloads found for dispensary
|
||||
* 500:
|
||||
* description: Server error
|
||||
*/
|
||||
router.get('/store/:dispensaryId/latest', async (req: Request, res: Response) => {
|
||||
try {
|
||||
@@ -181,12 +364,107 @@ router.get('/store/:dispensaryId/latest', async (req: Request, res: Response) =>
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/payloads/store/:dispensaryId/diff
|
||||
* Compare two payloads for a store
|
||||
*
|
||||
* Query params:
|
||||
* - from: payload ID (older)
|
||||
* - to: payload ID (newer) - optional, defaults to latest
|
||||
* @swagger
|
||||
* /payloads/store/{dispensaryId}/diff:
|
||||
* get:
|
||||
* summary: Compare two payloads
|
||||
* description: |
|
||||
* Compares two crawl payloads for a store and returns the differences.
|
||||
* If no IDs are provided, compares the two most recent payloads.
|
||||
* Returns added products, removed products, price changes, and stock changes.
|
||||
* tags: [Payloads]
|
||||
* parameters:
|
||||
* - in: path
|
||||
* name: dispensaryId
|
||||
* required: true
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Dispensary ID
|
||||
* - in: query
|
||||
* name: from
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Older payload ID (optional)
|
||||
* - in: query
|
||||
* name: to
|
||||
* schema:
|
||||
* type: integer
|
||||
* description: Newer payload ID (optional)
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Payload diff results
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* from:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: integer
|
||||
* fetchedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* productCount:
|
||||
* type: integer
|
||||
* to:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: integer
|
||||
* fetchedAt:
|
||||
* type: string
|
||||
* format: date-time
|
||||
* productCount:
|
||||
* type: integer
|
||||
* diff:
|
||||
* type: object
|
||||
* properties:
|
||||
* added:
|
||||
* type: integer
|
||||
* removed:
|
||||
* type: integer
|
||||
* priceChanges:
|
||||
* type: integer
|
||||
* stockChanges:
|
||||
* type: integer
|
||||
* details:
|
||||
* type: object
|
||||
* properties:
|
||||
* added:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* removed:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* priceChanges:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* properties:
|
||||
* id:
|
||||
* type: string
|
||||
* name:
|
||||
* type: string
|
||||
* oldPrice:
|
||||
* type: number
|
||||
* newPrice:
|
||||
* type: number
|
||||
* stockChanges:
|
||||
* type: array
|
||||
* items:
|
||||
* type: object
|
||||
* 400:
|
||||
* description: Need at least 2 payloads to diff
|
||||
* 404:
|
||||
* description: One or both payloads not found
|
||||
* 500:
|
||||
* description: Server error
|
||||
*/
|
||||
router.get('/store/:dispensaryId/diff', async (req: Request, res: Response) => {
|
||||
try {
|
||||
@@ -331,4 +609,370 @@ router.get('/store/:dispensaryId/diff', async (req: Request, res: Response) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/payloads/store/:dispensaryId/query
|
||||
* Query products from the latest payload with flexible filters
|
||||
*
|
||||
* Query params:
|
||||
* - brand: Filter by brand name (partial match)
|
||||
* - category: Filter by category (exact match)
|
||||
* - subcategory: Filter by subcategory
|
||||
* - strain_type: Filter by strain type (indica, sativa, hybrid, cbd)
|
||||
* - in_stock: Filter by stock status (true/false)
|
||||
* - price_min: Minimum price
|
||||
* - price_max: Maximum price
|
||||
* - thc_min: Minimum THC percentage
|
||||
* - thc_max: Maximum THC percentage
|
||||
* - search: Search product name (partial match)
|
||||
* - fields: Comma-separated list of fields to return
|
||||
* - limit: Max results (default 100, max 1000)
|
||||
* - offset: Skip results for pagination
|
||||
* - sort: Sort field (name, price, thc, brand)
|
||||
* - order: Sort order (asc, desc)
|
||||
*/
|
||||
router.get('/store/:dispensaryId/query', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const pool = getDbPool();
|
||||
const dispensaryId = parseInt(req.params.dispensaryId);
|
||||
|
||||
// Get latest payload
|
||||
const result = await getLatestPayload(pool, dispensaryId);
|
||||
|
||||
if (!result) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: `No payloads found for dispensary ${dispensaryId}`,
|
||||
});
|
||||
}
|
||||
|
||||
let products = result.payload.products || [];
|
||||
|
||||
// Parse query params
|
||||
const {
|
||||
brand,
|
||||
category,
|
||||
subcategory,
|
||||
strain_type,
|
||||
in_stock,
|
||||
price_min,
|
||||
price_max,
|
||||
thc_min,
|
||||
thc_max,
|
||||
search,
|
||||
fields,
|
||||
limit: limitStr,
|
||||
offset: offsetStr,
|
||||
sort,
|
||||
order,
|
||||
} = req.query;
|
||||
|
||||
// Apply filters
|
||||
if (brand) {
|
||||
const brandLower = (brand as string).toLowerCase();
|
||||
products = products.filter((p: any) =>
|
||||
p.brand?.name?.toLowerCase().includes(brandLower)
|
||||
);
|
||||
}
|
||||
|
||||
if (category) {
|
||||
const catLower = (category as string).toLowerCase();
|
||||
products = products.filter((p: any) =>
|
||||
p.category?.toLowerCase() === catLower ||
|
||||
p.Category?.toLowerCase() === catLower
|
||||
);
|
||||
}
|
||||
|
||||
if (subcategory) {
|
||||
const subLower = (subcategory as string).toLowerCase();
|
||||
products = products.filter((p: any) =>
|
||||
p.subcategory?.toLowerCase() === subLower ||
|
||||
p.subCategory?.toLowerCase() === subLower
|
||||
);
|
||||
}
|
||||
|
||||
if (strain_type) {
|
||||
const strainLower = (strain_type as string).toLowerCase();
|
||||
products = products.filter((p: any) =>
|
||||
p.strainType?.toLowerCase() === strainLower ||
|
||||
p.strain_type?.toLowerCase() === strainLower
|
||||
);
|
||||
}
|
||||
|
||||
if (in_stock !== undefined) {
|
||||
const wantInStock = in_stock === 'true';
|
||||
products = products.filter((p: any) => {
|
||||
const status = p.Status || p.status;
|
||||
const isInStock = status === 'Active' || status === 'In Stock' || status === 'in_stock';
|
||||
return wantInStock ? isInStock : !isInStock;
|
||||
});
|
||||
}
|
||||
|
||||
if (price_min !== undefined) {
|
||||
const min = parseFloat(price_min as string);
|
||||
products = products.filter((p: any) => {
|
||||
const price = p.Prices?.[0]?.price || p.price;
|
||||
return price >= min;
|
||||
});
|
||||
}
|
||||
|
||||
if (price_max !== undefined) {
|
||||
const max = parseFloat(price_max as string);
|
||||
products = products.filter((p: any) => {
|
||||
const price = p.Prices?.[0]?.price || p.price;
|
||||
return price <= max;
|
||||
});
|
||||
}
|
||||
|
||||
if (thc_min !== undefined) {
|
||||
const min = parseFloat(thc_min as string);
|
||||
products = products.filter((p: any) => {
|
||||
const thc = p.potencyThc?.formatted || p.thc || 0;
|
||||
const thcNum = typeof thc === 'string' ? parseFloat(thc) : thc;
|
||||
return thcNum >= min;
|
||||
});
|
||||
}
|
||||
|
||||
if (thc_max !== undefined) {
|
||||
const max = parseFloat(thc_max as string);
|
||||
products = products.filter((p: any) => {
|
||||
const thc = p.potencyThc?.formatted || p.thc || 0;
|
||||
const thcNum = typeof thc === 'string' ? parseFloat(thc) : thc;
|
||||
return thcNum <= max;
|
||||
});
|
||||
}
|
||||
|
||||
if (search) {
|
||||
const searchLower = (search as string).toLowerCase();
|
||||
products = products.filter((p: any) =>
|
||||
p.name?.toLowerCase().includes(searchLower)
|
||||
);
|
||||
}
|
||||
|
||||
// Sort
|
||||
if (sort) {
|
||||
const sortOrder = order === 'desc' ? -1 : 1;
|
||||
products.sort((a: any, b: any) => {
|
||||
let aVal: any, bVal: any;
|
||||
switch (sort) {
|
||||
case 'name':
|
||||
aVal = a.name || '';
|
||||
bVal = b.name || '';
|
||||
break;
|
||||
case 'price':
|
||||
aVal = a.Prices?.[0]?.price || a.price || 0;
|
||||
bVal = b.Prices?.[0]?.price || b.price || 0;
|
||||
break;
|
||||
case 'thc':
|
||||
aVal = parseFloat(a.potencyThc?.formatted || a.thc || '0');
|
||||
bVal = parseFloat(b.potencyThc?.formatted || b.thc || '0');
|
||||
break;
|
||||
case 'brand':
|
||||
aVal = a.brand?.name || '';
|
||||
bVal = b.brand?.name || '';
|
||||
break;
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
if (aVal < bVal) return -1 * sortOrder;
|
||||
if (aVal > bVal) return 1 * sortOrder;
|
||||
return 0;
|
||||
});
|
||||
}
|
||||
|
||||
// Pagination
|
||||
const totalCount = products.length;
|
||||
const limit = Math.min(parseInt(limitStr as string) || 100, 1000);
|
||||
const offset = parseInt(offsetStr as string) || 0;
|
||||
products = products.slice(offset, offset + limit);
|
||||
|
||||
// Field selection - normalize product structure
|
||||
const normalizedProducts = products.map((p: any) => {
|
||||
const normalized: any = {
|
||||
id: p._id || p.id,
|
||||
name: p.name,
|
||||
brand: p.brand?.name || p.brandName,
|
||||
category: p.category || p.Category,
|
||||
subcategory: p.subcategory || p.subCategory,
|
||||
strain_type: p.strainType || p.strain_type,
|
||||
price: p.Prices?.[0]?.price || p.price,
|
||||
price_med: p.Prices?.[0]?.priceMed || p.priceMed,
|
||||
price_rec: p.Prices?.[0]?.priceRec || p.priceRec,
|
||||
thc: p.potencyThc?.formatted || p.thc,
|
||||
cbd: p.potencyCbd?.formatted || p.cbd,
|
||||
weight: p.Prices?.[0]?.weight || p.weight,
|
||||
status: p.Status || p.status,
|
||||
in_stock: (p.Status || p.status) === 'Active',
|
||||
image_url: p.image || p.imageUrl || p.image_url,
|
||||
description: p.description,
|
||||
};
|
||||
|
||||
// If specific fields requested, filter
|
||||
if (fields) {
|
||||
const requestedFields = (fields as string).split(',').map(f => f.trim());
|
||||
const filtered: any = {};
|
||||
for (const field of requestedFields) {
|
||||
if (normalized.hasOwnProperty(field)) {
|
||||
filtered[field] = normalized[field];
|
||||
}
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
});
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
dispensaryId,
|
||||
payloadId: result.metadata.id,
|
||||
fetchedAt: result.metadata.fetchedAt,
|
||||
query: {
|
||||
filters: {
|
||||
brand: brand || null,
|
||||
category: category || null,
|
||||
subcategory: subcategory || null,
|
||||
strain_type: strain_type || null,
|
||||
in_stock: in_stock || null,
|
||||
price_min: price_min || null,
|
||||
price_max: price_max || null,
|
||||
thc_min: thc_min || null,
|
||||
thc_max: thc_max || null,
|
||||
search: search || null,
|
||||
},
|
||||
sort: sort || null,
|
||||
order: order || 'asc',
|
||||
limit,
|
||||
offset,
|
||||
},
|
||||
pagination: {
|
||||
total: totalCount,
|
||||
returned: normalizedProducts.length,
|
||||
limit,
|
||||
offset,
|
||||
has_more: offset + limit < totalCount,
|
||||
},
|
||||
products: normalizedProducts,
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[Payloads] Query error:', error.message);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/payloads/store/:dispensaryId/aggregate
|
||||
* Aggregate data from the latest payload
|
||||
*
|
||||
* Query params:
|
||||
* - group_by: Field to group by (brand, category, subcategory, strain_type)
|
||||
* - metrics: Comma-separated metrics (count, avg_price, min_price, max_price, avg_thc)
|
||||
*/
|
||||
router.get('/store/:dispensaryId/aggregate', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const pool = getDbPool();
|
||||
const dispensaryId = parseInt(req.params.dispensaryId);
|
||||
|
||||
const result = await getLatestPayload(pool, dispensaryId);
|
||||
|
||||
if (!result) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: `No payloads found for dispensary ${dispensaryId}`,
|
||||
});
|
||||
}
|
||||
|
||||
const products = result.payload.products || [];
|
||||
const groupBy = req.query.group_by as string;
|
||||
const metricsParam = req.query.metrics as string || 'count';
|
||||
const metrics = metricsParam.split(',').map(m => m.trim());
|
||||
|
||||
if (!groupBy) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'group_by parameter is required (brand, category, subcategory, strain_type)',
|
||||
});
|
||||
}
|
||||
|
||||
// Group products
|
||||
const groups: Map<string, any[]> = new Map();
|
||||
for (const p of products) {
|
||||
let key: string;
|
||||
switch (groupBy) {
|
||||
case 'brand':
|
||||
key = p.brand?.name || 'Unknown';
|
||||
break;
|
||||
case 'category':
|
||||
key = p.category || p.Category || 'Unknown';
|
||||
break;
|
||||
case 'subcategory':
|
||||
key = p.subcategory || p.subCategory || 'Unknown';
|
||||
break;
|
||||
case 'strain_type':
|
||||
key = p.strainType || p.strain_type || 'Unknown';
|
||||
break;
|
||||
default:
|
||||
key = 'Unknown';
|
||||
}
|
||||
|
||||
if (!groups.has(key)) {
|
||||
groups.set(key, []);
|
||||
}
|
||||
groups.get(key)!.push(p);
|
||||
}
|
||||
|
||||
// Calculate metrics
|
||||
const aggregations: any[] = [];
|
||||
for (const [key, items] of groups) {
|
||||
const agg: any = { [groupBy]: key };
|
||||
|
||||
for (const metric of metrics) {
|
||||
switch (metric) {
|
||||
case 'count':
|
||||
agg.count = items.length;
|
||||
break;
|
||||
case 'avg_price':
|
||||
const prices = items.map(p => p.Prices?.[0]?.price || p.price).filter(p => p != null);
|
||||
agg.avg_price = prices.length > 0 ? prices.reduce((a, b) => a + b, 0) / prices.length : null;
|
||||
break;
|
||||
case 'min_price':
|
||||
const minPrices = items.map(p => p.Prices?.[0]?.price || p.price).filter(p => p != null);
|
||||
agg.min_price = minPrices.length > 0 ? Math.min(...minPrices) : null;
|
||||
break;
|
||||
case 'max_price':
|
||||
const maxPrices = items.map(p => p.Prices?.[0]?.price || p.price).filter(p => p != null);
|
||||
agg.max_price = maxPrices.length > 0 ? Math.max(...maxPrices) : null;
|
||||
break;
|
||||
case 'avg_thc':
|
||||
const thcs = items.map(p => parseFloat(p.potencyThc?.formatted || p.thc || '0')).filter(t => t > 0);
|
||||
agg.avg_thc = thcs.length > 0 ? thcs.reduce((a, b) => a + b, 0) / thcs.length : null;
|
||||
break;
|
||||
case 'in_stock_count':
|
||||
agg.in_stock_count = items.filter(p => (p.Status || p.status) === 'Active').length;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
aggregations.push(agg);
|
||||
}
|
||||
|
||||
// Sort by count descending
|
||||
aggregations.sort((a, b) => (b.count || 0) - (a.count || 0));
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
dispensaryId,
|
||||
payloadId: result.metadata.id,
|
||||
fetchedAt: result.metadata.fetchedAt,
|
||||
groupBy,
|
||||
metrics,
|
||||
totalProducts: products.length,
|
||||
groupCount: aggregations.length,
|
||||
aggregations,
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[Payloads] Aggregate error:', error.message);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
@@ -278,7 +278,7 @@ router.post('/update-locations', requireRole('superadmin', 'admin'), async (req,
|
||||
|
||||
// Run in background
|
||||
updateAllProxyLocations().catch(err => {
|
||||
console.error('❌ Location update failed:', err);
|
||||
console.error('Location update failed:', err);
|
||||
});
|
||||
|
||||
res.json({ message: 'Location update job started' });
|
||||
|
||||
224
backend/src/routes/trusted-origins.ts
Normal file
224
backend/src/routes/trusted-origins.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
/**
|
||||
* Trusted Origins Admin Routes
|
||||
*
|
||||
* Manage IPs and domains that bypass API key authentication.
|
||||
* Available at /api/admin/trusted-origins
|
||||
*/
|
||||
|
||||
import { Router, Response } from 'express';
|
||||
import { pool } from '../db/pool';
|
||||
import { AuthRequest, authMiddleware, requireRole, clearTrustedOriginsCache } from '../auth/middleware';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// All routes require admin auth
|
||||
router.use(authMiddleware);
|
||||
router.use(requireRole('admin', 'superadmin'));
|
||||
|
||||
/**
|
||||
* GET /api/admin/trusted-origins
|
||||
* List all trusted origins
|
||||
*/
|
||||
router.get('/', async (req: AuthRequest, res: Response) => {
|
||||
try {
|
||||
const result = await pool.query(`
|
||||
SELECT
|
||||
id,
|
||||
origin_type,
|
||||
origin_value,
|
||||
description,
|
||||
active,
|
||||
created_at,
|
||||
updated_at
|
||||
FROM trusted_origins
|
||||
ORDER BY origin_type, origin_value
|
||||
`);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
origins: result.rows,
|
||||
counts: {
|
||||
total: result.rows.length,
|
||||
active: result.rows.filter(r => r.active).length,
|
||||
ips: result.rows.filter(r => r.origin_type === 'ip').length,
|
||||
domains: result.rows.filter(r => r.origin_type === 'domain').length,
|
||||
patterns: result.rows.filter(r => r.origin_type === 'pattern').length,
|
||||
},
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[TrustedOrigins] List error:', error.message);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/admin/trusted-origins
|
||||
* Add a new trusted origin
|
||||
*/
|
||||
router.post('/', async (req: AuthRequest, res: Response) => {
|
||||
try {
|
||||
const { origin_type, origin_value, description } = req.body;
|
||||
|
||||
if (!origin_type || !origin_value) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'origin_type and origin_value are required',
|
||||
});
|
||||
}
|
||||
|
||||
if (!['ip', 'domain', 'pattern'].includes(origin_type)) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'origin_type must be: ip, domain, or pattern',
|
||||
});
|
||||
}
|
||||
|
||||
// Validate pattern if regex
|
||||
if (origin_type === 'pattern') {
|
||||
try {
|
||||
new RegExp(origin_value);
|
||||
} catch {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid regex pattern',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const result = await pool.query(`
|
||||
INSERT INTO trusted_origins (origin_type, origin_value, description, created_by)
|
||||
VALUES ($1, $2, $3, $4)
|
||||
RETURNING id, origin_type, origin_value, description, active, created_at
|
||||
`, [origin_type, origin_value, description || null, req.user?.id || null]);
|
||||
|
||||
// Invalidate cache
|
||||
clearTrustedOriginsCache();
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
origin: result.rows[0],
|
||||
});
|
||||
} catch (error: any) {
|
||||
if (error.code === '23505') {
|
||||
return res.status(409).json({
|
||||
success: false,
|
||||
error: 'This origin already exists',
|
||||
});
|
||||
}
|
||||
console.error('[TrustedOrigins] Add error:', error.message);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* PUT /api/admin/trusted-origins/:id
|
||||
* Update a trusted origin
|
||||
*/
|
||||
router.put('/:id', async (req: AuthRequest, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id);
|
||||
const { origin_type, origin_value, description, active } = req.body;
|
||||
|
||||
// Validate pattern if regex
|
||||
if (origin_type === 'pattern' && origin_value) {
|
||||
try {
|
||||
new RegExp(origin_value);
|
||||
} catch {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'Invalid regex pattern',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const result = await pool.query(`
|
||||
UPDATE trusted_origins
|
||||
SET
|
||||
origin_type = COALESCE($1, origin_type),
|
||||
origin_value = COALESCE($2, origin_value),
|
||||
description = COALESCE($3, description),
|
||||
active = COALESCE($4, active),
|
||||
updated_at = NOW()
|
||||
WHERE id = $5
|
||||
RETURNING id, origin_type, origin_value, description, active, updated_at
|
||||
`, [origin_type, origin_value, description, active, id]);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ success: false, error: 'Origin not found' });
|
||||
}
|
||||
|
||||
// Invalidate cache
|
||||
clearTrustedOriginsCache();
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
origin: result.rows[0],
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[TrustedOrigins] Update error:', error.message);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* DELETE /api/admin/trusted-origins/:id
|
||||
* Delete a trusted origin
|
||||
*/
|
||||
router.delete('/:id', async (req: AuthRequest, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id);
|
||||
|
||||
const result = await pool.query(`
|
||||
DELETE FROM trusted_origins WHERE id = $1 RETURNING id, origin_value
|
||||
`, [id]);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ success: false, error: 'Origin not found' });
|
||||
}
|
||||
|
||||
// Invalidate cache
|
||||
clearTrustedOriginsCache();
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
deleted: result.rows[0],
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[TrustedOrigins] Delete error:', error.message);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/admin/trusted-origins/:id/toggle
|
||||
* Toggle active status
|
||||
*/
|
||||
router.post('/:id/toggle', async (req: AuthRequest, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id);
|
||||
|
||||
const result = await pool.query(`
|
||||
UPDATE trusted_origins
|
||||
SET active = NOT active, updated_at = NOW()
|
||||
WHERE id = $1
|
||||
RETURNING id, origin_type, origin_value, active
|
||||
`, [id]);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ success: false, error: 'Origin not found' });
|
||||
}
|
||||
|
||||
// Invalidate cache
|
||||
clearTrustedOriginsCache();
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
origin: result.rows[0],
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error('[TrustedOrigins] Toggle error:', error.message);
|
||||
res.status(500).json({ success: false, error: error.message });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
@@ -355,6 +355,12 @@ router.get('/workers', async (req: Request, res: Response) => {
|
||||
-- Decommission fields
|
||||
COALESCE(decommission_requested, false) as decommission_requested,
|
||||
decommission_reason,
|
||||
-- Preflight fields (dual-transport verification)
|
||||
curl_ip,
|
||||
http_ip,
|
||||
preflight_status,
|
||||
preflight_at,
|
||||
fingerprint_data,
|
||||
-- Full metadata for resources
|
||||
metadata,
|
||||
EXTRACT(EPOCH FROM (NOW() - last_heartbeat_at)) as seconds_since_heartbeat,
|
||||
|
||||
@@ -683,6 +683,118 @@ export class CrawlRotator {
|
||||
const current = this.proxy.getCurrent();
|
||||
return current?.timezone;
|
||||
}
|
||||
|
||||
/**
|
||||
* Preflight check - verifies proxy and anti-detect are working
|
||||
* MUST be called before any task execution to ensure anonymity.
|
||||
*
|
||||
* Tests:
|
||||
* 1. Proxy available - a proxy must be loaded and active
|
||||
* 2. Proxy connectivity - makes HTTP request through proxy to verify connection
|
||||
* 3. Anti-detect headers - verifies fingerprint is set with required headers
|
||||
*
|
||||
* @returns Promise<PreflightResult> with pass/fail status and details
|
||||
*/
|
||||
async preflight(): Promise<PreflightResult> {
|
||||
const result: PreflightResult = {
|
||||
passed: false,
|
||||
proxyAvailable: false,
|
||||
proxyConnected: false,
|
||||
antidetectReady: false,
|
||||
proxyIp: null,
|
||||
fingerprint: null,
|
||||
error: null,
|
||||
responseTimeMs: null,
|
||||
};
|
||||
|
||||
// Step 1: Check proxy is available
|
||||
const currentProxy = this.proxy.getCurrent();
|
||||
if (!currentProxy) {
|
||||
result.error = 'No proxy available';
|
||||
console.log('[Preflight] FAILED - No proxy available');
|
||||
return result;
|
||||
}
|
||||
result.proxyAvailable = true;
|
||||
result.proxyIp = currentProxy.host;
|
||||
|
||||
// Step 2: Check fingerprint/anti-detect is ready
|
||||
const fingerprint = this.userAgent.getCurrent();
|
||||
if (!fingerprint || !fingerprint.userAgent) {
|
||||
result.error = 'Anti-detect fingerprint not initialized';
|
||||
console.log('[Preflight] FAILED - No fingerprint');
|
||||
return result;
|
||||
}
|
||||
result.antidetectReady = true;
|
||||
result.fingerprint = {
|
||||
userAgent: fingerprint.userAgent,
|
||||
browserName: fingerprint.browserName,
|
||||
deviceCategory: fingerprint.deviceCategory,
|
||||
};
|
||||
|
||||
// Step 3: Test proxy connectivity with an actual HTTP request
|
||||
// Use httpbin.org/ip to verify request goes through proxy
|
||||
const proxyUrl = this.proxy.getProxyUrl(currentProxy);
|
||||
const testUrl = 'https://httpbin.org/ip';
|
||||
|
||||
try {
|
||||
const { default: axios } = await import('axios');
|
||||
const { HttpsProxyAgent } = await import('https-proxy-agent');
|
||||
|
||||
const agent = new HttpsProxyAgent(proxyUrl);
|
||||
const startTime = Date.now();
|
||||
|
||||
const response = await axios.get(testUrl, {
|
||||
httpsAgent: agent,
|
||||
timeout: 15000, // 15 second timeout
|
||||
headers: {
|
||||
'User-Agent': fingerprint.userAgent,
|
||||
'Accept-Language': fingerprint.acceptLanguage,
|
||||
...(fingerprint.secChUa && { 'sec-ch-ua': fingerprint.secChUa }),
|
||||
...(fingerprint.secChUaPlatform && { 'sec-ch-ua-platform': fingerprint.secChUaPlatform }),
|
||||
...(fingerprint.secChUaMobile && { 'sec-ch-ua-mobile': fingerprint.secChUaMobile }),
|
||||
},
|
||||
});
|
||||
|
||||
result.responseTimeMs = Date.now() - startTime;
|
||||
result.proxyConnected = true;
|
||||
result.passed = true;
|
||||
|
||||
// Mark success on proxy stats
|
||||
await this.proxy.markSuccess(currentProxy.id, result.responseTimeMs);
|
||||
|
||||
console.log(`[Preflight] PASSED - Proxy ${currentProxy.host} connected (${result.responseTimeMs}ms), UA: ${fingerprint.browserName}/${fingerprint.deviceCategory}`);
|
||||
} catch (err: any) {
|
||||
result.error = `Proxy connection failed: ${err.message || 'Unknown error'}`;
|
||||
console.log(`[Preflight] FAILED - Proxy connection error: ${err.message}`);
|
||||
|
||||
// Mark failure on proxy stats
|
||||
await this.proxy.markFailed(currentProxy.id, err.message);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Result from preflight check
|
||||
*/
|
||||
export interface PreflightResult {
  /** Overall pass/fail — true only when all three checks below succeeded */
  passed: boolean;
  /** Step 1: Is a proxy loaded and active? */
  proxyAvailable: boolean;
  /** Step 2: Did the test HTTP request through the proxy succeed? */
  proxyConnected: boolean;
  /** Step 3: Is the fingerprint/anti-detect profile ready? */
  antidetectReady: boolean;
  /** Current proxy host (set as soon as a proxy is found, even if later steps fail) */
  proxyIp: string | null;
  /** Fingerprint summary used for the test request */
  fingerprint: { userAgent: string; browserName: string; deviceCategory: string } | null;
  /** Error message describing which step failed; null on success */
  error: string | null;
  /** Round-trip time of the proxy test request in ms; null if the request was not made or failed */
  responseTimeMs: number | null;
}
|
||||
|
||||
// ============================================================
|
||||
|
||||
100
backend/src/services/curl-preflight.ts
Normal file
100
backend/src/services/curl-preflight.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* Curl Preflight - Verify curl/axios transport works through proxy
|
||||
*
|
||||
* Tests:
|
||||
* 1. Proxy is available and active
|
||||
* 2. HTTP request through proxy succeeds
|
||||
* 3. Anti-detect headers are properly set
|
||||
*
|
||||
* Use case: Fast, simple API requests that don't need browser fingerprint
|
||||
*/
|
||||
|
||||
import axios from 'axios';
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent';
|
||||
import { CrawlRotator, PreflightResult } from './crawl-rotator';
|
||||
|
||||
export interface CurlPreflightResult extends PreflightResult {
|
||||
method: 'curl';
|
||||
}
|
||||
|
||||
/**
|
||||
* Run curl preflight check
|
||||
* Tests proxy connectivity using axios/curl through the proxy
|
||||
*/
|
||||
export async function runCurlPreflight(
|
||||
crawlRotator: CrawlRotator
|
||||
): Promise<CurlPreflightResult> {
|
||||
const result: CurlPreflightResult = {
|
||||
method: 'curl',
|
||||
passed: false,
|
||||
proxyAvailable: false,
|
||||
proxyConnected: false,
|
||||
antidetectReady: false,
|
||||
proxyIp: null,
|
||||
fingerprint: null,
|
||||
error: null,
|
||||
responseTimeMs: null,
|
||||
};
|
||||
|
||||
// Step 1: Check proxy is available
|
||||
const currentProxy = crawlRotator.proxy.getCurrent();
|
||||
if (!currentProxy) {
|
||||
result.error = 'No proxy available';
|
||||
console.log('[CurlPreflight] FAILED - No proxy available');
|
||||
return result;
|
||||
}
|
||||
result.proxyAvailable = true;
|
||||
result.proxyIp = currentProxy.host;
|
||||
|
||||
// Step 2: Check fingerprint/anti-detect is ready
|
||||
const fingerprint = crawlRotator.userAgent.getCurrent();
|
||||
if (!fingerprint || !fingerprint.userAgent) {
|
||||
result.error = 'Anti-detect fingerprint not initialized';
|
||||
console.log('[CurlPreflight] FAILED - No fingerprint');
|
||||
return result;
|
||||
}
|
||||
result.antidetectReady = true;
|
||||
result.fingerprint = {
|
||||
userAgent: fingerprint.userAgent,
|
||||
browserName: fingerprint.browserName,
|
||||
deviceCategory: fingerprint.deviceCategory,
|
||||
};
|
||||
|
||||
// Step 3: Test proxy connectivity with an actual HTTP request
|
||||
const proxyUrl = crawlRotator.proxy.getProxyUrl(currentProxy);
|
||||
const testUrl = 'https://httpbin.org/ip';
|
||||
|
||||
try {
|
||||
const agent = new HttpsProxyAgent(proxyUrl);
|
||||
const startTime = Date.now();
|
||||
|
||||
const response = await axios.get(testUrl, {
|
||||
httpsAgent: agent,
|
||||
timeout: 15000, // 15 second timeout
|
||||
headers: {
|
||||
'User-Agent': fingerprint.userAgent,
|
||||
'Accept-Language': fingerprint.acceptLanguage,
|
||||
...(fingerprint.secChUa && { 'sec-ch-ua': fingerprint.secChUa }),
|
||||
...(fingerprint.secChUaPlatform && { 'sec-ch-ua-platform': fingerprint.secChUaPlatform }),
|
||||
...(fingerprint.secChUaMobile && { 'sec-ch-ua-mobile': fingerprint.secChUaMobile }),
|
||||
},
|
||||
});
|
||||
|
||||
result.responseTimeMs = Date.now() - startTime;
|
||||
result.proxyConnected = true;
|
||||
result.passed = true;
|
||||
|
||||
// Mark success on proxy stats
|
||||
await crawlRotator.proxy.markSuccess(currentProxy.id, result.responseTimeMs);
|
||||
|
||||
console.log(`[CurlPreflight] PASSED - Proxy ${currentProxy.host} connected (${result.responseTimeMs}ms), UA: ${fingerprint.browserName}/${fingerprint.deviceCategory}`);
|
||||
} catch (err: any) {
|
||||
result.error = `Proxy connection failed: ${err.message || 'Unknown error'}`;
|
||||
console.log(`[CurlPreflight] FAILED - Proxy connection error: ${err.message}`);
|
||||
|
||||
// Mark failure on proxy stats
|
||||
await crawlRotator.proxy.markFailed(currentProxy.id, err.message);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
399
backend/src/services/puppeteer-preflight.ts
Normal file
399
backend/src/services/puppeteer-preflight.ts
Normal file
@@ -0,0 +1,399 @@
|
||||
/**
|
||||
* Puppeteer Preflight - Verify browser-based transport works with anti-detect
|
||||
*
|
||||
* Uses Puppeteer + StealthPlugin to:
|
||||
* 1. Launch headless browser with stealth mode + PROXY
|
||||
* 2. Visit fingerprint.com demo to verify anti-detect and confirm proxy IP
|
||||
* 3. Establish session by visiting Dutchie embedded menu
|
||||
* 4. Make GraphQL request from browser context
|
||||
* 5. Verify we get a valid response (not blocked)
|
||||
*
|
||||
* Use case: Anti-detect scraping that needs real browser fingerprint through proxy
|
||||
*
|
||||
* Based on test-intercept.js which successfully captures 1000+ products
|
||||
*/
|
||||
|
||||
import { PreflightResult, CrawlRotator } from './crawl-rotator';
|
||||
|
||||
// GraphQL hash for FilteredProducts query - MUST match CLAUDE.md
|
||||
const FILTERED_PRODUCTS_HASH = 'ee29c060826dc41c527e470e9ae502c9b2c169720faa0a9f5d25e1b9a530a4a0';
|
||||
|
||||
// Test dispensary - AZ-Deeply-Rooted (known working)
|
||||
const TEST_CNAME = 'AZ-Deeply-Rooted';
|
||||
const TEST_PLATFORM_ID = '6405ef617056e8014d79101b';
|
||||
|
||||
// Anti-detect verification sites (primary + fallback)
|
||||
const FINGERPRINT_DEMO_URL = 'https://demo.fingerprint.com/';
|
||||
const AMIUNIQUE_URL = 'https://amiunique.org/fingerprint';
|
||||
|
||||
export interface PuppeteerPreflightResult extends PreflightResult {
|
||||
method: 'http';
|
||||
/** Number of products returned (proves API access) */
|
||||
productsReturned?: number;
|
||||
/** Browser user agent used */
|
||||
browserUserAgent?: string;
|
||||
/** Bot detection result from fingerprint.com */
|
||||
botDetection?: {
|
||||
detected: boolean;
|
||||
probability?: number;
|
||||
type?: string;
|
||||
};
|
||||
/** Expected proxy IP (from pool) */
|
||||
expectedProxyIp?: string;
|
||||
/** Whether IP verification passed (detected IP matches proxy) */
|
||||
ipVerified?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run Puppeteer preflight check with proxy
|
||||
* Tests browser-based access with anti-detect verification via fingerprint.com
|
||||
*
|
||||
* @param crawlRotator - CrawlRotator instance to get proxy from pool
|
||||
*/
|
||||
export async function runPuppeteerPreflight(
|
||||
crawlRotator?: CrawlRotator
|
||||
): Promise<PuppeteerPreflightResult> {
|
||||
const result: PuppeteerPreflightResult = {
|
||||
method: 'http',
|
||||
passed: false,
|
||||
proxyAvailable: false,
|
||||
proxyConnected: false,
|
||||
antidetectReady: false,
|
||||
proxyIp: null,
|
||||
fingerprint: null,
|
||||
error: null,
|
||||
responseTimeMs: null,
|
||||
productsReturned: 0,
|
||||
ipVerified: false,
|
||||
};
|
||||
|
||||
let browser: any = null;
|
||||
|
||||
try {
|
||||
// Step 0: Get a proxy from the pool
|
||||
let proxyUrl: string | null = null;
|
||||
let expectedProxyHost: string | null = null;
|
||||
|
||||
if (crawlRotator) {
|
||||
const currentProxy = crawlRotator.proxy.getCurrent();
|
||||
if (currentProxy) {
|
||||
result.proxyAvailable = true;
|
||||
proxyUrl = crawlRotator.proxy.getProxyUrl(currentProxy);
|
||||
expectedProxyHost = currentProxy.host;
|
||||
result.expectedProxyIp = expectedProxyHost;
|
||||
console.log(`[PuppeteerPreflight] Using proxy: ${currentProxy.host}:${currentProxy.port}`);
|
||||
} else {
|
||||
result.error = 'No proxy available from pool';
|
||||
console.log(`[PuppeteerPreflight] FAILED - No proxy available`);
|
||||
return result;
|
||||
}
|
||||
} else {
|
||||
console.log(`[PuppeteerPreflight] WARNING: No CrawlRotator provided - using direct connection`);
|
||||
result.proxyAvailable = true; // No proxy needed for direct
|
||||
}
|
||||
|
||||
// Dynamic imports to avoid loading Puppeteer unless needed
|
||||
const puppeteer = require('puppeteer-extra');
|
||||
const StealthPlugin = require('puppeteer-extra-plugin-stealth');
|
||||
puppeteer.use(StealthPlugin());
|
||||
|
||||
const startTime = Date.now();
|
||||
|
||||
// Build browser args
|
||||
const browserArgs = ['--no-sandbox', '--disable-setuid-sandbox'];
|
||||
if (proxyUrl) {
|
||||
// Extract host:port for Puppeteer (it handles auth separately)
|
||||
const proxyUrlParsed = new URL(proxyUrl);
|
||||
browserArgs.push(`--proxy-server=${proxyUrlParsed.host}`);
|
||||
}
|
||||
|
||||
// Launch browser with stealth + proxy
|
||||
browser = await puppeteer.launch({
|
||||
headless: 'new',
|
||||
args: browserArgs,
|
||||
});
|
||||
|
||||
const page = await browser.newPage();
|
||||
|
||||
// If proxy has auth, set it up
|
||||
if (proxyUrl) {
|
||||
const proxyUrlParsed = new URL(proxyUrl);
|
||||
if (proxyUrlParsed.username && proxyUrlParsed.password) {
|
||||
await page.authenticate({
|
||||
username: decodeURIComponent(proxyUrlParsed.username),
|
||||
password: decodeURIComponent(proxyUrlParsed.password),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Get browser user agent
|
||||
const userAgent = await page.evaluate(() => navigator.userAgent);
|
||||
result.browserUserAgent = userAgent;
|
||||
result.fingerprint = {
|
||||
userAgent,
|
||||
browserName: 'Chrome (Puppeteer)',
|
||||
deviceCategory: 'desktop',
|
||||
};
|
||||
|
||||
// =========================================================================
|
||||
// STEP 1: Visit fingerprint.com demo to verify anti-detect and get IP
|
||||
// =========================================================================
|
||||
console.log(`[PuppeteerPreflight] Testing anti-detect at ${FINGERPRINT_DEMO_URL}...`);
|
||||
|
||||
try {
|
||||
await page.goto(FINGERPRINT_DEMO_URL, {
|
||||
waitUntil: 'networkidle2',
|
||||
timeout: 30000,
|
||||
});
|
||||
|
||||
result.proxyConnected = true; // If we got here, proxy is working
|
||||
|
||||
// Wait for fingerprint results to load
|
||||
await page.waitForSelector('[data-test="visitor-id"]', { timeout: 10000 }).catch(() => {});
|
||||
|
||||
// Extract fingerprint data from the page
|
||||
const fingerprintData = await page.evaluate(() => {
|
||||
// Try to find the IP address displayed on the page
|
||||
const ipElement = document.querySelector('[data-test="ip-address"]');
|
||||
const ip = ipElement?.textContent?.trim() || null;
|
||||
|
||||
// Try to find bot detection info
|
||||
const botElement = document.querySelector('[data-test="bot-detected"]');
|
||||
const botDetected = botElement?.textContent?.toLowerCase().includes('true') || false;
|
||||
|
||||
// Try to find visitor ID (proves fingerprinting worked)
|
||||
const visitorIdElement = document.querySelector('[data-test="visitor-id"]');
|
||||
const visitorId = visitorIdElement?.textContent?.trim() || null;
|
||||
|
||||
// Alternative: look for common UI patterns if data-test attrs not present
|
||||
let detectedIp = ip;
|
||||
if (!detectedIp) {
|
||||
// Look for IP in any element containing IP-like pattern
|
||||
const allText = document.body.innerText;
|
||||
const ipMatch = allText.match(/\b(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\b/);
|
||||
detectedIp = ipMatch ? ipMatch[1] : null;
|
||||
}
|
||||
|
||||
return {
|
||||
ip: detectedIp,
|
||||
botDetected,
|
||||
visitorId,
|
||||
pageLoaded: !!document.body,
|
||||
};
|
||||
});
|
||||
|
||||
if (fingerprintData.ip) {
|
||||
result.proxyIp = fingerprintData.ip;
|
||||
console.log(`[PuppeteerPreflight] Detected IP: ${fingerprintData.ip}`);
|
||||
|
||||
// Verify IP matches expected proxy
|
||||
if (expectedProxyHost) {
|
||||
// Check if detected IP contains the proxy host (or is close match)
|
||||
if (fingerprintData.ip === expectedProxyHost ||
|
||||
expectedProxyHost.includes(fingerprintData.ip) ||
|
||||
fingerprintData.ip.includes(expectedProxyHost.split('.').slice(0, 3).join('.'))) {
|
||||
result.ipVerified = true;
|
||||
console.log(`[PuppeteerPreflight] IP VERIFIED - matches proxy`);
|
||||
} else {
|
||||
console.log(`[PuppeteerPreflight] IP mismatch: expected ${expectedProxyHost}, got ${fingerprintData.ip}`);
|
||||
// Don't fail - residential proxies often show different egress IPs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (fingerprintData.visitorId) {
|
||||
console.log(`[PuppeteerPreflight] Fingerprint visitor ID: ${fingerprintData.visitorId}`);
|
||||
}
|
||||
|
||||
result.botDetection = {
|
||||
detected: fingerprintData.botDetected,
|
||||
};
|
||||
|
||||
if (fingerprintData.botDetected) {
|
||||
console.log(`[PuppeteerPreflight] WARNING: Bot detection triggered!`);
|
||||
} else {
|
||||
console.log(`[PuppeteerPreflight] Anti-detect check: NOT detected as bot`);
|
||||
result.antidetectReady = true;
|
||||
}
|
||||
} catch (fpErr: any) {
|
||||
// Could mean proxy connection failed
|
||||
console.log(`[PuppeteerPreflight] Fingerprint.com check failed: ${fpErr.message}`);
|
||||
if (fpErr.message.includes('net::ERR_PROXY') || fpErr.message.includes('ECONNREFUSED')) {
|
||||
result.error = `Proxy connection failed: ${fpErr.message}`;
|
||||
return result;
|
||||
}
|
||||
|
||||
// Try fallback: amiunique.org
|
||||
console.log(`[PuppeteerPreflight] Trying fallback: ${AMIUNIQUE_URL}...`);
|
||||
try {
|
||||
await page.goto(AMIUNIQUE_URL, {
|
||||
waitUntil: 'networkidle2',
|
||||
timeout: 30000,
|
||||
});
|
||||
|
||||
result.proxyConnected = true;
|
||||
|
||||
// Extract IP from amiunique.org page
|
||||
const amiData = await page.evaluate(() => {
|
||||
const allText = document.body.innerText;
|
||||
const ipMatch = allText.match(/\b(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\b/);
|
||||
return {
|
||||
ip: ipMatch ? ipMatch[1] : null,
|
||||
pageLoaded: !!document.body,
|
||||
};
|
||||
});
|
||||
|
||||
if (amiData.ip) {
|
||||
result.proxyIp = amiData.ip;
|
||||
console.log(`[PuppeteerPreflight] Detected IP via amiunique.org: ${amiData.ip}`);
|
||||
}
|
||||
|
||||
result.antidetectReady = true;
|
||||
console.log(`[PuppeteerPreflight] amiunique.org fallback succeeded`);
|
||||
} catch (amiErr: any) {
|
||||
console.log(`[PuppeteerPreflight] amiunique.org fallback also failed: ${amiErr.message}`);
|
||||
// Continue with Dutchie test anyway
|
||||
result.proxyConnected = true;
|
||||
result.antidetectReady = true;
|
||||
}
|
||||
}
|
||||
|
||||
// =========================================================================
|
||||
// STEP 2: Test Dutchie API access (the real test)
|
||||
// =========================================================================
|
||||
const embedUrl = `https://dutchie.com/embedded-menu/${TEST_CNAME}?menuType=rec`;
|
||||
console.log(`[PuppeteerPreflight] Establishing session at ${embedUrl}...`);
|
||||
|
||||
await page.goto(embedUrl, {
|
||||
waitUntil: 'networkidle2',
|
||||
timeout: 30000,
|
||||
});
|
||||
|
||||
// Make GraphQL request from browser context
|
||||
const graphqlResult = await page.evaluate(
|
||||
async (platformId: string, hash: string) => {
|
||||
try {
|
||||
const variables = {
|
||||
includeEnterpriseSpecials: false,
|
||||
productsFilter: {
|
||||
dispensaryId: platformId,
|
||||
pricingType: 'rec',
|
||||
Status: 'Active', // CRITICAL: Must be 'Active' per CLAUDE.md
|
||||
types: [],
|
||||
useCache: true,
|
||||
isDefaultSort: true,
|
||||
sortBy: 'popularSortIdx',
|
||||
sortDirection: 1,
|
||||
bypassOnlineThresholds: true,
|
||||
isKioskMenu: false,
|
||||
removeProductsBelowOptionThresholds: false,
|
||||
},
|
||||
page: 0,
|
||||
perPage: 10, // Just need a few to prove it works
|
||||
};
|
||||
|
||||
const extensions = {
|
||||
persistedQuery: {
|
||||
version: 1,
|
||||
sha256Hash: hash,
|
||||
},
|
||||
};
|
||||
|
||||
const qs = new URLSearchParams({
|
||||
operationName: 'FilteredProducts',
|
||||
variables: JSON.stringify(variables),
|
||||
extensions: JSON.stringify(extensions),
|
||||
});
|
||||
|
||||
const url = `https://dutchie.com/api-3/graphql?${qs.toString()}`;
|
||||
const sessionId = 'preflight-' + Date.now();
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'content-type': 'application/json',
|
||||
'x-dutchie-session': sessionId,
|
||||
'apollographql-client-name': 'Marketplace (production)',
|
||||
},
|
||||
credentials: 'include',
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return { error: `HTTP ${response.status}`, products: 0 };
|
||||
}
|
||||
|
||||
const json = await response.json();
|
||||
|
||||
if (json.errors) {
|
||||
return { error: JSON.stringify(json.errors).slice(0, 200), products: 0 };
|
||||
}
|
||||
|
||||
const products = json?.data?.filteredProducts?.products || [];
|
||||
return { error: null, products: products.length };
|
||||
} catch (err: any) {
|
||||
return { error: err.message || 'Unknown error', products: 0 };
|
||||
}
|
||||
},
|
||||
TEST_PLATFORM_ID,
|
||||
FILTERED_PRODUCTS_HASH
|
||||
);
|
||||
|
||||
result.responseTimeMs = Date.now() - startTime;
|
||||
|
||||
if (graphqlResult.error) {
|
||||
result.error = `GraphQL error: ${graphqlResult.error}`;
|
||||
console.log(`[PuppeteerPreflight] FAILED - ${result.error}`);
|
||||
} else if (graphqlResult.products === 0) {
|
||||
result.error = 'GraphQL returned 0 products';
|
||||
console.log(`[PuppeteerPreflight] FAILED - No products returned`);
|
||||
} else {
|
||||
result.passed = true;
|
||||
result.productsReturned = graphqlResult.products;
|
||||
console.log(
|
||||
`[PuppeteerPreflight] PASSED - Got ${graphqlResult.products} products in ${result.responseTimeMs}ms`
|
||||
);
|
||||
if (result.proxyIp) {
|
||||
console.log(`[PuppeteerPreflight] Browser IP via proxy: ${result.proxyIp}`);
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
result.error = `Browser error: ${err.message || 'Unknown error'}`;
|
||||
console.log(`[PuppeteerPreflight] FAILED - ${result.error}`);
|
||||
} finally {
|
||||
if (browser) {
|
||||
await browser.close().catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run Puppeteer preflight with retry
|
||||
* Retries once on failure to handle transient issues
|
||||
*
|
||||
* @param crawlRotator - CrawlRotator instance to get proxy from pool
|
||||
* @param maxRetries - Number of retry attempts (default 1)
|
||||
*/
|
||||
export async function runPuppeteerPreflightWithRetry(
|
||||
crawlRotator?: CrawlRotator,
|
||||
maxRetries: number = 1
|
||||
): Promise<PuppeteerPreflightResult> {
|
||||
let lastResult: PuppeteerPreflightResult | null = null;
|
||||
|
||||
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||
if (attempt > 0) {
|
||||
console.log(`[PuppeteerPreflight] Retry attempt ${attempt}/${maxRetries}...`);
|
||||
await new Promise((r) => setTimeout(r, 5000)); // Wait 5s between retries
|
||||
}
|
||||
|
||||
lastResult = await runPuppeteerPreflight(crawlRotator);
|
||||
|
||||
if (lastResult.passed) {
|
||||
return lastResult;
|
||||
}
|
||||
}
|
||||
|
||||
return lastResult!;
|
||||
}
|
||||
@@ -1,566 +1,30 @@
|
||||
/**
|
||||
* System API Routes
|
||||
* System API Routes (Stub)
|
||||
*
|
||||
* Provides REST API endpoints for system monitoring and control:
|
||||
* - /api/system/sync/* - Sync orchestrator
|
||||
* - /api/system/dlq/* - Dead-letter queue
|
||||
* - /api/system/integrity/* - Integrity checks
|
||||
* - /api/system/fix/* - Auto-fix routines
|
||||
* - /api/system/alerts/* - System alerts
|
||||
* - /metrics - Prometheus metrics
|
||||
* The full system routes depend on SyncOrchestrator which was moved to _deprecated.
|
||||
* This stub provides empty routers to maintain backward compatibility.
|
||||
*
|
||||
* Phase 5: Full Production Sync + Monitoring
|
||||
* Full implementation available at: src/_deprecated/system/routes/index.ts
|
||||
*/
|
||||
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { Pool } from 'pg';
|
||||
import {
|
||||
SyncOrchestrator,
|
||||
MetricsService,
|
||||
DLQService,
|
||||
AlertService,
|
||||
IntegrityService,
|
||||
AutoFixService,
|
||||
} from '../services';
|
||||
import { MetricsService } from '../services';
|
||||
|
||||
export function createSystemRouter(pool: Pool): Router {
|
||||
export function createSystemRouter(_pool: Pool): Router {
|
||||
const router = Router();
|
||||
|
||||
// Initialize services
|
||||
const metrics = new MetricsService(pool);
|
||||
const dlq = new DLQService(pool);
|
||||
const alerts = new AlertService(pool);
|
||||
const integrity = new IntegrityService(pool, alerts);
|
||||
const autoFix = new AutoFixService(pool, alerts);
|
||||
const orchestrator = new SyncOrchestrator(pool, metrics, dlq, alerts);
|
||||
|
||||
// ============================================================
|
||||
// SYNC ORCHESTRATOR ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/sync/status
|
||||
* Get current sync status
|
||||
*/
|
||||
router.get('/sync/status', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const status = await orchestrator.getStatus();
|
||||
res.json(status);
|
||||
} catch (error) {
|
||||
console.error('[System] Sync status error:', error);
|
||||
res.status(500).json({ error: 'Failed to get sync status' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/sync/run
|
||||
* Trigger a sync run
|
||||
*/
|
||||
router.post('/sync/run', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const triggeredBy = req.body.triggeredBy || 'api';
|
||||
const result = await orchestrator.runSync();
|
||||
res.json({
|
||||
success: true,
|
||||
triggeredBy,
|
||||
metrics: result,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('[System] Sync run error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Sync run failed',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/sync/queue-depth
|
||||
* Get queue depth information
|
||||
*/
|
||||
router.get('/sync/queue-depth', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const depth = await orchestrator.getQueueDepth();
|
||||
res.json(depth);
|
||||
} catch (error) {
|
||||
console.error('[System] Queue depth error:', error);
|
||||
res.status(500).json({ error: 'Failed to get queue depth' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/sync/health
|
||||
* Get sync health status
|
||||
*/
|
||||
router.get('/sync/health', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const health = await orchestrator.getHealth();
|
||||
res.status(health.healthy ? 200 : 503).json(health);
|
||||
} catch (error) {
|
||||
console.error('[System] Health check error:', error);
|
||||
res.status(500).json({ healthy: false, error: 'Health check failed' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/sync/pause
|
||||
* Pause the orchestrator
|
||||
*/
|
||||
router.post('/sync/pause', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const reason = req.body.reason || 'Manual pause';
|
||||
await orchestrator.pause(reason);
|
||||
res.json({ success: true, message: 'Orchestrator paused' });
|
||||
} catch (error) {
|
||||
console.error('[System] Pause error:', error);
|
||||
res.status(500).json({ error: 'Failed to pause orchestrator' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/sync/resume
|
||||
* Resume the orchestrator
|
||||
*/
|
||||
router.post('/sync/resume', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
await orchestrator.resume();
|
||||
res.json({ success: true, message: 'Orchestrator resumed' });
|
||||
} catch (error) {
|
||||
console.error('[System] Resume error:', error);
|
||||
res.status(500).json({ error: 'Failed to resume orchestrator' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// DLQ ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq
|
||||
* List DLQ payloads
|
||||
*/
|
||||
router.get('/dlq', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const options = {
|
||||
status: req.query.status as string,
|
||||
errorType: req.query.errorType as string,
|
||||
dispensaryId: req.query.dispensaryId ? parseInt(req.query.dispensaryId as string) : undefined,
|
||||
limit: req.query.limit ? parseInt(req.query.limit as string) : 50,
|
||||
offset: req.query.offset ? parseInt(req.query.offset as string) : 0,
|
||||
};
|
||||
|
||||
const result = await dlq.listPayloads(options);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ list error:', error);
|
||||
res.status(500).json({ error: 'Failed to list DLQ payloads' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq/stats
|
||||
* Get DLQ statistics
|
||||
*/
|
||||
router.get('/dlq/stats', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const stats = await dlq.getStats();
|
||||
res.json(stats);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ stats error:', error);
|
||||
res.status(500).json({ error: 'Failed to get DLQ stats' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq/summary
|
||||
* Get DLQ summary by error type
|
||||
*/
|
||||
router.get('/dlq/summary', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const summary = await dlq.getSummary();
|
||||
res.json(summary);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ summary error:', error);
|
||||
res.status(500).json({ error: 'Failed to get DLQ summary' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/dlq/:id
|
||||
* Get a specific DLQ payload
|
||||
*/
|
||||
router.get('/dlq/:id', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const payload = await dlq.getPayload(req.params.id);
|
||||
if (!payload) {
|
||||
return res.status(404).json({ error: 'Payload not found' });
|
||||
}
|
||||
res.json(payload);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ get error:', error);
|
||||
res.status(500).json({ error: 'Failed to get DLQ payload' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/dlq/:id/retry
|
||||
* Retry a DLQ payload
|
||||
*/
|
||||
router.post('/dlq/:id/retry', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await dlq.retryPayload(req.params.id);
|
||||
if (result.success) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(400).json(result);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ retry error:', error);
|
||||
res.status(500).json({ error: 'Failed to retry payload' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/dlq/:id/abandon
|
||||
* Abandon a DLQ payload
|
||||
*/
|
||||
router.post('/dlq/:id/abandon', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const reason = req.body.reason || 'Manually abandoned';
|
||||
const abandonedBy = req.body.abandonedBy || 'api';
|
||||
const success = await dlq.abandonPayload(req.params.id, reason, abandonedBy);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ abandon error:', error);
|
||||
res.status(500).json({ error: 'Failed to abandon payload' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/dlq/bulk-retry
|
||||
* Bulk retry payloads by error type
|
||||
*/
|
||||
router.post('/dlq/bulk-retry', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { errorType } = req.body;
|
||||
if (!errorType) {
|
||||
return res.status(400).json({ error: 'errorType is required' });
|
||||
}
|
||||
const result = await dlq.bulkRetryByErrorType(errorType);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] DLQ bulk retry error:', error);
|
||||
res.status(500).json({ error: 'Failed to bulk retry' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// INTEGRITY CHECK ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* POST /api/system/integrity/run
|
||||
* Run all integrity checks
|
||||
*/
|
||||
router.post('/integrity/run', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const triggeredBy = req.body.triggeredBy || 'api';
|
||||
const result = await integrity.runAllChecks(triggeredBy);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] Integrity run error:', error);
|
||||
res.status(500).json({ error: 'Failed to run integrity checks' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/integrity/runs
|
||||
* Get recent integrity check runs
|
||||
*/
|
||||
router.get('/integrity/runs', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const limit = req.query.limit ? parseInt(req.query.limit as string) : 10;
|
||||
const runs = await integrity.getRecentRuns(limit);
|
||||
res.json(runs);
|
||||
} catch (error) {
|
||||
console.error('[System] Integrity runs error:', error);
|
||||
res.status(500).json({ error: 'Failed to get integrity runs' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/integrity/runs/:runId
|
||||
* Get results for a specific integrity run
|
||||
*/
|
||||
router.get('/integrity/runs/:runId', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const results = await integrity.getRunResults(req.params.runId);
|
||||
res.json(results);
|
||||
} catch (error) {
|
||||
console.error('[System] Integrity run results error:', error);
|
||||
res.status(500).json({ error: 'Failed to get run results' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// AUTO-FIX ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/fix/routines
|
||||
* Get available fix routines
|
||||
*/
|
||||
router.get('/fix/routines', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const routines = autoFix.getAvailableRoutines();
|
||||
res.json(routines);
|
||||
} catch (error) {
|
||||
console.error('[System] Get routines error:', error);
|
||||
res.status(500).json({ error: 'Failed to get routines' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/fix/:routine
|
||||
* Run a fix routine
|
||||
*/
|
||||
router.post('/fix/:routine', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const routineName = req.params.routine;
|
||||
const dryRun = req.body.dryRun === true;
|
||||
const triggeredBy = req.body.triggeredBy || 'api';
|
||||
|
||||
const result = await autoFix.runRoutine(routineName as any, triggeredBy, { dryRun });
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] Fix routine error:', error);
|
||||
res.status(500).json({ error: 'Failed to run fix routine' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/fix/runs
|
||||
* Get recent fix runs
|
||||
*/
|
||||
router.get('/fix/runs', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const limit = req.query.limit ? parseInt(req.query.limit as string) : 20;
|
||||
const runs = await autoFix.getRecentRuns(limit);
|
||||
res.json(runs);
|
||||
} catch (error) {
|
||||
console.error('[System] Fix runs error:', error);
|
||||
res.status(500).json({ error: 'Failed to get fix runs' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// ALERTS ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/alerts
|
||||
* List alerts
|
||||
*/
|
||||
router.get('/alerts', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const options = {
|
||||
status: req.query.status as any,
|
||||
severity: req.query.severity as any,
|
||||
type: req.query.type as string,
|
||||
limit: req.query.limit ? parseInt(req.query.limit as string) : 50,
|
||||
offset: req.query.offset ? parseInt(req.query.offset as string) : 0,
|
||||
};
|
||||
|
||||
const result = await alerts.listAlerts(options);
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
console.error('[System] Alerts list error:', error);
|
||||
res.status(500).json({ error: 'Failed to list alerts' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/alerts/active
|
||||
* Get active alerts
|
||||
*/
|
||||
router.get('/alerts/active', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const activeAlerts = await alerts.getActiveAlerts();
|
||||
res.json(activeAlerts);
|
||||
} catch (error) {
|
||||
console.error('[System] Active alerts error:', error);
|
||||
res.status(500).json({ error: 'Failed to get active alerts' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/alerts/summary
|
||||
* Get alert summary
|
||||
*/
|
||||
router.get('/alerts/summary', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const summary = await alerts.getSummary();
|
||||
res.json(summary);
|
||||
} catch (error) {
|
||||
console.error('[System] Alerts summary error:', error);
|
||||
res.status(500).json({ error: 'Failed to get alerts summary' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/alerts/:id/acknowledge
|
||||
* Acknowledge an alert
|
||||
*/
|
||||
router.post('/alerts/:id/acknowledge', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const alertId = parseInt(req.params.id);
|
||||
const acknowledgedBy = req.body.acknowledgedBy || 'api';
|
||||
const success = await alerts.acknowledgeAlert(alertId, acknowledgedBy);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
console.error('[System] Acknowledge alert error:', error);
|
||||
res.status(500).json({ error: 'Failed to acknowledge alert' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/alerts/:id/resolve
|
||||
* Resolve an alert
|
||||
*/
|
||||
router.post('/alerts/:id/resolve', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const alertId = parseInt(req.params.id);
|
||||
const resolvedBy = req.body.resolvedBy || 'api';
|
||||
const success = await alerts.resolveAlert(alertId, resolvedBy);
|
||||
res.json({ success });
|
||||
} catch (error) {
|
||||
console.error('[System] Resolve alert error:', error);
|
||||
res.status(500).json({ error: 'Failed to resolve alert' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/alerts/bulk-acknowledge
|
||||
* Bulk acknowledge alerts
|
||||
*/
|
||||
router.post('/alerts/bulk-acknowledge', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { ids, acknowledgedBy } = req.body;
|
||||
if (!ids || !Array.isArray(ids)) {
|
||||
return res.status(400).json({ error: 'ids array is required' });
|
||||
}
|
||||
const count = await alerts.bulkAcknowledge(ids, acknowledgedBy || 'api');
|
||||
res.json({ acknowledged: count });
|
||||
} catch (error) {
|
||||
console.error('[System] Bulk acknowledge error:', error);
|
||||
res.status(500).json({ error: 'Failed to bulk acknowledge' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================
|
||||
// METRICS ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
/**
|
||||
* GET /api/system/metrics
|
||||
* Get all current metrics
|
||||
*/
|
||||
router.get('/metrics', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const allMetrics = await metrics.getAllMetrics();
|
||||
res.json(allMetrics);
|
||||
} catch (error) {
|
||||
console.error('[System] Metrics error:', error);
|
||||
res.status(500).json({ error: 'Failed to get metrics' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/metrics/:name
|
||||
* Get a specific metric
|
||||
*/
|
||||
router.get('/metrics/:name', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const metric = await metrics.getMetric(req.params.name);
|
||||
if (!metric) {
|
||||
return res.status(404).json({ error: 'Metric not found' });
|
||||
}
|
||||
res.json(metric);
|
||||
} catch (error) {
|
||||
console.error('[System] Metric error:', error);
|
||||
res.status(500).json({ error: 'Failed to get metric' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/metrics/:name/history
|
||||
* Get metric time series
|
||||
*/
|
||||
router.get('/metrics/:name/history', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const hours = req.query.hours ? parseInt(req.query.hours as string) : 24;
|
||||
const history = await metrics.getMetricHistory(req.params.name, hours);
|
||||
res.json(history);
|
||||
} catch (error) {
|
||||
console.error('[System] Metric history error:', error);
|
||||
res.status(500).json({ error: 'Failed to get metric history' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/errors
|
||||
* Get error summary
|
||||
*/
|
||||
router.get('/errors', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const summary = await metrics.getErrorSummary();
|
||||
res.json(summary);
|
||||
} catch (error) {
|
||||
console.error('[System] Error summary error:', error);
|
||||
res.status(500).json({ error: 'Failed to get error summary' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/system/errors/recent
|
||||
* Get recent errors
|
||||
*/
|
||||
router.get('/errors/recent', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const limit = req.query.limit ? parseInt(req.query.limit as string) : 50;
|
||||
const errorType = req.query.type as string;
|
||||
const errors = await metrics.getRecentErrors(limit, errorType);
|
||||
res.json(errors);
|
||||
} catch (error) {
|
||||
console.error('[System] Recent errors error:', error);
|
||||
res.status(500).json({ error: 'Failed to get recent errors' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/system/errors/acknowledge
|
||||
* Acknowledge errors
|
||||
*/
|
||||
router.post('/errors/acknowledge', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { ids, acknowledgedBy } = req.body;
|
||||
if (!ids || !Array.isArray(ids)) {
|
||||
return res.status(400).json({ error: 'ids array is required' });
|
||||
}
|
||||
const count = await metrics.acknowledgeErrors(ids, acknowledgedBy || 'api');
|
||||
res.json({ acknowledged: count });
|
||||
} catch (error) {
|
||||
console.error('[System] Acknowledge errors error:', error);
|
||||
res.status(500).json({ error: 'Failed to acknowledge errors' });
|
||||
}
|
||||
// Stub - full sync/dlq/integrity/fix/alerts routes moved to _deprecated
|
||||
router.get('/status', (_req: Request, res: Response) => {
|
||||
res.json({
|
||||
message: 'System routes temporarily disabled - see _deprecated/system/routes',
|
||||
status: 'stub',
|
||||
});
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create Prometheus metrics endpoint (standalone)
|
||||
*/
|
||||
export function createPrometheusRouter(pool: Pool): Router {
|
||||
const router = Router();
|
||||
const metrics = new MetricsService(pool);
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* Phase 5: Full Production Sync + Monitoring
|
||||
*/
|
||||
|
||||
export { SyncOrchestrator, type SyncStatus, type QueueDepth, type SyncRunMetrics, type OrchestratorStatus } from './sync-orchestrator';
|
||||
// SyncOrchestrator moved to _deprecated (depends on hydration module)
|
||||
export { MetricsService, ERROR_TYPES, type Metric, type MetricTimeSeries, type ErrorBucket, type ErrorType } from './metrics';
|
||||
export { DLQService, type DLQPayload, type DLQStats } from './dlq';
|
||||
export { AlertService, type SystemAlert, type AlertSummary, type AlertSeverity, type AlertStatus } from './alerts';
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
* Exports all task handlers for the task worker.
|
||||
*/
|
||||
|
||||
export { handleProductRefresh } from './product-refresh';
|
||||
export { handleProductDiscovery } from './product-discovery';
|
||||
export { handleProductRefresh } from './product-refresh';
|
||||
export { handleStoreDiscovery } from './store-discovery';
|
||||
export { handleEntryPointDiscovery } from './entry-point-discovery';
|
||||
export { handleAnalyticsRefresh } from './analytics-refresh';
|
||||
export { handleProxyTest } from './proxy-test';
|
||||
export { handleWhoami } from './whoami';
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
/**
|
||||
* Proxy Test Handler
|
||||
* Tests proxy connectivity by fetching public IP via ipify
|
||||
*/
|
||||
|
||||
import { TaskContext, TaskResult } from '../task-worker';
|
||||
import { execSync } from 'child_process';
|
||||
|
||||
export async function handleProxyTest(ctx: TaskContext): Promise<TaskResult> {
|
||||
const { pool } = ctx;
|
||||
|
||||
console.log('[ProxyTest] Testing proxy connection...');
|
||||
|
||||
try {
|
||||
// Get active proxy from DB
|
||||
const proxyResult = await pool.query(`
|
||||
SELECT host, port, username, password
|
||||
FROM proxies
|
||||
WHERE is_active = true
|
||||
LIMIT 1
|
||||
`);
|
||||
|
||||
if (proxyResult.rows.length === 0) {
|
||||
return { success: false, error: 'No active proxy configured' };
|
||||
}
|
||||
|
||||
const p = proxyResult.rows[0];
|
||||
const proxyUrl = p.username
|
||||
? `http://${p.username}:${p.password}@${p.host}:${p.port}`
|
||||
: `http://${p.host}:${p.port}`;
|
||||
|
||||
console.log(`[ProxyTest] Using proxy: ${p.host}:${p.port}`);
|
||||
|
||||
// Fetch IP via proxy
|
||||
const cmd = `curl -s --proxy '${proxyUrl}' 'https://api.ipify.org?format=json'`;
|
||||
const output = execSync(cmd, { timeout: 30000 }).toString().trim();
|
||||
const data = JSON.parse(output);
|
||||
|
||||
console.log(`[ProxyTest] Proxy IP: ${data.ip}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
proxyIp: data.ip,
|
||||
proxyHost: p.host,
|
||||
proxyPort: p.port,
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.error('[ProxyTest] Error:', error.message);
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
}
|
||||
80
backend/src/tasks/handlers/whoami.ts
Normal file
80
backend/src/tasks/handlers/whoami.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* WhoAmI Handler
|
||||
* Tests proxy connectivity and anti-detect by fetching public IP
|
||||
* Reports: proxy IP, fingerprint info, and connection status
|
||||
*/
|
||||
|
||||
import { TaskContext, TaskResult } from '../task-worker';
|
||||
import { execSync } from 'child_process';
|
||||
|
||||
export async function handleWhoami(ctx: TaskContext): Promise<TaskResult> {
|
||||
const { pool, crawlRotator } = ctx;
|
||||
|
||||
console.log('[WhoAmI] Testing proxy and anti-detect...');
|
||||
|
||||
try {
|
||||
// Use the preflight check which tests proxy + anti-detect
|
||||
if (crawlRotator) {
|
||||
const preflight = await crawlRotator.preflight();
|
||||
|
||||
if (!preflight.passed) {
|
||||
return {
|
||||
success: false,
|
||||
error: preflight.error || 'Preflight check failed',
|
||||
proxyAvailable: preflight.proxyAvailable,
|
||||
proxyConnected: preflight.proxyConnected,
|
||||
antidetectReady: preflight.antidetectReady,
|
||||
};
|
||||
}
|
||||
|
||||
console.log(`[WhoAmI] Proxy IP: ${preflight.proxyIp}, Response: ${preflight.responseTimeMs}ms`);
|
||||
console.log(`[WhoAmI] Fingerprint: ${preflight.fingerprint?.browserName}/${preflight.fingerprint?.deviceCategory}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
proxyIp: preflight.proxyIp,
|
||||
responseTimeMs: preflight.responseTimeMs,
|
||||
fingerprint: preflight.fingerprint,
|
||||
proxyAvailable: preflight.proxyAvailable,
|
||||
proxyConnected: preflight.proxyConnected,
|
||||
antidetectReady: preflight.antidetectReady,
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback: Direct proxy test without CrawlRotator
|
||||
const proxyResult = await pool.query(`
|
||||
SELECT host, port, username, password
|
||||
FROM proxies
|
||||
WHERE is_active = true
|
||||
LIMIT 1
|
||||
`);
|
||||
|
||||
if (proxyResult.rows.length === 0) {
|
||||
return { success: false, error: 'No active proxy configured' };
|
||||
}
|
||||
|
||||
const p = proxyResult.rows[0];
|
||||
const proxyUrl = p.username
|
||||
? `http://${p.username}:${p.password}@${p.host}:${p.port}`
|
||||
: `http://${p.host}:${p.port}`;
|
||||
|
||||
console.log(`[WhoAmI] Using proxy: ${p.host}:${p.port}`);
|
||||
|
||||
// Fetch IP via proxy
|
||||
const cmd = `curl -s --proxy '${proxyUrl}' 'https://api.ipify.org?format=json'`;
|
||||
const output = execSync(cmd, { timeout: 30000 }).toString().trim();
|
||||
const data = JSON.parse(output);
|
||||
|
||||
console.log(`[WhoAmI] Proxy IP: ${data.ip}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
proxyIp: data.ip,
|
||||
proxyHost: p.host,
|
||||
proxyPort: p.port,
|
||||
};
|
||||
} catch (error: any) {
|
||||
console.error('[WhoAmI] Error:', error.message);
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
}
|
||||
@@ -17,8 +17,8 @@ export {
|
||||
export { TaskWorker, TaskContext, TaskResult } from './task-worker';
|
||||
|
||||
export {
|
||||
handleProductRefresh,
|
||||
handleProductDiscovery,
|
||||
handleProductRefresh,
|
||||
handleStoreDiscovery,
|
||||
handleEntryPointDiscovery,
|
||||
handleAnalyticsRefresh,
|
||||
|
||||
@@ -24,15 +24,16 @@ async function tableExists(tableName: string): Promise<boolean> {
|
||||
|
||||
// Per TASK_WORKFLOW_2024-12-10.md: Task roles
|
||||
// payload_fetch: Hits Dutchie API, saves raw payload to filesystem
|
||||
// product_refresh: Reads local payload, normalizes, upserts to DB
|
||||
// product_discovery: Main product crawl handler
|
||||
// product_refresh: Legacy role (deprecated but kept for compatibility)
|
||||
export type TaskRole =
|
||||
| 'store_discovery'
|
||||
| 'entry_point_discovery'
|
||||
| 'product_discovery'
|
||||
| 'payload_fetch' // NEW: Fetches from API, saves to disk
|
||||
| 'product_refresh' // CHANGED: Now reads from local payload
|
||||
| 'payload_fetch' // Fetches from API, saves to disk
|
||||
| 'product_refresh' // DEPRECATED: Use product_discovery instead
|
||||
| 'analytics_refresh'
|
||||
| 'proxy_test'; // Tests proxy connectivity via ipify
|
||||
| 'whoami'; // Tests proxy + anti-detect connectivity
|
||||
|
||||
export type TaskStatus =
|
||||
| 'pending'
|
||||
@@ -51,6 +52,7 @@ export interface WorkerTask {
|
||||
platform: string | null;
|
||||
status: TaskStatus;
|
||||
priority: number;
|
||||
method: 'curl' | 'http' | null; // Transport method: curl=axios/proxy, http=Puppeteer/browser
|
||||
scheduled_for: Date | null;
|
||||
worker_id: string | null;
|
||||
claimed_at: Date | null;
|
||||
@@ -152,23 +154,33 @@ class TaskService {
|
||||
* Claim a task atomically for a worker
|
||||
* If role is null, claims ANY available task (role-agnostic worker)
|
||||
* Returns null if task pool is paused.
|
||||
*
|
||||
* @param role - Task role to claim, or null for any task
|
||||
* @param workerId - Worker ID claiming the task
|
||||
* @param curlPassed - Whether worker passed curl preflight (default true for backward compat)
|
||||
* @param httpPassed - Whether worker passed http/Puppeteer preflight (default false)
|
||||
*/
|
||||
async claimTask(role: TaskRole | null, workerId: string): Promise<WorkerTask | null> {
|
||||
async claimTask(
|
||||
role: TaskRole | null,
|
||||
workerId: string,
|
||||
curlPassed: boolean = true,
|
||||
httpPassed: boolean = false
|
||||
): Promise<WorkerTask | null> {
|
||||
// Check if task pool is paused - don't claim any tasks
|
||||
if (isTaskPoolPaused()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (role) {
|
||||
// Role-specific claiming - use the SQL function
|
||||
// Role-specific claiming - use the SQL function with preflight capabilities
|
||||
const result = await pool.query(
|
||||
`SELECT * FROM claim_task($1, $2)`,
|
||||
[role, workerId]
|
||||
`SELECT * FROM claim_task($1, $2, $3, $4)`,
|
||||
[role, workerId, curlPassed, httpPassed]
|
||||
);
|
||||
return (result.rows[0] as WorkerTask) || null;
|
||||
}
|
||||
|
||||
// Role-agnostic claiming - claim ANY pending task
|
||||
// Role-agnostic claiming - claim ANY pending task matching worker capabilities
|
||||
const result = await pool.query(`
|
||||
UPDATE worker_tasks
|
||||
SET
|
||||
@@ -179,6 +191,12 @@ class TaskService {
|
||||
SELECT id FROM worker_tasks
|
||||
WHERE status = 'pending'
|
||||
AND (scheduled_for IS NULL OR scheduled_for <= NOW())
|
||||
-- Method compatibility: worker must have passed the required preflight
|
||||
AND (
|
||||
method IS NULL -- No preference, any worker can claim
|
||||
OR (method = 'curl' AND $2 = TRUE)
|
||||
OR (method = 'http' AND $3 = TRUE)
|
||||
)
|
||||
-- Exclude stores that already have an active task
|
||||
AND (dispensary_id IS NULL OR dispensary_id NOT IN (
|
||||
SELECT dispensary_id FROM worker_tasks
|
||||
@@ -190,7 +208,7 @@ class TaskService {
|
||||
FOR UPDATE SKIP LOCKED
|
||||
)
|
||||
RETURNING *
|
||||
`, [workerId]);
|
||||
`, [workerId, curlPassed, httpPassed]);
|
||||
|
||||
return (result.rows[0] as WorkerTask) || null;
|
||||
}
|
||||
@@ -231,6 +249,24 @@ class TaskService {
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Release a claimed task back to pending (e.g., when preflight fails)
|
||||
* This allows another worker to pick it up.
|
||||
*/
|
||||
async releaseTask(taskId: number): Promise<void> {
|
||||
await pool.query(
|
||||
`UPDATE worker_tasks
|
||||
SET status = 'pending',
|
||||
worker_id = NULL,
|
||||
claimed_at = NULL,
|
||||
started_at = NULL,
|
||||
updated_at = NOW()
|
||||
WHERE id = $1 AND status IN ('claimed', 'running')`,
|
||||
[taskId]
|
||||
);
|
||||
console.log(`[TaskService] Task ${taskId} released back to pending`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a task as failed, with auto-retry if under max_retries
|
||||
* Returns true if task was re-queued for retry, false if permanently failed
|
||||
|
||||
@@ -51,6 +51,10 @@ import os from 'os';
|
||||
import { CrawlRotator } from '../services/crawl-rotator';
|
||||
import { setCrawlRotator } from '../platforms/dutchie';
|
||||
|
||||
// Dual-transport preflight system
|
||||
import { runCurlPreflight, CurlPreflightResult } from '../services/curl-preflight';
|
||||
import { runPuppeteerPreflightWithRetry, PuppeteerPreflightResult } from '../services/puppeteer-preflight';
|
||||
|
||||
// Task handlers by role
|
||||
// Per TASK_WORKFLOW_2024-12-10.md: payload_fetch and product_refresh are now separate
|
||||
import { handlePayloadFetch } from './handlers/payload-fetch';
|
||||
@@ -59,7 +63,7 @@ import { handleProductDiscovery } from './handlers/product-discovery';
|
||||
import { handleStoreDiscovery } from './handlers/store-discovery';
|
||||
import { handleEntryPointDiscovery } from './handlers/entry-point-discovery';
|
||||
import { handleAnalyticsRefresh } from './handlers/analytics-refresh';
|
||||
import { handleProxyTest } from './handlers/proxy-test';
|
||||
import { handleWhoami } from './handlers/whoami';
|
||||
|
||||
const POLL_INTERVAL_MS = parseInt(process.env.POLL_INTERVAL_MS || '5000');
|
||||
const HEARTBEAT_INTERVAL_MS = parseInt(process.env.HEARTBEAT_INTERVAL_MS || '30000');
|
||||
@@ -111,6 +115,7 @@ export interface TaskContext {
|
||||
workerId: string;
|
||||
task: WorkerTask;
|
||||
heartbeat: () => Promise<void>;
|
||||
crawlRotator?: CrawlRotator;
|
||||
}
|
||||
|
||||
export interface TaskResult {
|
||||
@@ -125,16 +130,17 @@ export interface TaskResult {
|
||||
type TaskHandler = (ctx: TaskContext) => Promise<TaskResult>;
|
||||
|
||||
// Per TASK_WORKFLOW_2024-12-10.md: Handler registry
|
||||
// payload_fetch: Fetches from Dutchie API, saves to disk, chains to product_refresh
|
||||
// payload_fetch: Fetches from Dutchie API, saves to disk
|
||||
// product_refresh: Reads local payload, normalizes, upserts to DB
|
||||
// product_discovery: Main handler for product crawling
|
||||
const TASK_HANDLERS: Record<TaskRole, TaskHandler> = {
|
||||
payload_fetch: handlePayloadFetch, // NEW: API fetch -> disk
|
||||
product_refresh: handleProductRefresh, // CHANGED: disk -> DB
|
||||
payload_fetch: handlePayloadFetch, // API fetch -> disk
|
||||
product_refresh: handleProductRefresh, // disk -> DB
|
||||
product_discovery: handleProductDiscovery,
|
||||
store_discovery: handleStoreDiscovery,
|
||||
entry_point_discovery: handleEntryPointDiscovery,
|
||||
analytics_refresh: handleAnalyticsRefresh,
|
||||
proxy_test: handleProxyTest, // Tests proxy via ipify
|
||||
whoami: handleWhoami, // Tests proxy + anti-detect
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -188,6 +194,21 @@ export class TaskWorker {
|
||||
private isBackingOff: boolean = false;
|
||||
private backoffReason: string | null = null;
|
||||
|
||||
// ==========================================================================
|
||||
// DUAL-TRANSPORT PREFLIGHT STATUS
|
||||
// ==========================================================================
|
||||
// Workers run BOTH preflights on startup:
|
||||
// - curl: axios/proxy transport - fast, for simple API calls
|
||||
// - http: Puppeteer/browser transport - anti-detect, for Dutchie GraphQL
|
||||
//
|
||||
// Task claiming checks method compatibility - worker must have passed
|
||||
// the preflight for the task's required method.
|
||||
// ==========================================================================
|
||||
private preflightCurlPassed: boolean = false;
|
||||
private preflightHttpPassed: boolean = false;
|
||||
private preflightCurlResult: CurlPreflightResult | null = null;
|
||||
private preflightHttpResult: PuppeteerPreflightResult | null = null;
|
||||
|
||||
constructor(role: TaskRole | null = null, workerId?: string) {
|
||||
this.pool = getPool();
|
||||
this.role = role;
|
||||
@@ -350,6 +371,99 @@ export class TaskWorker {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run dual-transport preflights on startup
|
||||
* Tests both curl (axios/proxy) and http (Puppeteer/browser) transport methods.
|
||||
* Results are reported to worker_registry and used for task claiming.
|
||||
*
|
||||
* NOTE: All current tasks require 'http' method, so http preflight must pass
|
||||
* for the worker to claim any tasks. Curl preflight is for future use.
|
||||
*/
|
||||
private async runDualPreflights(): Promise<void> {
|
||||
console.log(`[TaskWorker] Running dual-transport preflights...`);
|
||||
|
||||
// Run both preflights in parallel for efficiency
|
||||
const [curlResult, httpResult] = await Promise.all([
|
||||
runCurlPreflight(this.crawlRotator).catch((err): CurlPreflightResult => ({
|
||||
method: 'curl',
|
||||
passed: false,
|
||||
proxyAvailable: false,
|
||||
proxyConnected: false,
|
||||
antidetectReady: false,
|
||||
proxyIp: null,
|
||||
fingerprint: null,
|
||||
error: `Preflight error: ${err.message}`,
|
||||
responseTimeMs: null,
|
||||
})),
|
||||
runPuppeteerPreflightWithRetry(this.crawlRotator, 1).catch((err): PuppeteerPreflightResult => ({
|
||||
method: 'http',
|
||||
passed: false,
|
||||
proxyAvailable: false,
|
||||
proxyConnected: false,
|
||||
antidetectReady: false,
|
||||
proxyIp: null,
|
||||
fingerprint: null,
|
||||
error: `Preflight error: ${err.message}`,
|
||||
responseTimeMs: null,
|
||||
productsReturned: 0,
|
||||
})),
|
||||
]);
|
||||
|
||||
// Store results
|
||||
this.preflightCurlResult = curlResult;
|
||||
this.preflightHttpResult = httpResult;
|
||||
this.preflightCurlPassed = curlResult.passed;
|
||||
this.preflightHttpPassed = httpResult.passed;
|
||||
|
||||
// Log results
|
||||
console.log(`[TaskWorker] CURL preflight: ${curlResult.passed ? 'PASSED' : 'FAILED'}${curlResult.error ? ` - ${curlResult.error}` : ''}`);
|
||||
console.log(`[TaskWorker] HTTP preflight: ${httpResult.passed ? 'PASSED' : 'FAILED'}${httpResult.error ? ` - ${httpResult.error}` : ''}`);
|
||||
|
||||
if (httpResult.passed && httpResult.productsReturned) {
|
||||
console.log(`[TaskWorker] HTTP preflight returned ${httpResult.productsReturned} products from test store`);
|
||||
}
|
||||
|
||||
// Report to worker_registry via API
|
||||
await this.reportPreflightStatus();
|
||||
|
||||
// Since all tasks require 'http', warn if http preflight failed
|
||||
if (!this.preflightHttpPassed) {
|
||||
console.warn(`[TaskWorker] WARNING: HTTP preflight failed - this worker cannot claim any tasks!`);
|
||||
console.warn(`[TaskWorker] Error: ${httpResult.error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Report preflight status to worker_registry
|
||||
*/
|
||||
private async reportPreflightStatus(): Promise<void> {
|
||||
try {
|
||||
// Update worker_registry directly via SQL (more reliable than API)
|
||||
await this.pool.query(`
|
||||
SELECT update_worker_preflight($1, 'curl', $2, $3, $4)
|
||||
`, [
|
||||
this.workerId,
|
||||
this.preflightCurlPassed ? 'passed' : 'failed',
|
||||
this.preflightCurlResult?.responseTimeMs || null,
|
||||
this.preflightCurlResult?.error || null,
|
||||
]);
|
||||
|
||||
await this.pool.query(`
|
||||
SELECT update_worker_preflight($1, 'http', $2, $3, $4)
|
||||
`, [
|
||||
this.workerId,
|
||||
this.preflightHttpPassed ? 'passed' : 'failed',
|
||||
this.preflightHttpResult?.responseTimeMs || null,
|
||||
this.preflightHttpResult?.error || null,
|
||||
]);
|
||||
|
||||
console.log(`[TaskWorker] Preflight status reported to worker_registry`);
|
||||
} catch (err: any) {
|
||||
// Non-fatal - worker can still function
|
||||
console.warn(`[TaskWorker] Could not report preflight status: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register worker with the registry (get friendly name)
|
||||
*/
|
||||
@@ -493,11 +607,15 @@ export class TaskWorker {
|
||||
// Register with the API to get a friendly name
|
||||
await this.register();
|
||||
|
||||
// Run dual-transport preflights
|
||||
await this.runDualPreflights();
|
||||
|
||||
// Start registry heartbeat
|
||||
this.startRegistryHeartbeat();
|
||||
|
||||
const roleMsg = this.role ? `for role: ${this.role}` : '(role-agnostic - any task)';
|
||||
console.log(`[TaskWorker] ${this.friendlyName} starting ${roleMsg} (max ${this.maxConcurrentTasks} concurrent tasks)`);
|
||||
const preflightMsg = `curl=${this.preflightCurlPassed ? '✓' : '✗'} http=${this.preflightHttpPassed ? '✓' : '✗'}`;
|
||||
console.log(`[TaskWorker] ${this.friendlyName} starting ${roleMsg} (${preflightMsg}, max ${this.maxConcurrentTasks} concurrent tasks)`);
|
||||
|
||||
while (this.isRunning) {
|
||||
try {
|
||||
@@ -551,10 +669,36 @@ export class TaskWorker {
|
||||
|
||||
// Try to claim more tasks if we have capacity
|
||||
if (this.canAcceptMoreTasks()) {
|
||||
const task = await taskService.claimTask(this.role, this.workerId);
|
||||
// Pass preflight capabilities to only claim compatible tasks
|
||||
const task = await taskService.claimTask(
|
||||
this.role,
|
||||
this.workerId,
|
||||
this.preflightCurlPassed,
|
||||
this.preflightHttpPassed
|
||||
);
|
||||
|
||||
if (task) {
|
||||
console.log(`[TaskWorker] ${this.friendlyName} claimed task ${task.id} (${task.role}) [${this.activeTasks.size + 1}/${this.maxConcurrentTasks}]`);
|
||||
|
||||
// =================================================================
|
||||
// PREFLIGHT CHECK - CRITICAL: Worker MUST pass before task execution
|
||||
// Verifies: 1) Proxy available 2) Proxy connected 3) Anti-detect ready
|
||||
// =================================================================
|
||||
const preflight = await this.crawlRotator.preflight();
|
||||
if (!preflight.passed) {
|
||||
console.log(`[TaskWorker] ${this.friendlyName} PREFLIGHT FAILED for task ${task.id}: ${preflight.error}`);
|
||||
console.log(`[TaskWorker] Releasing task ${task.id} back to pending - worker cannot proceed without proxy/anti-detect`);
|
||||
|
||||
// Release task back to pending so another worker can pick it up
|
||||
await taskService.releaseTask(task.id);
|
||||
|
||||
// Wait before trying again - give proxies time to recover
|
||||
await this.sleep(30000); // 30 second wait on preflight failure
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[TaskWorker] ${this.friendlyName} preflight PASSED for task ${task.id} (proxy: ${preflight.proxyIp}, ${preflight.responseTimeMs}ms)`);
|
||||
|
||||
this.activeTasks.set(task.id, task);
|
||||
|
||||
// Start task in background (don't await)
|
||||
@@ -611,6 +755,7 @@ export class TaskWorker {
|
||||
heartbeat: async () => {
|
||||
await taskService.heartbeat(task.id);
|
||||
},
|
||||
crawlRotator: this.crawlRotator,
|
||||
};
|
||||
|
||||
// Execute the task
|
||||
@@ -716,6 +861,8 @@ export class TaskWorker {
|
||||
maxConcurrentTasks: number;
|
||||
isBackingOff: boolean;
|
||||
backoffReason: string | null;
|
||||
preflightCurlPassed: boolean;
|
||||
preflightHttpPassed: boolean;
|
||||
} {
|
||||
return {
|
||||
workerId: this.workerId,
|
||||
@@ -726,6 +873,8 @@ export class TaskWorker {
|
||||
maxConcurrentTasks: this.maxConcurrentTasks,
|
||||
isBackingOff: this.isBackingOff,
|
||||
backoffReason: this.backoffReason,
|
||||
preflightCurlPassed: this.preflightCurlPassed,
|
||||
preflightHttpPassed: this.preflightHttpPassed,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -742,8 +891,8 @@ async function main(): Promise<void> {
|
||||
'store_discovery',
|
||||
'entry_point_discovery',
|
||||
'product_discovery',
|
||||
'payload_fetch', // NEW: Fetches from API, saves to disk
|
||||
'product_refresh', // CHANGED: Reads from disk, processes to DB
|
||||
'payload_fetch', // Fetches from API, saves to disk
|
||||
'product_refresh', // Reads from disk, processes to DB
|
||||
'analytics_refresh',
|
||||
];
|
||||
|
||||
|
||||
180
backend/test-intercept.js
Normal file
180
backend/test-intercept.js
Normal file
@@ -0,0 +1,180 @@
|
||||
/**
|
||||
* Stealth Browser Payload Capture - Direct GraphQL Injection
|
||||
*
|
||||
* Uses the browser session to make GraphQL requests that look organic.
|
||||
* Adds proper headers matching what Dutchie's frontend sends.
|
||||
*/
|
||||
|
||||
const puppeteer = require('puppeteer-extra');
|
||||
const StealthPlugin = require('puppeteer-extra-plugin-stealth');
|
||||
const fs = require('fs');
|
||||
|
||||
puppeteer.use(StealthPlugin());
|
||||
|
||||
async function capturePayload(config) {
|
||||
const {
|
||||
dispensaryId = null,
|
||||
platformId,
|
||||
cName,
|
||||
outputPath = `/tmp/payload_${cName}_${Date.now()}.json`,
|
||||
} = config;
|
||||
|
||||
const browser = await puppeteer.launch({
|
||||
headless: 'new',
|
||||
args: ['--no-sandbox', '--disable-setuid-sandbox']
|
||||
});
|
||||
|
||||
const page = await browser.newPage();
|
||||
|
||||
// Establish session by visiting the embedded menu
|
||||
const embedUrl = `https://dutchie.com/embedded-menu/${cName}?menuType=rec`;
|
||||
console.log(`[Capture] Establishing session at ${embedUrl}...`);
|
||||
|
||||
await page.goto(embedUrl, {
|
||||
waitUntil: 'networkidle2',
|
||||
timeout: 60000
|
||||
});
|
||||
|
||||
console.log('[Capture] Session established, fetching ALL products...');
|
||||
|
||||
// Fetch all products using GET requests with proper headers
|
||||
const result = await page.evaluate(async (platformId, cName) => {
|
||||
const allProducts = [];
|
||||
const logs = [];
|
||||
let pageNum = 0;
|
||||
const perPage = 100;
|
||||
let totalCount = 0;
|
||||
const sessionId = 'browser-session-' + Date.now();
|
||||
|
||||
try {
|
||||
while (pageNum < 30) { // Max 30 pages = 3000 products
|
||||
const variables = {
|
||||
includeEnterpriseSpecials: false,
|
||||
productsFilter: {
|
||||
dispensaryId: platformId,
|
||||
pricingType: 'rec',
|
||||
Status: 'Active', // 'Active' for in-stock products per CLAUDE.md
|
||||
types: [],
|
||||
useCache: true,
|
||||
isDefaultSort: true,
|
||||
sortBy: 'popularSortIdx',
|
||||
sortDirection: 1,
|
||||
bypassOnlineThresholds: true,
|
||||
isKioskMenu: false,
|
||||
removeProductsBelowOptionThresholds: false,
|
||||
},
|
||||
page: pageNum,
|
||||
perPage: perPage,
|
||||
};
|
||||
|
||||
const extensions = {
|
||||
persistedQuery: {
|
||||
version: 1,
|
||||
sha256Hash: 'ee29c060826dc41c527e470e9ae502c9b2c169720faa0a9f5d25e1b9a530a4a0'
|
||||
}
|
||||
};
|
||||
|
||||
// Build GET URL like the browser does
|
||||
const qs = new URLSearchParams({
|
||||
operationName: 'FilteredProducts',
|
||||
variables: JSON.stringify(variables),
|
||||
extensions: JSON.stringify(extensions)
|
||||
});
|
||||
const url = `https://dutchie.com/api-3/graphql?${qs.toString()}`;
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'content-type': 'application/json',
|
||||
'x-dutchie-session': sessionId,
|
||||
'apollographql-client-name': 'Marketplace (production)',
|
||||
},
|
||||
credentials: 'include'
|
||||
});
|
||||
|
||||
logs.push(`Page ${pageNum}: HTTP ${response.status}`);
|
||||
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
logs.push(`HTTP error: ${response.status} - ${text.slice(0, 200)}`);
|
||||
break;
|
||||
}
|
||||
|
||||
const json = await response.json();
|
||||
|
||||
if (json.errors) {
|
||||
logs.push(`GraphQL error: ${JSON.stringify(json.errors).slice(0, 200)}`);
|
||||
break;
|
||||
}
|
||||
|
||||
const data = json?.data?.filteredProducts;
|
||||
if (!data || !data.products) {
|
||||
logs.push('No products in response');
|
||||
break;
|
||||
}
|
||||
|
||||
const products = data.products;
|
||||
allProducts.push(...products);
|
||||
|
||||
if (pageNum === 0) {
|
||||
totalCount = data.queryInfo?.totalCount || 0;
|
||||
logs.push(`Total reported: ${totalCount}`);
|
||||
}
|
||||
|
||||
logs.push(`Got ${products.length} products (total: ${allProducts.length}/${totalCount})`);
|
||||
|
||||
if (allProducts.length >= totalCount || products.length < perPage) {
|
||||
break;
|
||||
}
|
||||
|
||||
pageNum++;
|
||||
|
||||
// Small delay between pages to be polite
|
||||
await new Promise(r => setTimeout(r, 200));
|
||||
}
|
||||
} catch (err) {
|
||||
logs.push(`Error: ${err.message}`);
|
||||
}
|
||||
|
||||
return { products: allProducts, totalCount, logs };
|
||||
}, platformId, cName);
|
||||
|
||||
await browser.close();
|
||||
|
||||
// Print logs from browser context
|
||||
result.logs.forEach(log => console.log(`[Browser] ${log}`));
|
||||
|
||||
console.log(`[Capture] Got ${result.products.length} products (API reported ${result.totalCount})`);
|
||||
|
||||
const payload = {
|
||||
dispensaryId: dispensaryId,
|
||||
platformId: platformId,
|
||||
cName,
|
||||
fetchedAt: new Date().toISOString(),
|
||||
productCount: result.products.length,
|
||||
products: result.products,
|
||||
};
|
||||
|
||||
fs.writeFileSync(outputPath, JSON.stringify(payload, null, 2));
|
||||
|
||||
console.log(`\n=== Capture Complete ===`);
|
||||
console.log(`Total products: ${result.products.length}`);
|
||||
console.log(`Saved to: ${outputPath}`);
|
||||
console.log(`File size: ${(fs.statSync(outputPath).size / 1024).toFixed(1)} KB`);
|
||||
|
||||
return payload;
|
||||
}
|
||||
|
||||
// Run
|
||||
(async () => {
|
||||
const payload = await capturePayload({
|
||||
cName: 'AZ-Deeply-Rooted',
|
||||
platformId: '6405ef617056e8014d79101b',
|
||||
});
|
||||
|
||||
if (payload.products.length > 0) {
|
||||
const sample = payload.products[0];
|
||||
console.log(`\nSample: ${sample.Name || sample.name} - ${sample.brand?.name || sample.brandName}`);
|
||||
}
|
||||
})().catch(console.error);
|
||||
@@ -14,5 +14,5 @@
|
||||
"allowSyntheticDefaultImports": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist", "src/**/*.test.ts", "src/**/__tests__/**"]
|
||||
"exclude": ["node_modules", "dist", "src/**/*.test.ts", "src/**/__tests__/**", "src/_deprecated/**"]
|
||||
}
|
||||
|
||||
@@ -48,6 +48,17 @@ interface Worker {
|
||||
seconds_since_heartbeat: number;
|
||||
decommission_requested?: boolean;
|
||||
decommission_reason?: string;
|
||||
// Dual-transport preflight status
|
||||
preflight_curl_status?: 'pending' | 'passed' | 'failed' | 'skipped';
|
||||
preflight_http_status?: 'pending' | 'passed' | 'failed' | 'skipped';
|
||||
preflight_curl_at?: string;
|
||||
preflight_http_at?: string;
|
||||
preflight_curl_error?: string;
|
||||
preflight_http_error?: string;
|
||||
preflight_curl_ms?: number;
|
||||
preflight_http_ms?: number;
|
||||
can_curl?: boolean;
|
||||
can_http?: boolean;
|
||||
metadata: {
|
||||
cpu?: number;
|
||||
memory?: number;
|
||||
@@ -277,6 +288,67 @@ function ResourceBadge({ worker }: { worker: Worker }) {
|
||||
);
|
||||
}
|
||||
|
||||
// Transport capability badge showing curl/http preflight status
|
||||
function TransportBadge({ worker }: { worker: Worker }) {
|
||||
const curlStatus = worker.preflight_curl_status || 'pending';
|
||||
const httpStatus = worker.preflight_http_status || 'pending';
|
||||
|
||||
const getStatusConfig = (status: string, label: string, ms?: number, error?: string) => {
|
||||
switch (status) {
|
||||
case 'passed':
|
||||
return {
|
||||
bg: 'bg-emerald-100',
|
||||
text: 'text-emerald-700',
|
||||
icon: <CheckCircle className="w-3 h-3" />,
|
||||
tooltip: ms ? `${label}: Passed (${ms}ms)` : `${label}: Passed`,
|
||||
};
|
||||
case 'failed':
|
||||
return {
|
||||
bg: 'bg-red-100',
|
||||
text: 'text-red-700',
|
||||
icon: <XCircle className="w-3 h-3" />,
|
||||
tooltip: error ? `${label}: Failed - ${error}` : `${label}: Failed`,
|
||||
};
|
||||
case 'skipped':
|
||||
return {
|
||||
bg: 'bg-gray-100',
|
||||
text: 'text-gray-500',
|
||||
icon: <Clock className="w-3 h-3" />,
|
||||
tooltip: `${label}: Skipped`,
|
||||
};
|
||||
default:
|
||||
return {
|
||||
bg: 'bg-yellow-100',
|
||||
text: 'text-yellow-700',
|
||||
icon: <Clock className="w-3 h-3 animate-pulse" />,
|
||||
tooltip: `${label}: Pending`,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const curlConfig = getStatusConfig(curlStatus, 'CURL', worker.preflight_curl_ms, worker.preflight_curl_error);
|
||||
const httpConfig = getStatusConfig(httpStatus, 'HTTP', worker.preflight_http_ms, worker.preflight_http_error);
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-1">
|
||||
<div
|
||||
className={`inline-flex items-center gap-1 px-1.5 py-0.5 rounded text-xs font-medium ${curlConfig.bg} ${curlConfig.text}`}
|
||||
title={curlConfig.tooltip}
|
||||
>
|
||||
{curlConfig.icon}
|
||||
<span>curl</span>
|
||||
</div>
|
||||
<div
|
||||
className={`inline-flex items-center gap-1 px-1.5 py-0.5 rounded text-xs font-medium ${httpConfig.bg} ${httpConfig.text}`}
|
||||
title={httpConfig.tooltip}
|
||||
>
|
||||
{httpConfig.icon}
|
||||
<span>http</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Task count badge showing active/max concurrent tasks
|
||||
function TaskCountBadge({ worker, tasks }: { worker: Worker; tasks: Task[] }) {
|
||||
const activeCount = worker.active_task_count ?? (worker.current_task_id ? 1 : 0);
|
||||
@@ -883,6 +955,7 @@ export function WorkersDashboard() {
|
||||
<th className="px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase">Worker</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase">Role</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase">Status</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase">Transport</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase">Resources</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase">Tasks</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-medium text-gray-500 uppercase">Duration</th>
|
||||
@@ -934,6 +1007,9 @@ export function WorkersDashboard() {
|
||||
<td className="px-4 py-3">
|
||||
<HealthBadge status={worker.status} healthStatus={worker.health_status} />
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<TransportBadge worker={worker} />
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<ResourceBadge worker={worker} />
|
||||
</td>
|
||||
|
||||
18
k8s/woodpecker-agent-compose.yml
Normal file
18
k8s/woodpecker-agent-compose.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
# Woodpecker Agent Docker Compose
|
||||
# Path: /opt/woodpecker/docker-compose.yml
|
||||
# Deploy: cd /opt/woodpecker && docker compose up -d
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
woodpecker-agent:
|
||||
image: woodpeckerci/woodpecker-agent:latest
|
||||
container_name: woodpecker-agent
|
||||
restart: always
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
environment:
|
||||
- WOODPECKER_SERVER=localhost:9000
|
||||
- WOODPECKER_AGENT_SECRET=${WOODPECKER_AGENT_SECRET}
|
||||
- WOODPECKER_MAX_WORKFLOWS=5
|
||||
- WOODPECKER_HEALTHCHECK=true
|
||||
- WOODPECKER_LOG_LEVEL=info
|
||||
Reference in New Issue
Block a user