cannaiq/backend/src/dutchie-az/services/analytics/cache.ts
Kelly b4a2fb7d03 feat: Add v2 architecture with multi-state support and orchestrator services
Major additions:
- Multi-state expansion: states table, StateSelector, NationalDashboard, StateHeatmap, CrossStateCompare
- Orchestrator services: trace service, error taxonomy, retry manager, proxy rotator
- Discovery system: dutchie discovery service, geo validation, city seeding scripts
- Analytics infrastructure: analytics v2 routes, brand/pricing/stores intelligence pages
- Local development: setup-local.sh starts all 5 services (postgres, backend, cannaiq, findadispo, findagram)
- Migrations 037-056: crawler profiles, states, analytics indexes, worker metadata

Frontend pages added:
- Discovery, ChainsDashboard, IntelligenceBrands, IntelligencePricing, IntelligenceStores
- StateHeatmap, CrossStateCompare, SyncInfoPanel

Components added:
- StateSelector, OrchestratorTraceModal, WorkflowStepper

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-07 11:30:57 -07:00

228 lines · 6.0 KiB · TypeScript

/**
 * Analytics Cache Service
 *
 * Provides a caching layer for expensive analytics queries.
 * Uses PostgreSQL for persistence with configurable TTLs.
 *
 * Phase 3: Analytics Dashboards
 */
import { Pool } from 'pg';
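
// The queries below assume an `analytics_cache` table shaped roughly like the
// sketch that follows (illustrative only -- the real DDL lives in the migrations,
// which this file does not show). A unique constraint or primary key on
// cache_key is required for the ON CONFLICT upsert in saveToDb().
//
//   CREATE TABLE analytics_cache (
//     cache_key     TEXT PRIMARY KEY,
//     cache_data    JSONB NOT NULL,
//     computed_at   TIMESTAMPTZ NOT NULL,
//     expires_at    TIMESTAMPTZ NOT NULL,
//     query_time_ms INTEGER
//   );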
export interface CacheEntry<T = unknown> {
  key: string;
  data: T;
  computedAt: Date;
  expiresAt: Date;
  queryTimeMs?: number;
}

export interface CacheConfig {
  defaultTtlMinutes: number;
}

const DEFAULT_CONFIG: CacheConfig = {
  defaultTtlMinutes: 15,
};
export class AnalyticsCache {
  private pool: Pool;
  private config: CacheConfig;
  // In-process hot tier in front of the database table. Expired entries linger
  // in this map until overwritten, invalidated, or swept by cleanExpired(), so
  // long-lived processes should call cleanExpired() periodically.
  private memoryCache: Map<string, CacheEntry> = new Map();

  constructor(pool: Pool, config: Partial<CacheConfig> = {}) {
    this.pool = pool;
    this.config = { ...DEFAULT_CONFIG, ...config };
  }
  /**
   * Get cached data or compute and cache it.
   * Checks the memory cache first, then the database, then recomputes.
   */
  async getOrCompute<T>(
    key: string,
    computeFn: () => Promise<T>,
    ttlMinutes?: number
  ): Promise<{ data: T; fromCache: boolean; queryTimeMs: number }> {
    const ttl = ttlMinutes ?? this.config.defaultTtlMinutes;

    // Check memory cache first
    const memEntry = this.memoryCache.get(key);
    if (memEntry && new Date() < memEntry.expiresAt) {
      return { data: memEntry.data as T, fromCache: true, queryTimeMs: memEntry.queryTimeMs || 0 };
    }

    // Check database cache
    const dbEntry = await this.getFromDb<T>(key);
    if (dbEntry && new Date() < dbEntry.expiresAt) {
      this.memoryCache.set(key, dbEntry);
      return { data: dbEntry.data, fromCache: true, queryTimeMs: dbEntry.queryTimeMs || 0 };
    }

    // Compute fresh data
    const startTime = Date.now();
    const data = await computeFn();
    const queryTimeMs = Date.now() - startTime;

    // Cache result
    const entry: CacheEntry<T> = {
      key,
      data,
      computedAt: new Date(),
      expiresAt: new Date(Date.now() + ttl * 60 * 1000),
      queryTimeMs,
    };
    await this.saveToDb(entry);
    this.memoryCache.set(key, entry);

    return { data, fromCache: false, queryTimeMs };
  }
  /**
   * Get from database cache
   */
  private async getFromDb<T>(key: string): Promise<CacheEntry<T> | null> {
    try {
      const result = await this.pool.query(`
        SELECT cache_data, computed_at, expires_at, query_time_ms
        FROM analytics_cache
        WHERE cache_key = $1
          AND expires_at > NOW()
      `, [key]);
      if (result.rows.length === 0) return null;
      const row = result.rows[0];
      return {
        key,
        data: row.cache_data as T,
        computedAt: row.computed_at,
        expiresAt: row.expires_at,
        queryTimeMs: row.query_time_ms,
      };
    } catch (error) {
      console.warn(`[AnalyticsCache] Failed to get from DB: ${error}`);
      return null;
    }
  }
  /**
   * Save to database cache (upsert keyed on cache_key)
   */
  private async saveToDb<T>(entry: CacheEntry<T>): Promise<void> {
    try {
      // Data is stored as JSON; node-postgres returns jsonb columns already
      // parsed, so getFromDb can use row.cache_data directly.
      await this.pool.query(`
        INSERT INTO analytics_cache (cache_key, cache_data, computed_at, expires_at, query_time_ms)
        VALUES ($1, $2, $3, $4, $5)
        ON CONFLICT (cache_key)
        DO UPDATE SET
          cache_data = EXCLUDED.cache_data,
          computed_at = EXCLUDED.computed_at,
          expires_at = EXCLUDED.expires_at,
          query_time_ms = EXCLUDED.query_time_ms
      `, [entry.key, JSON.stringify(entry.data), entry.computedAt, entry.expiresAt, entry.queryTimeMs]);
    } catch (error) {
      console.warn(`[AnalyticsCache] Failed to save to DB: ${error}`);
    }
  }
  /**
   * Invalidate a cache entry
   */
  async invalidate(key: string): Promise<void> {
    this.memoryCache.delete(key);
    try {
      await this.pool.query('DELETE FROM analytics_cache WHERE cache_key = $1', [key]);
    } catch (error) {
      console.warn(`[AnalyticsCache] Failed to invalidate: ${error}`);
    }
  }
  /**
   * Invalidate all entries whose key contains the given substring.
   * Note: the pattern is treated as a plain substring; SQL LIKE wildcards
   * ('%', '_') inside it would match differently in the DB than in memory.
   */
  async invalidatePattern(pattern: string): Promise<number> {
    // Clear matching entries from the memory cache
    for (const key of this.memoryCache.keys()) {
      if (key.includes(pattern)) {
        this.memoryCache.delete(key);
      }
    }
    try {
      const result = await this.pool.query(
        'DELETE FROM analytics_cache WHERE cache_key LIKE $1',
        [`%${pattern}%`]
      );
      return result.rowCount || 0;
    } catch (error) {
      console.warn(`[AnalyticsCache] Failed to invalidate pattern: ${error}`);
      return 0;
    }
  }
  /**
   * Clean expired entries
   */
  async cleanExpired(): Promise<number> {
    // Clean memory cache
    const now = new Date();
    for (const [key, entry] of this.memoryCache.entries()) {
      if (now >= entry.expiresAt) {
        this.memoryCache.delete(key);
      }
    }
    try {
      const result = await this.pool.query('DELETE FROM analytics_cache WHERE expires_at < NOW()');
      return result.rowCount || 0;
    } catch (error) {
      console.warn(`[AnalyticsCache] Failed to clean expired: ${error}`);
      return 0;
    }
  }
  /**
   * Get cache statistics
   */
  async getStats(): Promise<{
    memoryCacheSize: number;
    dbCacheSize: number;
    expiredCount: number;
  }> {
    try {
      const result = await this.pool.query(`
        SELECT
          COUNT(*) FILTER (WHERE expires_at > NOW()) as active,
          COUNT(*) FILTER (WHERE expires_at <= NOW()) as expired
        FROM analytics_cache
      `);
      return {
        memoryCacheSize: this.memoryCache.size,
        dbCacheSize: parseInt(result.rows[0]?.active || '0', 10),
        expiredCount: parseInt(result.rows[0]?.expired || '0', 10),
      };
    } catch (error) {
      // Fall back to memory-only stats if the DB is unreachable
      console.warn(`[AnalyticsCache] Failed to get stats: ${error}`);
      return {
        memoryCacheSize: this.memoryCache.size,
        dbCacheSize: 0,
        expiredCount: 0,
      };
    }
  }
}
/**
 * Generate a deterministic cache key from a prefix and parameters.
 * Parameters are sorted by name; undefined/null values are dropped.
 */
export function cacheKey(prefix: string, params: Record<string, unknown> = {}): string {
  const sortedParams = Object.keys(params)
    .sort()
    .filter(k => params[k] !== undefined && params[k] !== null)
    .map(k => `${k}=${params[k]}`)
    .join('&');
  return sortedParams ? `${prefix}:${sortedParams}` : prefix;
}