mirror of
https://github.com/netbymatt/ws4kp.git
synced 2026-04-14 07:39:29 -07:00
Replace CORS proxy with complete server-side cache
- Replace cors/ directory and cors.mjs utility with comprehensive
HTTP caching proxy in proxy/ directory
- Implement RFC-compliant caching with cache-control headers,
conditional requests, and in-flight deduplication
- Centralized error handling with "safe" fetch utilities
- Add unified proxy handlers for weather.gov, SPC, radar, and mesonet APIs
- Include cache management endpoint and extensive diagnostic logging
- Migrate client-side URL rewriting from cors.mjs to url-rewrite.mjs
This commit is contained in:
60
index.mjs
60
index.mjs
@@ -1,8 +1,12 @@
|
||||
import 'dotenv/config';
|
||||
import express from 'express';
|
||||
import fs from 'fs';
|
||||
import {
|
||||
weatherProxy, radarProxy, outlookProxy, mesonetProxy,
|
||||
} from './proxy/handlers.mjs';
|
||||
import playlist from './src/playlist.mjs';
|
||||
import OVERRIDES from './src/overrides.mjs';
|
||||
import cache from './proxy/cache.mjs';
|
||||
|
||||
const app = express();
|
||||
const port = process.env.WS4KP_PORT ?? 8080;
|
||||
@@ -69,20 +73,57 @@ const geoip = (req, res) => {
|
||||
res.json({});
|
||||
};
|
||||
|
||||
// debugging
|
||||
if (process.env?.DIST === '1') {
|
||||
// distribution
|
||||
app.use('/scripts', express.static('./server/scripts'));
|
||||
app.use('/geoip', geoip);
|
||||
app.use('/', express.static('./dist'));
|
||||
// Configure static asset caching with proper ETags and cache validation
const staticOptions = {
	etag: true, // Enable ETag generation
	lastModified: true, // Enable Last-Modified headers
	setHeaders: (res, path, stat) => {
		// Build a strong-looking validator from mtime + size (hex) so the
		// browser can revalidate with If-None-Match and get a 304.
		const mtimeHex = stat.mtime.getTime().toString(16);
		const sizeHex = stat.size.toString(16);
		res.setHeader('ETag', `"${mtimeHex}-${sizeHex}"`);

		const isImmutableAsset = /\.(png|jpg|jpeg|gif|webp|ico|woff|woff2|ttf|eot)$/i.test(path);
		const isScriptOrStyle = /\.(css|js|mjs)$/i.test(path);

		if (isImmutableAsset) {
			// Images and fonts - cache for 1 year (immutable content)
			res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');
		} else if (isScriptOrStyle) {
			// Scripts and styles - use cache validation instead of no-cache
			// This allows browsers to use cached version if ETag matches (304 response)
			res.setHeader('Cache-Control', 'public, max-age=0, must-revalidate');
		} else {
			// Other files - cache for 1 hour with validation
			res.setHeader('Cache-Control', 'public, max-age=3600, must-revalidate');
		}
	},
};
|
||||
|
||||
// Weather.gov API proxy (catch-all for any Weather.gov API endpoint)
|
||||
app.use('/api/', weatherProxy);
|
||||
|
||||
// Cache management DELETE endpoint to allow "uncaching" specific URLs
// e.g. DELETE /cache/<path> asks the proxy cache to drop the entry for <path>
app.delete(/^\/cache\/.*/, (req, res) => {
	// Strip the '/cache' route prefix; the remainder is used directly as the
	// cache key (presumably matching HttpCache.generateKey's path+query form)
	const path = req.url.replace('/cache', '');
	const cleared = cache.clearEntry(path);
	// Report whether an entry was actually removed, and the key that was used
	res.json({ cleared, path });
});
|
||||
|
||||
// specific proxies for other services
|
||||
app.use('/radar/', radarProxy);
|
||||
app.use('/spc/', outlookProxy);
|
||||
app.use('/mesonet/', mesonetProxy);
|
||||
|
||||
if (process.env?.DIST === '1') {
|
||||
// Production ("distribution") mode uses pre-baked files in the dist directory
|
||||
// 'npm run build' and then 'DIST=1 npm start'
|
||||
app.use('/scripts', express.static('./server/scripts', staticOptions));
|
||||
app.use('/geoip', geoip);
|
||||
app.use('/', express.static('./dist', staticOptions));
|
||||
} else {
|
||||
// Development mode serves files from the server directory: 'npm start'
|
||||
app.get('/index.html', index);
|
||||
app.use('/geoip', geoip);
|
||||
app.use('/resources', express.static('./server/scripts/modules'));
|
||||
app.get('/', index);
|
||||
app.get('*name', express.static('./server'));
|
||||
// cors pass-thru to api.weather.gov
|
||||
app.get('*name', express.static('./server', staticOptions));
|
||||
app.get('/playlist.json', playlist);
|
||||
}
|
||||
|
||||
@@ -94,6 +135,7 @@ const server = app.listen(port, () => {
|
||||
// Stop accepting new connections, then exit once in-flight requests finish.
const gracefulShutdown = () => {
	const onClosed = () => {
		console.log('Server closed');
		process.exit(0);
	};
	server.close(onClosed);
};
|
||||
|
||||
|
||||
464
proxy/cache.mjs
Normal file
464
proxy/cache.mjs
Normal file
@@ -0,0 +1,464 @@
|
||||
/**
|
||||
* HTTP cache implementation for API proxy that respects cache-control headers
|
||||
* (without external dependencies)
|
||||
*
|
||||
* Features:
|
||||
* - Respects HTTP cache-control headers (s-maxage, max-age)
|
||||
* - Heuristic caching based on Last-Modified headers when no explicit cache directives exist
|
||||
* - Conditional requests using ETags and If-Modified-Since headers to validate stale content
|
||||
* - In-flight request deduplication to prevent multiple simultaneous requests for the same resource
|
||||
* - Comprehensive logging with cache hit/miss statistics and timing information
|
||||
* - Timeout handling and error recovery mechanisms
|
||||
*
|
||||
* The cache uses a three-state system:
|
||||
* - 'fresh': Content is within its TTL and served immediately
|
||||
* - 'stale': Content has expired but can be revalidated with conditional requests (304 Not Modified)
|
||||
* - 'miss': No cached content exists
|
||||
*
|
||||
* @class HttpCache
|
||||
*/
|
||||
|
||||
import https from 'https';
|
||||
|
||||
class HttpCache {
	constructor() {
		// key -> cached response entry (see storeCachedResponse for the shape)
		this.cache = new Map();
		// key -> promise for a request currently being fetched upstream
		this.inFlight = new Map();
		this.cleanupInterval = null;
		this.startCleanup();
	}

	// Parse cache-control header to extract s-maxage or max-age.
	// Returns the TTL in seconds, or 0 when the response must not be cached.
	static parseCacheControl(cacheControlHeader) {
		if (!cacheControlHeader) return 0;

		// Directives that forbid a shared cache from storing/reusing the
		// response (RFC 9111): no-store, no-cache and private. Treating
		// no-cache as "don't store" is stricter than the RFC requires
		// (it allows store-but-revalidate) but is always safe.
		if (/\b(?:no-store|no-cache|private)\b/i.test(cacheControlHeader)) return 0;

		// Look for s-maxage first (preferred for proxy caches), then max-age
		const sMaxAgeMatch = cacheControlHeader.match(/s-maxage=(\d+)/i);
		if (sMaxAgeMatch) {
			return parseInt(sMaxAgeMatch[1], 10);
		}

		const maxAgeMatch = cacheControlHeader.match(/max-age=(\d+)/i);
		if (maxAgeMatch) {
			return parseInt(maxAgeMatch[1], 10);
		}

		return 0; // No cache if no cache directives found
	}

	// Helper method to set filtered headers and our cache policy
	static setFilteredHeaders(res, headers) {
		// Strip cache-related headers and pass through others
		Object.entries(headers).forEach(([key, value]) => {
			const lowerKey = key.toLowerCase();
			// Skip cache-related headers that should be controlled by our proxy
			if (!['cache-control', 'expires', 'etag', 'last-modified'].includes(lowerKey)) {
				res.header(lowerKey, value);
			}
		});

		// Set our own cache policy - short cache to ensure browser checks back with our server
		res.header('cache-control', 'public, max-age=30');
	}

	// Generate cache key from request
	static generateKey(req) {
		// Since this cache is used only by the frontend, a simple path+query key suffices
		return `${req.path}${req.url.includes('?') ? req.url.substring(req.url.indexOf('?')) : ''}`;
	}

	// High-level method to handle caching for HTTP proxies.
	// Serves fresh entries immediately, deduplicates concurrent requests for
	// the same key, and otherwise delegates to makeUpstreamRequest.
	// Resolves true on a cache hit, false otherwise.
	async handleRequest(req, res, upstreamUrl, options = {}) {
		// Check cache status
		const cacheResult = this.getCachedRequest(req);

		if (cacheResult.status === 'fresh') {
			const cached = cacheResult.data;
			res.status(cached.statusCode);
			HttpCache.setFilteredHeaders(res, cached.headers);
			res.send(cached.data);
			return true; // Indicates cache hit
		}
		// For 'miss' or 'stale', proceed to upstream request

		// Generate cache key for in-flight tracking
		const cacheKey = HttpCache.generateKey(req);

		// Check if there's already a request in flight for this resource
		if (this.inFlight.has(cacheKey)) {
			console.log(`⏳ Wait | ${upstreamUrl}${req.path} (request already in flight)`);

			// Track when we start waiting for latency measurement
			const waitStartTime = Date.now();

			// Wait for the in-flight request to complete
			try {
				await this.inFlight.get(cacheKey);

				// After waiting, try cache again (should be populated now)
				const cached = this.cache.get(cacheKey);

				if (cached && Date.now() <= cached.expiry) {
					const waitLatency = Date.now() - waitStartTime;

					// Log cache hit with wait latency
					const age = Math.round((Date.now() - cached.timestamp) / 1000);
					const remainingTTL = Math.round((cached.expiry - Date.now()) / 1000);
					console.log(`🟢 Hit | ${cached.url} (age: ${age}s, remaining: ${remainingTTL}s, waited: ${waitLatency}ms)`);

					res.status(cached.statusCode);
					HttpCache.setFilteredHeaders(res, cached.headers);
					res.send(cached.data);
					return true; // Served from cache after waiting
				}

				// Fallthrough to make request if cache miss (shouldn't happen but safety net)
				console.warn(`⚠️ Redo | Cache miss after waiting for in-flight request: ${upstreamUrl}${req.path}`);
			} catch (_error) {
				// If the in-flight request failed, we'll make our own request
				console.warn(`⚠️ Redo | In-flight request failed, making new request: ${upstreamUrl}${req.path}`);
			}
		}

		// Create promise for this request
		const requestPromise = this.makeUpstreamRequest(req, res, upstreamUrl, options, cacheResult);

		// Store a wrapped promise that doesn't reject for waiters - they just need to know when it's done
		const inflightPromise = requestPromise.catch(() => null);
		this.inFlight.set(cacheKey, inflightPromise);

		try {
			// Send the request to the upstream service
			const result = await requestPromise;
			return result;
		} catch (error) {
			// Handle errors from the upstream request
			if (error.message === 'Timeout') {
				// Timeout errors are already logged and handled by makeUpstreamRequest
				return false;
			}
			// Re-throw other errors
			throw error;
		} finally {
			// Always clean up the in-flight tracking
			this.inFlight.delete(cacheKey);
		}
	}

	// Make the actual upstream request, handling caching and conditional requests.
	// Resolves false on success (cache miss) or a handled failure; rejects on
	// timeout and unexpected errors.
	async makeUpstreamRequest(req, res, upstreamUrl, options = {}, cacheResult = null) {
		return new Promise((resolve, reject) => {
			const headers = {
				'user-agent': options.userAgent || '(WeatherStar 4000+, ws4000@netbymatt.com)',
				accept: req.headers.accept,
				...options.headers,
			};

			// Handle query parameters, dropping any listed in options.skipParams
			const queryParams = Object.keys(req.query).reduce((acc, key) => {
				if (options.skipParams && options.skipParams.includes(key)) return acc;
				acc[key] = req.query[key];
				return acc;
			}, {});

			const queryString = new URLSearchParams(queryParams).toString();
			const fullUrl = `${upstreamUrl}${req.path}${queryString ? `?${queryString}` : ''}`;

			// Use the cache result passed from handleRequest (no additional cache call)
			let staleCache = null;

			if (cacheResult && cacheResult.status === 'stale' && cacheResult.data.originalHeaders) {
				staleCache = cacheResult.data;
				// Add conditional headers based on cached etag or last-modified header
				if (staleCache.originalHeaders.etag) {
					headers['if-none-match'] = staleCache.originalHeaders.etag;
				} else if (staleCache.originalHeaders['last-modified']) {
					headers['if-modified-since'] = staleCache.originalHeaders['last-modified'];
				}
			}

			let responseHandled = false; // Track if we've already sent a response

			const upstreamReq = https.get(fullUrl, { headers }, (getRes) => {
				const { statusCode } = getRes;

				// Handle 304 Not Modified responses - refresh stale cache and serve
				if (statusCode === 304) {
					if (responseHandled) return; // Prevent double response
					responseHandled = true;

					if (staleCache) {
						const newCacheControl = getRes.headers['cache-control'];
						const newMaxAge = HttpCache.parseCacheControl(newCacheControl);
						if (newMaxAge > 0) {
							staleCache.expiry = Date.now() + (newMaxAge * 1000);
							staleCache.timestamp = Date.now(); // Reset age counter for 304 refresh
							console.log(`🔄 Unchg | ${fullUrl} (got 304 Not Modified; refreshing cache expiry by ${newMaxAge}s)`);
						} else {
							console.log(`📉 NoCache | ${fullUrl} (no valid cache directives in 304, not updating expiry)`);
						}

						res.status(staleCache.statusCode);
						HttpCache.setFilteredHeaders(res, staleCache.headers);
						res.send(staleCache.data);
						resolve(false); // Cache hit after 304
						return;
					}
					// No stale entry for 304 response (this shouldn't happen!)
					console.warn(`⚠️ 304 response but no stale cache entry for ${fullUrl}`);
					res.status(500).json({ error: 'Cache inconsistency error' });
					reject(new Error('304 response without stale cache entry'));
					return;
				}

				// Helper function to handle response after data collection
				const handleResponse = (data) => {
					if (responseHandled) return; // Prevent double response
					responseHandled = true;

					// Log HTTP error status codes
					if (statusCode >= 400) {
						console.error(`🚫 ${statusCode} | ${fullUrl}`);
					}

					// Filter out cache headers before storing - we don't need them in our cache
					const filteredHeaders = {};
					Object.entries(getRes.headers).forEach(([key, value]) => {
						const lowerKey = key.toLowerCase();
						if (!['cache-control', 'expires', 'etag', 'last-modified'].includes(lowerKey)) {
							filteredHeaders[key] = value;
						}
					});

					const response = {
						statusCode,
						headers: filteredHeaders,
						data,
					};

					// Store in cache (pass original headers for cache logic, but store filtered headers)
					this.storeCachedResponse(req, response, fullUrl, getRes.headers);

					// Send response to client
					res.status(statusCode);

					// Set filtered headers and our cache policy
					HttpCache.setFilteredHeaders(res, getRes.headers);

					res.send(response.data);
					resolve(false); // Indicates cache miss, but successful
				};

				if (options.encoding === 'binary') {
					// For binary data, collect as Buffer chunks
					const chunks = [];
					getRes.on('data', (chunk) => chunks.push(chunk));
					getRes.on('end', () => handleResponse(Buffer.concat(chunks)));
				} else {
					// For text data, use string encoding
					let data = '';
					getRes.setEncoding(options.encoding || 'utf8');
					getRes.on('data', (chunk) => {
						data += chunk;
					});
					getRes.on('end', () => handleResponse(data));
				}
			});

			upstreamReq.on('error', (e) => {
				if (responseHandled) return; // Prevent double response
				responseHandled = true;

				console.error(`💥 Err | ${fullUrl}: ${e.message}`);
				res.status(500).json({ error: `Failed to fetch data from ${options.serviceName || 'upstream API'}` });

				// For known/expected network errors, resolve with false instead of rejecting to avoid extra logging
				if (e.code === 'ENOTFOUND' || e.code === 'ECONNREFUSED' || e.code === 'ETIMEDOUT') {
					resolve(false);
				} else {
					reject(e);
				}
			});

			upstreamReq.setTimeout(options.timeout || 30000, () => {
				if (responseHandled) return; // Prevent double response
				responseHandled = true;

				upstreamReq.destroy();
				console.error(`⏰ Timeout | ${fullUrl} (after ${options.timeout || 30000}ms)`);
				res.status(504).json({ error: 'Gateway timeout' });
				reject(new Error('Timeout'));
			});
		});
	}

	// Look up a request in the cache.
	// Returns { status: 'fresh'|'stale'|'miss', data: entry|null }.
	getCachedRequest(req) {
		const key = HttpCache.generateKey(req);
		const cached = this.cache.get(key);

		if (!cached) {
			return { status: 'miss', data: null };
		}

		const isExpired = Date.now() > cached.expiry;

		// If fresh, return immediately
		if (!isExpired) {
			const age = Math.round((Date.now() - cached.timestamp) / 1000);
			const remainingTTL = Math.round((cached.expiry - Date.now()) / 1000);
			console.log(`🎯 Hit | ${cached.url} (age: ${age}s, remaining: ${remainingTTL}s)`);
			return { status: 'fresh', data: cached };
		}

		// If stale, return for potential conditional request
		return { status: 'stale', data: cached };
	}

	// Store a response in the cache when headers allow it; no-op otherwise.
	storeCachedResponse(req, response, url, originalHeaders) {
		const key = HttpCache.generateKey(req);

		const cacheControl = originalHeaders['cache-control'];
		let maxAge = HttpCache.parseCacheControl(cacheControl);
		let cacheType = '';

		// If no explicit cache directives, try heuristic caching for Last-Modified
		if (maxAge <= 0) {
			const lastModified = originalHeaders['last-modified'];
			if (lastModified) {
				maxAge = HttpCache.calculateHeuristicMaxAge(lastModified);
				cacheType = 'heuristic';
			}
		} else {
			cacheType = 'explicit';
		}

		// Don't cache if still no valid max-age
		if (maxAge <= 0) {
			console.log(`📤 Sent | ${url} (no cache directives; not cached)`);
			return;
		}

		const cached = {
			statusCode: response.statusCode,
			headers: { ...response.headers },
			data: response.data,
			expiry: Date.now() + (maxAge * 1000),
			timestamp: Date.now(),
			url, // Store the URL for logging
			originalHeaders: { // Store original headers for conditional requests
				etag: originalHeaders.etag,
				'last-modified': originalHeaders['last-modified'],
			},
		};

		this.cache.set(key, cached);

		console.log(`🌐 Add | ${url} (${cacheType} ${maxAge}s TTL, expires: ${new Date(cached.expiry).toISOString()})`);
	}

	// Calculate heuristic max-age based on Last-Modified header
	// RFC 7234: A cache can use heuristic freshness calculation
	// Common heuristic: 10% of the age of the resource, with limits
	static calculateHeuristicMaxAge(lastModifiedHeader) {
		const lastModified = new Date(lastModifiedHeader);

		// An unparseable date yields NaN; every later comparison with NaN is
		// false, which previously produced a NaN expiry that never looked
		// expired. Treat invalid dates as "do not cache". (Date parsing never
		// throws, so a try/catch cannot catch this.)
		if (Number.isNaN(lastModified.getTime())) return 0;

		const now = new Date();
		const age = (now.getTime() - lastModified.getTime()) / 1000; // age in seconds

		if (age <= 0) return 0;

		// Use 10% of age, but limit between 1 hour and 4 hours
		const heuristicAge = Math.floor(age * 0.1);
		const minAge = 60 * 60; // 1 hour
		const maxAge = 4 * 60 * 60; // 4 hours

		return Math.max(minAge, Math.min(maxAge, heuristicAge));
	}

	// Periodic cleanup of expired entries
	startCleanup() {
		if (this.cleanupInterval) return;

		this.cleanupInterval = setInterval(() => {
			const now = Date.now();
			let removedCount = 0;

			Array.from(this.cache.entries()).forEach(([key, cached]) => {
				// Allow stale entries to persist for up to 3 hours before cleanup
				// This gives us time to make conditional requests and potentially refresh them
				const staleTimeLimit = 3 * 60 * 60 * 1000;
				if (now > cached.expiry + staleTimeLimit) {
					this.cache.delete(key);
					removedCount += 1;
				}
			});

			if (removedCount > 0) {
				console.log(`🧹 Clean | Removed ${removedCount} stale entries (${this.cache.size} remaining)`);
			}
		}, 5 * 60 * 1000); // Cleanup every 5 minutes
	}

	// Cache statistics: counts of valid/expired entries and in-flight requests
	getStats() {
		const now = Date.now();
		let expired = 0;
		let valid = 0;

		Array.from(this.cache.values()).forEach((cached) => {
			if (now > cached.expiry) {
				expired += 1;
			} else {
				valid += 1;
			}
		});

		return {
			total: this.cache.size,
			valid,
			expired,
			inFlight: this.inFlight.size,
		};
	}

	// Clear all cache entries
	clear() {
		this.cache.clear();
		console.log('🗑️ Clear | Cache cleared');
	}

	// Clear a specific cache entry by path; returns true if an entry was removed
	clearEntry(path) {
		const key = path;
		const deleted = this.cache.delete(key);
		if (deleted) {
			console.log(`🗑️ Clear | ${path} removed from cache`);
			return true;
		}
		return false;
	}

	// Stop cleanup interval and drop all cached/in-flight state
	destroy() {
		if (this.cleanupInterval) {
			clearInterval(this.cleanupInterval);
			this.cleanupInterval = null;
		}
		this.clear();
		this.inFlight.clear();
	}
}
|
||||
|
||||
// Create singleton instance of our cache (shared by all proxy handlers;
// note its cleanup timer starts as soon as this module is imported)
const cache = new HttpCache();

export default cache;
|
||||
44
proxy/handlers.mjs
Normal file
44
proxy/handlers.mjs
Normal file
@@ -0,0 +1,44 @@
|
||||
// Consolidated proxy handlers for all external API requests with caching
|
||||
|
||||
import cache from './cache.mjs';
|
||||
import OVERRIDES from '../src/overrides.mjs';
|
||||
|
||||
// Weather.gov API proxy (catch-all for any Weather.gov API endpoint)
|
||||
// Weather.gov API proxy (catch-all for any Weather.gov API endpoint)
export const weatherProxy = async (req, res) => {
	const upstreamOptions = {
		serviceName: 'Weather.gov',
		skipParams: ['u'],
	};
	await cache.handleRequest(req, res, 'https://api.weather.gov', upstreamOptions);
};
|
||||
|
||||
// Radar proxy for weather radar images
|
||||
// Radar proxy for weather radar images
export const radarProxy = async (req, res) => {
	const upstreamOptions = {
		serviceName: 'Radar',
		skipParams: ['u'],
		encoding: 'binary', // Radar images are binary data
	};
	await cache.handleRequest(req, res, 'https://radar.weather.gov', upstreamOptions);
};
|
||||
|
||||
// SPC (Storm Prediction Center) outlook proxy
|
||||
// SPC (Storm Prediction Center) outlook proxy
export const outlookProxy = async (req, res) => {
	const upstreamOptions = {
		serviceName: 'SPC Outlook',
		skipParams: ['u'],
	};
	await cache.handleRequest(req, res, 'https://www.spc.noaa.gov', upstreamOptions);
};
|
||||
|
||||
// Iowa State Mesonet proxy with configurable host
|
||||
// Iowa State Mesonet proxy with configurable host
export const mesonetProxy = async (req, res) => {
	// Image extensions need binary collection instead of utf8 strings
	const binaryRequest = /\.(png|jpg|jpeg|gif|webp|ico)$/i.test(req.path);

	// Use override radar host if provided, otherwise default to mesonet
	const radarHost = OVERRIDES.RADAR_HOST || 'mesonet.agron.iastate.edu';

	const upstreamOptions = {
		serviceName: `Iowa State Mesonet (${radarHost})`,
		skipParams: [], // No parameters to skip for Mesonet
		encoding: binaryRequest ? 'binary' : 'utf8', // Use binary encoding for images
	};
	await cache.handleRequest(req, res, `https://${radarHost}`, upstreamOptions);
};
|
||||
40
server/scripts/modules/utils/cache.mjs
Normal file
40
server/scripts/modules/utils/cache.mjs
Normal file
@@ -0,0 +1,40 @@
|
||||
import { rewriteUrl } from './url-rewrite.mjs';
|
||||
|
||||
// Clear cache utility for client-side use.
// Asks the server to drop the proxy-cache entry matching `url`;
// resolves true only when an entry was found and removed.
const clearCacheEntry = async (url, baseUrl = '') => {
	try {
		// Rewrite the URL to get the local proxy path
		const proxied = rewriteUrl(url);
		const parsed = typeof proxied === 'string' ? new URL(proxied, baseUrl || window.location.origin) : proxied;
		let cachePath = parsed.pathname + parsed.search;

		// Strip the route designator (first path segment) to match actual cache keys
		const secondSlash = cachePath.indexOf('/', 1);
		if (secondSlash > 0) {
			cachePath = cachePath.substring(secondSlash);
		}

		// Call the cache clear endpoint
		const endpoint = baseUrl ? `${baseUrl}/cache${cachePath}` : `/cache${cachePath}`;
		const response = await fetch(endpoint, {
			method: 'DELETE',
		});

		if (!response.ok) {
			console.warn(`⚠️ Failed to clear cache entry: ${response.status} ${response.statusText}`);
			return false;
		}

		const result = await response.json();
		if (!result.cleared) {
			console.log(`🔍 Cache entry not found: ${cachePath}`);
			return false;
		}
		console.log(`🗑️ Cleared cache entry: ${cachePath}`);
		return true;
	} catch (error) {
		console.error(`❌ Error clearing cache entry for ${url}:`, error.message);
		return false;
	}
};
|
||||
|
||||
export default clearCacheEntry;
|
||||
@@ -1,12 +0,0 @@
|
||||
// rewrite some urls for local server
const rewriteUrl = (_url) => {
	// Origin of the page currently being served; known remote API hosts are
	// swapped for it so the request goes through the local server instead.
	const localBase = `${window.location.protocol}//${window.location.host}/`;
	return _url
		.replace('https://api.weather.gov/', localBase)
		.replace('https://www.cpc.ncep.noaa.gov/', localBase);
};

export {
	// eslint-disable-next-line import/prefer-default-export
	rewriteUrl,
};
|
||||
@@ -1,31 +1,107 @@
|
||||
import { rewriteUrl } from './cors.mjs';
|
||||
import { rewriteUrl } from './url-rewrite.mjs';
|
||||
|
||||
// Centralized utilities for handling errors in Promise contexts

// Fetch JSON, resolving to null instead of rejecting on failure.
// `json` already returns the right shape whether or not params.returnUrl is
// set ({ data, url } vs raw response), so the result passes through
// unchanged. (The previous returnUrl conditional returned the identical
// value on both branches and has been removed.)
const safeJson = async (url, params) => {
	try {
		return await json(url, params);
	} catch (_error) {
		// Error already logged in fetchAsync; return null to be "safe"
		return null;
	}
};
|
||||
|
||||
// Fetch text, resolving to null instead of rejecting on failure.
// `text` already returns the right shape whether or not params.returnUrl is
// set, so the result passes through unchanged. (The previous returnUrl
// conditional returned the identical value on both branches.)
const safeText = async (url, params) => {
	try {
		return await text(url, params);
	} catch (_error) {
		// Error already logged in fetchAsync; return null to be "safe"
		return null;
	}
};
|
||||
|
||||
// Fetch a blob, resolving to null instead of rejecting on failure.
// `blob` already returns the right shape whether or not params.returnUrl is
// set, so the result passes through unchanged. (The previous returnUrl
// conditional returned the identical value on both branches.)
const safeBlob = async (url, params) => {
	try {
		return await blob(url, params);
	} catch (_error) {
		// Error already logged in fetchAsync; return null to be "safe"
		return null;
	}
};
|
||||
|
||||
// Await every promise, mapping rejections to null so one failure never
// takes down the whole batch. Non-abort rejections are logged for debugging.
const safePromiseAll = async (promises) => {
	try {
		const settled = await Promise.allSettled(promises);

		return settled.map((outcome, idx) => {
			if (outcome.status === 'fulfilled') return outcome.value;
			// Log rejected promises for debugging (except AbortErrors which are expected)
			if (outcome.reason?.name !== 'AbortError') {
				console.warn(`Promise ${idx} rejected:`, outcome.reason?.message || outcome.reason);
			}
			return null;
		});
	} catch (error) {
		console.error('safePromiseAll encountered an unexpected error:', error);
		// Return array of nulls matching the input length
		return new Array(promises.length).fill(null);
	}
};
|
||||
|
||||
// Thin wrappers over fetchAsync selecting how the response body is parsed;
// each returns fetchAsync's result (data, or { data, url } when
// params.returnUrl is set) and rejects on fetch/HTTP errors.
const json = (url, params) => fetchAsync(url, 'json', params);
const text = (url, params) => fetchAsync(url, 'text', params);
const blob = (url, params) => fetchAsync(url, 'blob', params);
|
||||
|
||||
// Hosts that don't allow custom User-Agent headers due to CORS restrictions
// (matched by substring against the request hostname in fetchAsync)
const USER_AGENT_EXCLUDED_HOSTS = [
	'geocode.arcgis.com',
	'services.arcgis.com',
];
|
||||
|
||||
const fetchAsync = async (_url, responseType, _params = {}) => {
|
||||
// add user agent header to json request at api.weather.gov
|
||||
const headers = {};
|
||||
if (_url.toString().match(/api\.weather\.gov/)) {
|
||||
|
||||
const checkUrl = new URL(_url, window.location.origin);
|
||||
const shouldExcludeUserAgent = USER_AGENT_EXCLUDED_HOSTS.some((host) => checkUrl.hostname.includes(host));
|
||||
|
||||
if (!shouldExcludeUserAgent) {
|
||||
headers['user-agent'] = 'Weatherstar 4000+; weatherstar@netbymatt.com';
|
||||
}
|
||||
|
||||
// combine default and provided parameters
|
||||
const params = {
|
||||
method: 'GET',
|
||||
mode: 'cors',
|
||||
type: 'GET',
|
||||
retryCount: 0,
|
||||
timeout: 30000,
|
||||
..._params,
|
||||
headers,
|
||||
};
|
||||
// store original number of retries
|
||||
params.originalRetries = params.retryCount;
|
||||
|
||||
// build a url, including the rewrite for cors if necessary
|
||||
let corsUrl = _url;
|
||||
if (params.cors === true) corsUrl = rewriteUrl(_url);
|
||||
const url = new URL(corsUrl, `${window.location.origin}/`);
|
||||
// rewrite URLs for various services to use the backend proxy server for proper caching (and request logging)
|
||||
const url = rewriteUrl(_url);
|
||||
// match the security protocol when not on localhost
|
||||
// url.protocol = window.location.hostname === 'localhost' ? url.protocol : window.location.protocol;
|
||||
// add parameters if necessary
|
||||
@@ -39,53 +115,148 @@ const fetchAsync = async (_url, responseType, _params = {}) => {
|
||||
}
|
||||
|
||||
// make the request
|
||||
try {
|
||||
const response = await doFetch(url, params);
|
||||
|
||||
// check for ok response
|
||||
if (!response.ok) throw new Error(`Fetch error ${response.status} ${response.statusText} while fetching ${response.url}`);
|
||||
// return the requested response
|
||||
// process the response based on type
|
||||
let result;
|
||||
switch (responseType) {
|
||||
case 'json':
|
||||
return response.json();
|
||||
result = await response.json();
|
||||
break;
|
||||
case 'text':
|
||||
return response.text();
|
||||
result = await response.text();
|
||||
break;
|
||||
case 'blob':
|
||||
return response.blob();
|
||||
result = await response.blob();
|
||||
break;
|
||||
default:
|
||||
return response;
|
||||
result = response;
|
||||
}
|
||||
|
||||
// Return both data and URL if requested
|
||||
if (params.returnUrl) {
|
||||
return {
|
||||
data: result,
|
||||
url: response.url,
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
// Enhanced error handling for different error types
|
||||
if (error.name === 'AbortError') {
|
||||
// AbortError is always handled gracefully (background tab throttling)
|
||||
console.log(`🛑 Fetch aborted for ${_url} (background tab throttling?)`);
|
||||
return null; // Always return null for AbortError instead of throwing
|
||||
} if (error.message.includes('502')) {
|
||||
console.warn(`🚪 Bad Gateway error for ${_url}`);
|
||||
} else if (error.message.includes('503')) {
|
||||
console.warn(`⌛ Temporarily unavailable for ${_url}`);
|
||||
} else if (error.message.includes('504')) {
|
||||
console.warn(`⏱️ Gateway Timeout for ${_url}`);
|
||||
} else if (error.message.includes('500')) {
|
||||
console.warn(`💥 Internal Server Error for ${_url}`);
|
||||
} else if (error.message.includes('CORS') || error.message.includes('Access-Control')) {
|
||||
console.warn(`🔒 CORS or Access Control error for ${_url}`);
|
||||
} else {
|
||||
console.warn(`❌ Fetch failed for ${_url} (${error.message})`);
|
||||
}
|
||||
|
||||
// Add standard error properties that calling code expects
|
||||
if (!error.status) error.status = 0;
|
||||
if (!error.responseJSON) error.responseJSON = null;
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
// fetch with retry and back-off.
// Aborts the request after params.timeout ms via an AbortController.
// 5xx responses and network errors are retried up to params.retryCount times,
// with the delay between attempts supplied by retryDelay(attempt).
// AbortError is never retried; it rejects so the caller can handle it.
const doFetch = (url, params) => new Promise((resolve, reject) => {
	// Create AbortController for timeout
	const controller = new AbortController();
	const timeoutId = setTimeout(() => {
		controller.abort();
	}, params.timeout);

	// Add abort signal to fetch params
	const fetchParams = {
		...params,
		signal: controller.signal,
	};

	// Shared retry logic for both 5xx responses and network errors
	const attemptRetry = (reason) => {
		// Safety check for params
		if (!params || typeof params.retryCount !== 'number' || typeof params.originalRetries !== 'number') {
			console.error(`❌ Invalid params for retry: ${url}`);
			return reject(new Error('Invalid retry parameters'));
		}

		const retryAttempt = params.originalRetries - params.retryCount + 1;
		const remainingRetries = params.retryCount - 1;
		const delayMs = retryDelay(retryAttempt);

		console.warn(`🔄 Retry ${retryAttempt}/${params.originalRetries} for ${url} - ${reason} (retrying in ${delayMs}ms, ${remainingRetries} retries left)`);

		// call the "still waiting" function on the first retry only
		if (params && params.stillWaiting && typeof params.stillWaiting === 'function' && params.retryCount === params.originalRetries) {
			try {
				params.stillWaiting();
			} catch (callbackError) {
				console.warn(`⚠️ stillWaiting callback error for ${url}:`, callbackError.message);
			}
		}

		// decrement and retry with safe parameter copying
		const newParams = {
			...params,
			retryCount: Math.max(0, params.retryCount - 1), // Ensure retryCount doesn't go negative
		};

		// Use setTimeout directly instead of a delay wrapper to avoid Promise resolution issues
		setTimeout(() => {
			doFetch(url, newParams).then(resolve).catch(reject);
		}, delayMs);
		return undefined; // Explicit return for linter
	};

	fetch(url, fetchParams).then((response) => {
		clearTimeout(timeoutId); // Clear timeout on successful response

		// Retry 5xx status codes if we have retries left
		if (params && params.retryCount > 0 && response.status >= 500 && response.status <= 599) {
			let errorType = 'Server error';
			if (response.status === 502) {
				errorType = 'Bad Gateway';
			} else if (response.status === 503) {
				errorType = 'Service Unavailable';
			} else if (response.status === 504) {
				errorType = 'Gateway Timeout';
			}
			return attemptRetry(`${errorType} ${response.status} ${response.statusText}`);
		}

		// successful response or out of retries
		return resolve(response);
	}).catch((error) => {
		clearTimeout(timeoutId); // Clear timeout on error

		// Retry network errors if we have retries left (but not AbortError)
		if (params && params.retryCount > 0 && error.name !== 'AbortError') {
			const reason = error.name === 'TimeoutError' ? 'Request timeout' : `Network error: ${error.message}`;
			return attemptRetry(reason);
		}

		// out of retries or AbortError - reject
		reject(error);
		return undefined; // Explicit return for linter
	});
});
|
||||
|
||||
const retryDelay = (retryNumber) => {
|
||||
@@ -102,4 +273,8 @@ export {
|
||||
json,
|
||||
text,
|
||||
blob,
|
||||
safeJson,
|
||||
safeText,
|
||||
safeBlob,
|
||||
safePromiseAll,
|
||||
};
|
||||
|
||||
41
server/scripts/modules/utils/url-rewrite.mjs
Normal file
41
server/scripts/modules/utils/url-rewrite.mjs
Normal file
@@ -0,0 +1,41 @@
|
||||
// rewrite URLs to use the local proxy server so responses are cached server-side

// origin -> proxy path prefix served by the local backend
// (a Map avoids accidental Object.prototype key collisions on lookup)
const PROXY_PREFIXES = new Map([
	['https://api.weather.gov', '/api'],
	['https://www.spc.noaa.gov', '/spc'],
	['https://radar.weather.gov', '/radar'],
	['https://mesonet.agron.iastate.edu', '/mesonet'],
]);

/**
 * Rewrite a URL pointing at a known upstream service so the request goes
 * through the local proxy (same origin as the page, path prefixed per service).
 *
 * @param {string|URL} _url URL to rewrite; relative strings are returned unchanged
 * @returns {string|URL} the original relative string, or a URL object — rewritten
 *   when the origin is a known upstream, otherwise parsed but untouched
 */
const rewriteUrl = (_url) => {
	// Handle relative URLs - they already target the local server, so return as-is
	if (typeof _url === 'string' && !_url.startsWith('http')) {
		return _url;
	}

	// Handle both string URLs and URL objects
	const url = new URL(_url.toString());

	// Known upstream service, or an overridden radar host (OVERRIDES is an
	// optional global — presumably provided by src/overrides.mjs; guard its absence)
	let prefix = PROXY_PREFIXES.get(url.origin);
	if (prefix === undefined && typeof OVERRIDES !== 'undefined' && OVERRIDES?.RADAR_HOST && url.origin === `https://${OVERRIDES.RADAR_HOST}`) {
		prefix = '/mesonet';
	}

	if (prefix !== undefined) {
		// Point at the page's own origin; the proxy serves each upstream under its prefix
		url.protocol = window.location.protocol;
		url.host = window.location.host;
		url.pathname = `${prefix}${url.pathname}`;
	}

	return url;
};

export {
	// eslint-disable-next-line import/prefer-default-export
	rewriteUrl,
};
|
||||
Reference in New Issue
Block a user