import Dexie from "dexie";
import helpers from "./helpers";
// Constants
const CACHE_CONSTANTS = {
DB_NAME: "ProgressCacheDB",
// TTL Settings
HISTORICAL_TTL: 7 * 24 * 60 * 60 * 1000, // 7 days
TODAY_TTL: 2 * 60 * 60 * 1000, // 2 hours
// Cache Limits
MAX_CACHE_SIZE: 50 * 1024 * 1024, // 50MB
MAX_MEMORY_ENTRIES: 100,
// Cleanup Settings
CLEANUP_PERCENTAGE: 0.3, // Remove 30% oldest entries
CLEANUP_BATCH_SIZE: 500, // Reduced for better low-end device performance
// Performance Settings
MAX_RETRIES: 3,
RETRY_DELAY_BASE: 100, // ms
MEMORY_WARNING_THRESHOLD: 0.8,
// Preload Settings
PRELOAD_RECENT_DAYS: 7,
WARM_MEMORY_LIMIT: 50, // Renamed from PRELOAD_MEMORY_LIMIT
// Leader Election Settings
LEADER_KEY: "progress-cache-leader",
LEADER_TTL_MS: 5000,
};
/**
* @typedef {Object} CacheEntry
* @property {string} dateString - Date in YYYY-MM-DD format
* @property {number} timestamp - Unix timestamp
* @property {string} data - Serialized data
* @property {number} size - Size in bytes
*/
/**
* @typedef {Object} CacheStats
* @property {string} id - Stats identifier
* @property {number} lastCleanup - Last cleanup timestamp
* @property {number} totalSize - Total cache size in bytes
*/
/**
* @typedef {Object} CacheMetrics
* @property {number} hits - Cache hit count
* @property {number} misses - Cache miss count
* @property {number} errors - Error count
* @property {number} memoryHits - Memory cache hit count
* @property {number} dbHits - Database cache hit count
* @property {number} evictions - Number of entries evicted during cleanup
* @property {number} reads - Total read operations
* @property {number} writes - Total write operations
*/
/**
* IndexedDB wrapper using Dexie.js
*/
class ProgressDatabase extends Dexie {
constructor() {
super(CACHE_CONSTANTS.DB_NAME);
this.version(1).stores({
progressData: "dateString, timestamp", // PK on dateString, index on timestamp
cacheStats: "id",
});
// Future schema upgrade path
this.version(2)
.stores({
progressData: "dateString, timestamp, [timestamp+dateString]",
cacheStats: "id",
})
.upgrade(async _tx => {
// no-op: Dexie will build the new index
});
}
}
/**
* Mobile-optimized cache service with dual-layer architecture
* Features:
* - Memory cache with LRU eviction for hot data
* - IndexedDB for persistent storage with large capacity
* - Network-aware fetching with offline support
* - Automatic TTL management and cleanup
* - Performance monitoring and metrics
* - Error recovery with retry mechanisms
* - Leader election for multi-tab coordination
* - Import/export with validation and batching
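*
* @example
* // Illustrative usage sketch (not part of the original gist); `progressCache`
* // is the singleton exported at the bottom of this module.
* // import progressCache from "./progressCache";
* await progressCache.set("2025-08-12", [{ step: 1, done: true }]);
* const cached = await progressCache.get("2025-08-12"); // { data, timestamp } or null
* const stale = await progressCache.shouldFetch("2025-08-12"); // true when a refetch is advisable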
*/
class MobileProgressCache {
constructor() {
this.db = new ProgressDatabase();
this.memoryCache = new Map();
this.ready = this.initialize();
// Cleanup timer management
this._cleanupTid = null;
this._paused = false;
// Leader election for multi-tab coordination
this._tabId = Math.random().toString(36).slice(2);
this._leaderTimer = null;
this._iAmLeader = false;
this._tryBecomeLeader();
// Listen for leader changes from other tabs
this._onStorage = e => {
if (e.key === CACHE_CONSTANTS.LEADER_KEY) {
this._tryBecomeLeader();
}
};
if (
typeof window !== "undefined" &&
typeof window.addEventListener === "function"
) {
window.addEventListener("storage", this._onStorage);
}
// Performance metrics
/** @type {CacheMetrics} */
this.metrics = {
hits: 0,
misses: 0,
errors: 0,
memoryHits: 0,
dbHits: 0,
evictions: 0,
reads: 0,
writes: 0,
};
this.metricsSince = Date.now();
// Bind methods to preserve context
this.fetchWithCache = this.fetchWithCache.bind(this);
}
/**
* Initialize cache service
* @private
*/
async initialize() {
try {
// Set up database event handlers
this.db.on("versionchange", () => {
console.warn("DB versionchange: closing to allow upgrade");
this.db.close();
});
this.db.on("blocked", () => {
console.warn("DB upgrade blocked by another tab");
});
await this.initCacheStats();
await this.requestPersistentStorage();
// Schedule periodic cleanup
this.schedulePeriodicCleanup();
console.log("MobileProgressCache initialized successfully");
} catch (error) {
console.warn("Cache initialization failed:", error);
this.logError(error);
}
}
/**
* Try to become the leader for cleanup coordination across tabs
* @private
*/
_tryBecomeLeader() {
if (typeof localStorage === "undefined") return;
try {
const now = Date.now();
const cur = JSON.parse(
localStorage.getItem(CACHE_CONSTANTS.LEADER_KEY) || "null"
);
if (!cur || now - cur.timestamp > CACHE_CONSTANTS.LEADER_TTL_MS) {
// Take leadership
localStorage.setItem(
CACHE_CONSTANTS.LEADER_KEY,
JSON.stringify({ id: this._tabId, timestamp: now })
);
this._iAmLeader = true;
this._startLeaderHeartbeat();
} else {
const wasLeader = this._iAmLeader;
this._iAmLeader = cur.id === this._tabId;
if (this._iAmLeader) {
this._startLeaderHeartbeat();
} else if (wasLeader) {
this._stopLeaderHeartbeat();
}
}
} catch (e) {
// Safari private mode can throw on localStorage access
console.warn("Leader election storage error:", e);
}
}
/**
* Start heartbeat to maintain leadership
* @private
*/
_startLeaderHeartbeat() {
this._stopLeaderHeartbeat();
this._leaderTimer = setInterval(() => {
try {
localStorage.setItem(
CACHE_CONSTANTS.LEADER_KEY,
JSON.stringify({ id: this._tabId, timestamp: Date.now() })
);
} catch (e) {
// Safari private mode can throw on localStorage access
console.warn("Leader heartbeat storage error:", e);
}
}, CACHE_CONSTANTS.LEADER_TTL_MS / 2);
}
/**
* Stop heartbeat when no longer leader
* @private
*/
_stopLeaderHeartbeat() {
if (this._leaderTimer) {
clearInterval(this._leaderTimer);
this._leaderTimer = null;
}
}
/**
* Schedule periodic cleanup operations
* @private
*/
schedulePeriodicCleanup() {
if (this._cleanupTid) {
clearInterval(this._cleanupTid);
}
this._cleanupTid = setInterval(
() => {
if (!this._iAmLeader || this._paused) return;
this.pruneExpired().catch(error => {
console.warn("Periodic cleanup failed:", error);
this.logError(error);
});
},
60 * 60 * 1000
);
}
/**
* Pause cache operations (useful for app suspend/background)
*/
pause() {
this._paused = true;
console.log("Cache operations paused");
}
/**
* Resume cache operations
*/
resume() {
this._paused = false;
console.log("Cache operations resumed");
}
/**
* Close cache service and clean up resources
*/
async close() {
if (this._cleanupTid) {
clearInterval(this._cleanupTid);
this._cleanupTid = null;
}
// Remove storage event listener
if (typeof window !== "undefined" && this._onStorage) {
window.removeEventListener("storage", this._onStorage);
}
this._stopLeaderHeartbeat();
// Clean up leadership if we're the current leader
if (this._iAmLeader && typeof localStorage !== "undefined") {
try {
const cur = JSON.parse(
localStorage.getItem(CACHE_CONSTANTS.LEADER_KEY) || "null"
);
if (cur?.id === this._tabId) {
localStorage.removeItem(CACHE_CONSTANTS.LEADER_KEY);
}
} catch (e) {
console.warn("Error cleaning up leadership:", e);
}
}
this.memoryCache.clear();
this.db.close();
console.log("MobileProgressCache closed successfully");
}
/**
* Check if we're in a browser environment with navigator APIs
* @returns {boolean}
*/
get hasNavigator() {
return typeof navigator !== "undefined";
}
/**
* Check memory pressure and clear memory cache if needed
* @private
*/
checkMemoryPressure() {
try {
if (
typeof performance !== "undefined" &&
"memory" in performance &&
performance.memory.usedJSHeapSize > 0
) {
const { usedJSHeapSize: used, jsHeapSizeLimit: limit } =
performance.memory;
if (used / limit > CACHE_CONSTANTS.MEMORY_WARNING_THRESHOLD) {
console.warn("High memory usage detected, clearing memory cache");
this.memoryCache.clear();
}
}
} catch (_error) {
// Silent fail for non-browser environments
}
}
/**
* Add entry to memory cache with LRU eviction
* @param {string} key - Cache key
* @param {*} value - Cache value
* @private
*/
setMemoryEntry(key, value) {
// Check memory pressure
this.checkMemoryPressure();
// Remove oldest entries if we exceed the limit
if (this.memoryCache.size >= CACHE_CONSTANTS.MAX_MEMORY_ENTRIES) {
const firstKey = this.memoryCache.keys().next().value;
if (firstKey) {
this.memoryCache.delete(firstKey);
}
}
// Add new entry (Map maintains insertion order)
this.memoryCache.set(key, value);
}
/**
* Get entry from memory cache and update LRU order
* @param {string} key - Cache key
* @returns {*} - Cache value or undefined
* @private
*/
getMemoryEntry(key) {
const value = this.memoryCache.get(key);
if (value !== undefined) {
// Move to end (most recently used)
this.memoryCache.delete(key);
this.memoryCache.set(key, value);
}
return value;
}
/**
* Initialize cache statistics
* @private
*/
async initCacheStats() {
try {
const stats = await this.db.cacheStats.get("main");
if (!stats) {
await this.db.cacheStats.put({
id: "main",
lastCleanup: Date.now(),
totalSize: 0,
});
}
} catch (error) {
console.warn("Failed to initialize cache stats:", error);
throw error;
}
}
/**
* Get accurate byte size of string using TextEncoder
* @param {string} str - String to measure
* @returns {number} - Size in bytes
* @private
*/
byteSize(str) {
if (typeof str !== "string") {
throw new TypeError("Expected string input");
}
return new TextEncoder().encode(str).byteLength;
}
/**
* Ensure there's enough space for new entry
* @param {number} entrySize - Size of entry to add
* @param {string} dateString - Date string for potential overwrite
* @private
*/
async ensureSpace(entrySize, dateString) {
await this.ready;
if (entrySize <= 0) {
throw new Error("Entry size must be positive");
}
// Get current stats
let stats = await this.db.cacheStats.get("main");
let currentSize = stats?.totalSize ?? 0;
// Check if we're overwriting an existing entry
let existing = await this.db.progressData.get(dateString);
let prevSize = existing?.size ?? 0;
// Clean up until we have enough space
let attempts = 0;
const maxAttempts = 10; // Prevent infinite loops
while (
currentSize - prevSize + entrySize > CACHE_CONSTANTS.MAX_CACHE_SIZE &&
attempts < maxAttempts
) {
const freed = await this.cleanupOldest();
if (!freed) break; // Nothing left to remove
// Refresh stats after cleanup
stats = await this.db.cacheStats.get("main");
currentSize = stats?.totalSize ?? 0;
// Refresh existing entry size in case it was cleaned up
existing = await this.db.progressData.get(dateString);
prevSize = existing?.size ?? 0;
attempts++;
}
if (currentSize - prevSize + entrySize > CACHE_CONSTANTS.MAX_CACHE_SIZE) {
throw new Error("Unable to free enough cache space");
}
}
/**
* Execute operation with retry logic
* @param {Function} operation - Operation to execute
* @param {number} maxRetries - Maximum retry attempts
* @param {string} operationName - Name for logging
* @returns {Promise<*>} - Operation result
* @private
*/
async withRetry(
operation,
maxRetries = CACHE_CONSTANTS.MAX_RETRIES,
operationName = "operation"
) {
let lastError;
for (let attempt = 0; attempt < maxRetries; attempt++) {
try {
return await operation();
} catch (error) {
lastError = error;
if (attempt === maxRetries - 1) {
console.warn(
`${operationName} failed after ${maxRetries} attempts:`,
error
);
this.logError(error);
throw error;
}
// Exponential backoff
const delay = CACHE_CONSTANTS.RETRY_DELAY_BASE * Math.pow(2, attempt);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
throw lastError;
}
/**
* Get data from cache (memory first, then IndexedDB)
* @param {string} dateString - Date in YYYY-MM-DD format
* @returns {Promise<{data: Array, timestamp: number}|null>} - Cached data or null
*/
async get(dateString) {
if (!dateString || typeof dateString !== "string") {
throw new Error("Invalid dateString parameter");
}
return this.withRetry(
async () => {
this.metrics.reads++;
await this.ready;
// Try memory cache first (fastest)
const memResult = this.getMemoryEntry(dateString);
if (memResult) {
if (!this.isExpired(memResult.timestamp, dateString)) {
this.logCacheHit("memory");
return memResult;
}
// Evict the stale memory entry so it is not served again
this.memoryCache.delete(dateString);
}
// Try IndexedDB
const dbEntry = await this.db.progressData.get(dateString);
if (dbEntry && !this.isExpired(dbEntry.timestamp, dateString)) {
const data = this.deserialize(dbEntry.data);
const result = { data, timestamp: dbEntry.timestamp };
// Update memory cache
this.setMemoryEntry(dateString, result);
this.logCacheHit("database");
return result;
}
// Clean up expired entry if found
if (dbEntry && this.isExpired(dbEntry.timestamp, dateString)) {
await this.removeExpiredEntry(dateString, dbEntry);
}
this.logCacheMiss();
return null;
},
CACHE_CONSTANTS.MAX_RETRIES,
"cache get"
);
}
/**
* Remove expired entry from database
* @param {string} dateString - Date string
* @param {CacheEntry} dbEntry - Database entry
* @private
*/
async removeExpiredEntry(dateString, dbEntry) {
await this.db.transaction(
"readwrite",
this.db.progressData,
this.db.cacheStats,
async () => {
await this.db.progressData.delete(dateString);
const stats = await this.getStatsOrDefault();
stats.totalSize = Math.max(0, stats.totalSize - (dbEntry.size || 0));
stats.lastCleanup = Date.now();
await this.db.cacheStats.put(stats);
}
);
}
/**
* Store data in both memory and IndexedDB
* @param {string} dateString - Date in YYYY-MM-DD format
* @param {Array} data - Data to cache
*/
async set(dateString, data) {
if (!dateString || typeof dateString !== "string") {
throw new Error("Invalid dateString parameter");
}
if (!Array.isArray(data)) {
throw new Error("Data must be an array");
}
const performSet = async () => {
await this.ready;
const stored = this.serialize(data);
const entrySize = this.byteSize(stored);
const timestamp = Date.now();
// Ensure space before opening transaction
await this.ensureSpace(entrySize, dateString);
await this.db.transaction(
"readwrite",
this.db.progressData,
this.db.cacheStats,
async () => {
const existing = await this.db.progressData.get(dateString);
const prevSize = existing?.size ?? 0;
await this.db.progressData.put({
dateString,
timestamp,
data: stored,
size: entrySize,
});
// Update memory cache
this.setMemoryEntry(dateString, { data, timestamp });
// Update stats
const stats = await this.getStatsOrDefault();
stats.totalSize = Math.max(0, stats.totalSize - prevSize + entrySize);
stats.lastCleanup = Date.now();
await this.db.cacheStats.put(stats);
// Update write metrics
this.metrics.writes++;
}
);
};
return this.withRetry(
performSet,
CACHE_CONSTANTS.MAX_RETRIES,
"cache set"
).catch(async error => {
// Handle quota errors with additional cleanup attempt and retry
if (/quota/i.test(`${error?.name || ""} ${error?.message || ""}`)) {
console.warn("Quota error detected, attempting additional cleanup");
await this.cleanupOldest();
// Ensure DB is open and retry once
await this.db?.open?.().catch(() => {});
return this.withRetry(performSet, 1, "cache set retry");
}
throw error;
});
}
/**
* Get cache stats with default fallback
* @returns {Promise<CacheStats>}
* @private
*/
async getStatsOrDefault() {
return (
(await this.db.cacheStats.get("main")) || {
id: "main",
lastCleanup: 0,
totalSize: 0,
}
);
}
/**
* Check if data should be fetched based on cache age and network status
* @param {string} dateString - Date in YYYY-MM-DD format
* @returns {Promise<boolean>} - Whether to fetch fresh data
*/
async shouldFetch(dateString) {
if (!dateString) return true;
try {
const today = helpers.getTodayString();
const isToday = dateString === today;
const cached = await this.get(dateString);
if (!cached) return true;
// Check network status if available
if (this.hasNavigator && "onLine" in navigator && !navigator.onLine) {
return false; // Don't fetch if offline
}
const ttl = isToday
? CACHE_CONSTANTS.TODAY_TTL
: CACHE_CONSTANTS.HISTORICAL_TTL;
return Date.now() - cached.timestamp > ttl;
} catch (error) {
console.warn("Error in shouldFetch:", error);
this.logError(error);
return true; // Default to fetching on error
}
}
/**
* Fetch data with caching (network-aware)
* @param {string} dateString - Date in YYYY-MM-DD format
* @param {Function} fetchFunction - Function that returns fresh data
* @param {Function} [customExtractor] - Optional custom extractor function
* @returns {Promise<Array>} - Fresh or cached data
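* @example
* // Hedged sketch: `api.fetchProgress` is a hypothetical fetcher, not part of this module.
* const today = helpers.getTodayString();
* const rows = await progressCache.fetchWithCache(today, () => api.fetchProgress(today));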
*/
async fetchWithCache(dateString, fetchFunction, customExtractor) {
if (!dateString || typeof fetchFunction !== "function") {
throw new Error("Invalid parameters for fetchWithCache");
}
try {
// Check cache first
const cached = await this.get(dateString);
if (cached) {
return cached.data;
}
// Check if offline
if (this.hasNavigator && "onLine" in navigator && !navigator.onLine) {
throw new Error(`Offline and no cached data for ${dateString}`);
}
// Fetch fresh data
const response = await fetchFunction();
const data = this.normalizeResponseData(response, customExtractor);
// Cache the data
await this.set(dateString, data);
return data;
} catch (error) {
console.warn(`Failed to fetch data for ${dateString}:`, error);
this.logError(error);
throw error;
}
}
/**
* Normalize API response data to array format
* @param {*} response - API response
* @param {Function} [customExtractor] - Optional custom extractor function
* @returns {Array} - Normalized data array
* @private
*/
normalizeResponseData(response, customExtractor) {
// Use custom extractor if provided
if (typeof customExtractor === "function") {
try {
const extracted = customExtractor(response);
if (Array.isArray(extracted)) {
return extracted;
}
} catch (error) {
console.warn("Custom extractor failed:", error);
}
}
if (Array.isArray(response)) {
return response;
}
if (response && Array.isArray(response.data)) {
return response.data;
}
if (response && typeof response === "object") {
// Try to find array in common response properties
const arrayProps = ["items", "results", "records"];
for (const prop of arrayProps) {
if (Array.isArray(response[prop])) {
return response[prop];
}
}
}
console.warn("Unexpected response format, returning empty array");
return [];
}
/**
* Get all cached dates sorted
* @returns {Promise<Array<string>>} - Array of cached date strings
*/
async getCachedDates() {
try {
await this.ready;
const keys = await this.db.progressData
.orderBy("dateString")
.primaryKeys();
return keys;
} catch (error) {
console.warn("Failed to get cached dates:", error);
this.logError(error);
return [];
}
}
/**
* Cleanup oldest cache entries when storage is full
* @returns {Promise<number>} - Bytes freed
* @private
*/
async cleanupOldest() {
try {
const entries = await this.db.progressData
.orderBy("timestamp")
.limit(CACHE_CONSTANTS.CLEANUP_BATCH_SIZE)
.toArray();
if (!entries.length) return 0;
const cutCount = Math.max(
1,
Math.ceil(entries.length * CACHE_CONSTANTS.CLEANUP_PERCENTAGE)
);
const toDelete = entries.slice(0, cutCount);
const keysToDelete = toDelete.map(e => e.dateString);
const bytesFreed = toDelete.reduce((sum, e) => sum + (e.size || 0), 0);
await this.db.transaction(
"readwrite",
this.db.progressData,
this.db.cacheStats,
async () => {
await this.db.progressData.bulkDelete(keysToDelete);
const stats = await this.getStatsOrDefault();
stats.totalSize = Math.max(0, stats.totalSize - bytesFreed);
stats.lastCleanup = Date.now();
await this.db.cacheStats.put(stats);
}
);
// Remove from memory cache
keysToDelete.forEach(key => this.memoryCache.delete(key));
// Update eviction metrics
this.metrics.evictions += cutCount;
console.log(
`Cache cleanup: removed ${cutCount} entries, freed ${(bytesFreed / 1024).toFixed(1)} KB`
);
// If we freed very little space, signal quota pressure
if (bytesFreed < 1024) {
// Less than 1KB freed
const error = new Error("CACHE_QUOTA_EXCEEDED");
error.code = "CACHE_QUOTA_EXCEEDED";
error.bytesFreed = bytesFreed;
throw error;
}
return bytesFreed;
} catch (error) {
// Propagate the quota-pressure signal; swallow everything else
if (error?.code === "CACHE_QUOTA_EXCEEDED") {
throw error;
}
console.warn("Failed to cleanup cache:", error);
this.logError(error);
return 0;
}
}
/**
* Prune expired entries using indexed queries
*/
async pruneExpired() {
// Only run cleanup if we're the leader and not paused
if (!this._iAmLeader || this._paused) {
return;
}
try {
const today = helpers.getTodayString();
const now = Date.now();
const cutoffHistorical = now - CACHE_CONSTANTS.HISTORICAL_TTL;
await this.db.transaction(
"readwrite",
this.db.progressData,
this.db.cacheStats,
async () => {
// Find expired historical entries
const expiredHistorical = await this.db.progressData
.where("timestamp")
.below(cutoffHistorical)
.toArray();
// Find expired today entries (simplified comparison)
const expiredToday = await this.db.progressData
.where("dateString")
.equals(today)
.filter(entry => entry.timestamp < now - CACHE_CONSTANTS.TODAY_TTL)
.toArray();
const allExpired = [...expiredHistorical, ...expiredToday];
if (allExpired.length === 0) return;
const keysToDelete = allExpired.map(e => e.dateString);
const bytesFreed = allExpired.reduce(
(sum, e) => sum + (e.size || 0),
0
);
await this.db.progressData.bulkDelete(keysToDelete);
const stats = await this.getStatsOrDefault();
stats.totalSize = Math.max(0, stats.totalSize - bytesFreed);
stats.lastCleanup = Date.now();
await this.db.cacheStats.put(stats);
// Remove from memory cache
keysToDelete.forEach(key => this.memoryCache.delete(key));
// Update eviction metrics
this.metrics.evictions += allExpired.length;
console.log(
`Pruned ${allExpired.length} expired entries, freed ${(bytesFreed / 1024).toFixed(1)} KB`
);
}
);
} catch (error) {
console.warn("Failed to prune expired entries:", error);
this.logError(error);
}
}
/**
* Clear all cache data
*/
async clear() {
try {
// Clear memory cache
this.memoryCache.clear();
// Clear IndexedDB
await this.db.progressData.clear();
await this.db.cacheStats.clear();
// Reset metrics
this.resetMetrics();
// Reinitialize cache stats
await this.initCacheStats();
console.log("Cache cleared successfully");
} catch (error) {
console.warn("Failed to clear cache:", error);
this.logError(error);
throw error;
}
}
/**
* Get comprehensive cache statistics
* @returns {Promise<Object>} - Cache statistics
*/
async getStats() {
try {
await this.ready;
const [dates, totalSize, dbEntries] = await Promise.all([
this.getCachedDates(),
this.getCacheSize(),
this.db.progressData.count(),
]);
const memoryEntries = this.memoryCache.size;
const maxSizeMB = CACHE_CONSTANTS.MAX_CACHE_SIZE / (1024 * 1024);
const totalSizeMB = totalSize / (1024 * 1024);
const usagePercent = (totalSize / CACHE_CONSTANTS.MAX_CACHE_SIZE) * 100;
return {
totalEntries: dbEntries,
memoryEntries,
dbEntries,
totalSize: `${totalSizeMB.toFixed(2)} MB`,
maxSize: `${maxSizeMB.toFixed(2)} MB`,
usagePercent: usagePercent.toFixed(1),
usagePercentNum: usagePercent,
cachedDates: dates,
metrics: { ...this.metrics },
hitRate: this.calculateHitRate(),
hitRateNum: this.calculateHitRateNum(),
metricsSince: this.metricsSince,
};
} catch (error) {
console.warn("Failed to get cache stats:", error);
this.logError(error);
return {
totalEntries: 0,
memoryEntries: 0,
dbEntries: 0,
totalSize: "0 MB",
maxSize: `${(CACHE_CONSTANTS.MAX_CACHE_SIZE / (1024 * 1024)).toFixed(2)} MB`,
usagePercent: "0",
usagePercentNum: 0,
cachedDates: [],
metrics: { ...this.metrics },
hitRate: "0%",
hitRateNum: 0,
metricsSince: this.metricsSince,
};
}
}
/**
* Calculate cache hit rate
* @returns {string} - Hit rate percentage
* @private
*/
calculateHitRate() {
const total = this.metrics.hits + this.metrics.misses;
if (total === 0) return "0%";
return `${((this.metrics.hits / total) * 100).toFixed(1)}%`;
}
/**
* Calculate cache hit rate as number
* @returns {number} - Hit rate as decimal (0-1)
* @private
*/
calculateHitRateNum() {
const total = this.metrics.hits + this.metrics.misses;
if (total === 0) return 0;
return this.metrics.hits / total;
}
/**
* Get total cache size in bytes
* @returns {Promise<number>} - Total cache size
*/
async getCacheSize() {
try {
await this.ready;
const stats = await this.db.cacheStats.get("main");
return stats?.totalSize ?? 0;
} catch (error) {
console.warn("Failed to get cache size:", error);
this.logError(error);
return 0;
}
}
/**
* Serialize data to JSON string
* @param {Array} data - Data to serialize
* @returns {string} - Serialized data
* @private
*/
serialize(data) {
if (!Array.isArray(data)) {
throw new Error("Data must be an array for serialization");
}
return JSON.stringify(data);
}
/**
* Deserialize JSON string to data
* @param {string} str - Serialized data string
* @returns {Array} - Deserialized data
* @private
*/
deserialize(str) {
try {
if (typeof str !== "string") {
throw new Error("Input must be a string");
}
const result = JSON.parse(str);
return Array.isArray(result) ? result : [];
} catch (error) {
console.warn("Failed to deserialize cache data:", error);
this.logError(error);
return [];
}
}
/**
* Check if cache entry is expired
* @param {number} timestamp - Cache entry timestamp
* @param {string} dateString - Date string
* @returns {boolean} - Whether entry is expired
* @private
*/
isExpired(timestamp, dateString) {
if (typeof timestamp !== "number" || timestamp <= 0) {
return true;
}
try {
const today = helpers.getTodayString();
const isToday = dateString === today;
const ttl = isToday
? CACHE_CONSTANTS.TODAY_TTL
: CACHE_CONSTANTS.HISTORICAL_TTL;
return Date.now() - timestamp > ttl;
} catch (error) {
console.warn("Error checking expiration:", error);
return true; // Default to expired on error
}
}
/**
* Preload cache for specific date range with concurrency control
* @param {Array<string>} dateStrings - Array of date strings to preload
* @param {Function} fetchFunction - Function to fetch data for a date
* @param {number} concurrency - Maximum concurrent operations (default: 3)
* @returns {Promise<{fulfilled: number, rejected: number}>} - Results summary
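* @example
* // Hedged sketch: preload a few specific days with a hypothetical `api.fetchProgress`.
* const { fulfilled, rejected } = await progressCache.preloadCache(
*   ["2025-08-10", "2025-08-11", "2025-08-12"],
*   date => api.fetchProgress(date),
*   2
* );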
*/
async preloadCache(dateStrings, fetchFunction, concurrency = 3) {
if (!Array.isArray(dateStrings) || typeof fetchFunction !== "function") {
throw new Error("Invalid parameters for preloadCache");
}
const results = [];
const semaphore = new Array(concurrency).fill(null);
let index = 0;
const executeOperation = async dateString => {
try {
const cached = await this.get(dateString);
if (!cached) {
await this.fetchWithCache(dateString, () =>
fetchFunction(dateString)
);
}
return { status: "fulfilled", dateString };
} catch (error) {
console.warn(`Failed to preload cache for ${dateString}:`, error);
this.logError(error);
return { status: "rejected", dateString, error };
}
};
const processNext = async () => {
if (index >= dateStrings.length) return;
const dateString = dateStrings[index++];
const result = await executeOperation(dateString);
results.push(result);
// Process next item
await processNext();
};
// Start concurrent operations
const promises = semaphore.map(() => processNext());
await Promise.all(promises);
const fulfilled = results.filter(r => r.status === "fulfilled").length;
const rejected = results.filter(r => r.status === "rejected").length;
console.log(
`Cache preload completed: ${fulfilled} succeeded, ${rejected} failed of ${dateStrings.length} total`
);
return { fulfilled, rejected };
}
/**
* Smart cache warming based on usage patterns
* @param {Function} fetchFunction - Function to fetch data
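* @example
* // Hedged sketch: warm the last week's entries; `api.fetchProgress` is hypothetical.
* await progressCache.warmCache(date => api.fetchProgress(date));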
*/
async warmCache(fetchFunction) {
if (typeof fetchFunction !== "function") {
throw new Error("fetchFunction is required for cache warming");
}
try {
const recentDates = this.generateRecentDateRange(
CACHE_CONSTANTS.PRELOAD_RECENT_DAYS
);
const missedDates = [];
// Check which dates are missing from cache
for (const date of recentDates) {
const cached = await this.get(date);
if (!cached) {
missedDates.push(date);
}
}
if (missedDates.length > 0) {
console.log(`Warming cache for ${missedDates.length} dates`);
await this.preloadCache(missedDates, fetchFunction, 2); // Lower concurrency for warming
}
} catch (error) {
console.warn("Cache warming failed:", error);
this.logError(error);
}
}
/**
* Generate array of recent date strings
* @param {number} days - Number of days to include
* @returns {Array<string>} - Array of date strings
* @private
*/
generateRecentDateRange(days = 7) {
const dates = [];
const today = new Date();
for (let i = 0; i < days; i++) {
const date = new Date(today);
date.setDate(today.getDate() - i);
// Use the same date formatting as helpers for consistency
dates.push(
typeof helpers.getTodayString === "function" &&
helpers.getTodayString.length >= 1
? helpers.getTodayString(date)
: date.toISOString().slice(0, 10)
);
}
return dates;
}
/**
* Export cache data for backup with validation
* @returns {Promise<Object|null>} - Cache data for export
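* @example
* // Hedged sketch: back up the cache and restore it later (e.g. after a reinstall).
* const backup = await progressCache.exportCache(); // null if export failed
* if (backup) {
*   await progressCache.importCache(backup);
* }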
*/
async exportCache() {
try {
await this.ready;
const entries = await this.db.progressData.toArray();
const stats = await this.getStats();
// Validate entries before export
const validEntries = entries.filter(entry => {
return (
entry.dateString &&
typeof entry.dateString === "string" &&
entry.timestamp &&
typeof entry.timestamp === "number" &&
entry.data &&
typeof entry.data === "string"
);
});
if (validEntries.length !== entries.length) {
console.warn(
`Filtered out ${entries.length - validEntries.length} invalid entries during export`
);
}
const exportData = {
version: "1.0",
entries: validEntries.map(entry => ({
dateString: entry.dateString,
timestamp: entry.timestamp,
data: this.deserialize(entry.data),
size: entry.size || 0,
})),
stats,
exportDate: new Date().toISOString(),
totalEntries: validEntries.length,
};
// Validate export data integrity
if (!this.validateExportData(exportData)) {
throw new Error("Export data validation failed");
}
return exportData;
} catch (error) {
console.warn("Failed to export cache:", error);
this.logError(error);
return null;
}
}
/**
* Validate export data structure
* @param {Object} exportData - Export data to validate
* @returns {boolean} - Whether data is valid
* @private
*/
validateExportData(exportData) {
return (
exportData &&
typeof exportData === "object" &&
exportData.version &&
Array.isArray(exportData.entries) &&
exportData.exportDate &&
typeof exportData.totalEntries === "number"
);
}
/**
* Import cache data from backup with validation
* @param {Object} cacheData - Cache data to import
*/
async importCache(cacheData) {
if (!cacheData || typeof cacheData !== "object") {
throw new Error("Invalid cache data format");
}
if (!Array.isArray(cacheData.entries)) {
throw new Error("Cache data must contain entries array");
}
try {
// Validate import data
if (!this.validateImportData(cacheData)) {
throw new Error("Import data validation failed");
}
console.log(
`Starting import of ${cacheData.entries.length} cache entries`
);
// Clear existing cache
await this.clear();
// Process entries in batches to avoid memory issues
const batchSize = 100;
let totalImported = 0;
for (let i = 0; i < cacheData.entries.length; i += batchSize) {
const batch = cacheData.entries.slice(i, i + batchSize);
const imported = await this.importBatch(batch);
totalImported += imported;
// Log progress for large imports
if (cacheData.entries.length > 200) {
console.log(
`Imported ${totalImported}/${cacheData.entries.length} entries`
);
}
}
// Warm memory cache for recent items
await this.warmMemoryCacheAfterImport();
console.log(`Successfully imported ${totalImported} cache entries`);
} catch (error) {
console.warn("Failed to import cache:", error);
this.logError(error);
throw error;
}
}
/**
* Validate import data structure
* @param {Object} cacheData - Cache data to validate
* @returns {boolean} - Whether data is valid
* @private
*/
validateImportData(cacheData) {
if (!cacheData.entries || !Array.isArray(cacheData.entries)) {
return false;
}
// Validate sample of entries
const sampleSize = Math.min(10, cacheData.entries.length);
for (let i = 0; i < sampleSize; i++) {
const entry = cacheData.entries[i];
if (
!entry ||
!entry.dateString ||
typeof entry.dateString !== "string" ||
!entry.timestamp ||
typeof entry.timestamp !== "number" ||
!Array.isArray(entry.data)
) {
return false;
}
}
return true;
}
/**
* Import a batch of cache entries
* @param {Array} batch - Batch of entries to import
* @returns {Promise<number>} - Number of entries imported
* @private
*/
async importBatch(batch) {
try {
const validEntries = batch.filter(entry => {
return (
entry &&
entry.dateString &&
typeof entry.dateString === "string" &&
entry.timestamp &&
typeof entry.timestamp === "number" &&
Array.isArray(entry.data)
);
});
if (validEntries.length === 0) {
return 0;
}
await this.db.transaction(
"readwrite",
this.db.progressData,
this.db.cacheStats,
async () => {
const bulkEntries = validEntries.map(entry => {
const serialized = this.serialize(entry.data);
return {
dateString: entry.dateString,
timestamp: entry.timestamp,
data: serialized,
size: this.byteSize(serialized),
};
});
await this.db.progressData.bulkPut(bulkEntries);
// Update total size
const totalSize = bulkEntries.reduce(
(sum, entry) => sum + entry.size,
0
);
const currentStats = await this.getStatsOrDefault();
currentStats.totalSize += totalSize;
currentStats.lastCleanup = Date.now();
await this.db.cacheStats.put(currentStats);
}
);
return validEntries.length;
} catch (error) {
console.warn("Failed to import batch:", error);
this.logError(error);
return 0;
}
}
/**
* Warm memory cache with recent entries after import
* @private
*/
async warmMemoryCacheAfterImport() {
try {
const recentEntries = await this.db.progressData
.orderBy("timestamp")
.reverse()
.limit(CACHE_CONSTANTS.WARM_MEMORY_LIMIT)
.toArray();
for (const entry of recentEntries) {
const data = this.deserialize(entry.data);
this.setMemoryEntry(entry.dateString, {
data,
timestamp: entry.timestamp,
});
}
console.log(
`Warmed memory cache with ${recentEntries.length} recent entries`
);
} catch (error) {
console.warn("Failed to warm memory cache after import:", error);
}
}
/**
* Request persistent storage (mobile optimization)
* @returns {Promise<boolean>} - Whether persistent storage was granted
*/
async requestPersistentStorage() {
try {
if (!this.hasNavigator || !navigator.storage?.persist) {
return false;
}
// Check if already persisted
const persisted = await navigator.storage.persisted();
if (persisted) {
console.log("Storage is already persistent");
return true;
}
// Request persistence
const granted = await navigator.storage.persist();
if (granted) {
console.log("Persistent storage granted");
} else {
console.log("Persistent storage denied");
}
return granted;
} catch (error) {
console.warn("Failed to request persistent storage:", error);
this.logError(error);
return false;
}
}
/**
* Get storage estimate if available
* @returns {Promise<Object|null>} - Storage estimate
*/
async getStorageEstimate() {
try {
if (!this.hasNavigator || !navigator.storage?.estimate) {
return null;
}
const estimate = await navigator.storage.estimate();
return {
quota: estimate.quota
? `${(estimate.quota / 1024 / 1024).toFixed(2)} MB`
: "Unknown",
usage: estimate.usage
? `${(estimate.usage / 1024 / 1024).toFixed(2)} MB`
: "Unknown",
usageDetails: estimate.usageDetails || {},
};
} catch (error) {
console.warn("Failed to get storage estimate:", error);
return null;
}
}
// Metrics and Logging Methods
/**
* Log cache hit
* @param {string} source - Source of cache hit ('memory' or 'database')
* @private
*/
logCacheHit(source) {
this.metrics.hits++;
if (source === "memory") {
this.metrics.memoryHits++;
} else if (source === "database") {
this.metrics.dbHits++;
}
console.debug(`Cache hit from ${source}`);
}
/**
* Log cache miss
* @private
*/
logCacheMiss() {
this.metrics.misses++;
console.debug("Cache miss");
}
/**
* Log error
* @param {Error} error - Error to log
* @private
*/
logError(error) {
this.metrics.errors++;
console.debug("Cache error:", error.message);
}
/**
* Reset metrics
* @private
*/
resetMetrics() {
this.metrics = {
hits: 0,
misses: 0,
errors: 0,
memoryHits: 0,
dbHits: 0,
evictions: 0,
reads: 0,
writes: 0,
};
this.metricsSince = Date.now();
}
/**
* Get performance metrics
* @returns {Object} - Performance metrics with timestamp
*/
getMetrics() {
return {
...this.metrics,
since: this.metricsSince,
};
}
/**
* Health check for cache system
* @returns {Promise<Object>} - Health status
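* @example
* // Hedged sketch: surface cache health in a diagnostics view.
* const health = await progressCache.healthCheck();
* if (health.status !== "healthy") console.warn(health.issues);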
*/
async healthCheck() {
const health = {
status: "healthy",
issues: [],
checks: {
initialization: false,
database: false,
memoryCache: false,
storage: false,
},
};
try {
// Check initialization
await this.ready;
health.checks.initialization = true;
// Check database access
await this.db.cacheStats.get("main");
health.checks.database = true;
// Check memory cache
health.checks.memoryCache = this.memoryCache instanceof Map;
// Check storage estimate
const estimate = await this.getStorageEstimate();
health.checks.storage = estimate !== null;
// Check for issues
const stats = await this.getStats();
const usagePercent = parseFloat(stats.usagePercent);
if (usagePercent > 90) {
health.issues.push("Cache usage is very high (>90%)");
}
if (this.metrics.errors > 10) {
health.issues.push("High error rate detected");
}
if (health.issues.length > 0) {
health.status = "warning";
}
} catch (error) {
health.status = "error";
health.issues.push(`Health check failed: ${error.message}`);
}
return health;
}
/**
* Debug information for troubleshooting
* @returns {Promise<Object>} - Debug information
*/
async getDebugInfo() {
const info = {
constants: CACHE_CONSTANTS,
hasNavigator: this.hasNavigator,
onlineStatus: this.hasNavigator ? navigator.onLine : "unknown",
metrics: this.getMetrics(),
};
try {
const [stats, health, storageEstimate] = await Promise.all([
this.getStats(),
this.healthCheck(),
this.getStorageEstimate(),
]);
return {
...info,
stats,
health,
storageEstimate,
timestamp: new Date().toISOString(),
};
} catch (error) {
return {
...info,
error: error.message,
timestamp: new Date().toISOString(),
};
}
}
}
// Export singleton instance
const progressCache = new MobileProgressCache();
export default progressCache;