refactor(backend): Redis -> Memcached

Author: Tibo De Peuter, 2025-05-15 00:08:16 +02:00
parent 0f2a502521
commit 0928df2cab
Signed by: tdpeuter (GPG key ID: 38297DE43F75FFE2)
9 changed files with 1124 additions and 1152 deletions

View file

@@ -67,9 +67,9 @@ DWENGO_AUTH_TEACHER_JWKS_ENDPOINT=http://hostname/realms/teacher/protocol/openid
 # If running your stack in docker, this should use the docker service name.
 #DWENGO_LOGGING_LOKI_HOST=http://localhost:3102
-# The hostname or IP address of the Redis cache.
+# The hostname or IP address of the caching server, e.g. Memcached.
 # If running your stack in docker, this should use the docker service name.
 #DWENGO_CACHE_HOST=localhost
-#DWENGO_CACHE_PORT=6379
+#DWENGO_CACHE_PORT=11211
 #DWENGO_CACHE_TTL=3600
 #DWENGO_CACHE_KEY_PREFIX=dwengo

View file

@@ -37,5 +37,5 @@ DWENGO_LOGGING_LEVEL=info
 DWENGO_LOGGING_LOKI_HOST=http://logging:3102
 DWENGO_CACHE_HOST=cache
-#DWENGO_CACHE_PORT=6379
+#DWENGO_CACHE_PORT=11211
 DWENGO_CACHE_TTL=604800

View file

@@ -37,8 +37,8 @@
         "jwks-rsa": "^3.1.0",
         "loki-logger-ts": "^1.0.2",
         "marked": "^15.0.7",
+        "memjs": "^1.3.2",
         "nanoid": "^5.1.5",
-        "redis": "^5.0.1",
         "response-time": "^2.3.3",
         "swagger-ui-express": "^5.0.1",
         "uuid": "^11.1.0",
@@ -50,6 +50,7 @@
         "@types/cors": "^2.8.17",
         "@types/express": "^5.0.0",
         "@types/js-yaml": "^4.0.9",
+        "@types/memjs": "^1.3.3",
         "@types/node": "^22.13.4",
         "@types/response-time": "^2.3.8",
         "@types/swagger-ui-express": "^4.1.8",

View file

@@ -1,42 +1,40 @@
-import { createClient, RedisClientType } from 'redis';
 import { getLogger } from './logging/initalize.js';
 import { envVars, getEnvVar } from './util/envVars.js';
+import { Client } from 'memjs';
 
-export type CacheClient = RedisClientType;
+export type CacheClient = Client;
 
-let redisClient: CacheClient;
+let cacheClient: CacheClient;
 
 async function initializeClient(): Promise<CacheClient> {
-    if (redisClient !== undefined) {
-        return redisClient;
+    if (cacheClient !== undefined) {
+        return cacheClient;
     }
 
-    const redisHost = getEnvVar(envVars.CacheHost);
-    const redisPort = getEnvVar(envVars.CachePort);
-    const redisUrl = `redis://${redisHost}:${redisPort}`;
+    const cachingHost = getEnvVar(envVars.CacheHost);
+    const cachingPort = getEnvVar(envVars.CachePort);
+    const cachingUrl = `${cachingHost}:${cachingPort}`;
 
-    redisClient = createClient({
-        url: redisUrl
-    });
+    cacheClient = Client.create(cachingUrl);
 
-    redisClient.on('error', (err) => getLogger().error('Redis error:', err));
-    await redisClient.connect();
+    getLogger().info(`Memcached client initialized at ${cachingUrl}`);
 
-    return redisClient;
+    return cacheClient;
 }
 
 export async function getCacheClient(): Promise<CacheClient> {
-    redisClient ||= await initializeClient();
-    return redisClient;
+    cacheClient ||= await initializeClient();
+    return cacheClient;
 }
 
-export async function checkRedisHealth(): Promise<boolean> {
+export async function checkCachingHealth(): Promise<boolean> {
     try {
-        await redisClient.set('health', 'ok');
-        const reply = await redisClient.get('health');
-        return reply === 'ok';
+        const client = await getCacheClient();
+        await client.set('health', Buffer.from('ok'), { expires: 60 });
+        const reply = await cacheClient.get('health');
+        return reply?.value?.toString() === 'ok';
     } catch (error) {
-        getLogger().error('Redis Health Check Failed:', error);
+        getLogger().error('Caching Health Check Failed:', error);
         return false;
     }
 }
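
Note on the memjs API used above: unlike the redis client, memjs needs no explicit connect() call, set takes an options object with a TTL in seconds, and get resolves to an object whose value is a Buffer (null on a miss), which is why the health check compares reply?.value?.toString(). A minimal standalone sketch of that shape, assuming a Memcached instance reachable at localhost:11211 (the address is illustrative; the backend builds it from DWENGO_CACHE_HOST/PORT):

```ts
import { Client } from 'memjs';

// Illustrative only: the backend resolves this address from envVars instead.
const client = Client.create('localhost:11211');

async function demo(): Promise<void> {
    // Values are written as strings or Buffers; `expires` is a TTL in seconds.
    await client.set('greeting', Buffer.from('hello'), { expires: 60 });

    // `get` resolves to `{ value, flags }`; `value` is null on a cache miss.
    const reply = await client.get('greeting');
    console.log(reply?.value?.toString()); // "hello"

    client.close();
}

demo().catch((err) => console.error(err));
```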

View file

@@ -3,6 +3,7 @@ import { getLogger, Logger } from '../logging/initalize.js';
 import { LearningObjectIdentifier } from '../entities/content/learning-object-identifier.js';
 import { getCacheClient } from '../caching.js';
 import { envVars, getEnvVar, getNumericEnvVar } from './envVars.js';
+import { createHash } from 'crypto';
 
 const logger: Logger = getLogger();
 const runMode: string = getEnvVar(envVars.RunMode);
@@ -29,13 +30,13 @@ export async function fetchRemote<T>(
     url: string,
     description: string,
     options?: Options,
-    cacheTTL?: number
+    cacheTTL?: number,
 ): Promise<T | null> {
-    if (runMode !== 'dev') {
+    if (runMode !== 'dev' && !runMode.includes('test')) {
         return fetchWithCache<T>(url, description, options, cacheTTL);
     }
 
-    getLogger().info(`🔄 INFO: Bypassing cache for ${description} at "${url}".`);
+    getLogger().debug(`🔄 INFO: Bypassing cache for ${description} at "${url}".`);
     return fetchWithLogging(url, description, options);
 }
 
@@ -43,25 +44,28 @@ async function fetchWithCache<T>(
     url: string,
     description: string,
     options?: Options,
-    cacheTTL?: number
+    cacheTTL?: number,
 ): Promise<T | null> {
-    // Create a unique cache key based on the URL and options
-    const cacheKey = `${prefix}:${url}${options?.params ? JSON.stringify(options.params) : ''}`;
-    const cacheClient = await getCacheClient();
+    // Combine the URL and parameters to create a unique cache key.
+    // NOTE Using a hash function to keep the key short, since Memcached has a limit on key size
+    const urlWithParams = `${url}${options?.params ? JSON.stringify(options.params) : ''}`;
+    const hashedUrl = createHash('sha256').update(urlWithParams).digest('hex');
+    const key = `${prefix}:${hashedUrl}`;
+    const client = await getCacheClient();
 
-    const cachedData = await cacheClient.get(cacheKey);
-    if (cachedData !== null && cachedData !== undefined) { // TODO What should this condition actually be?
-        // Cache hit! :)
-        getLogger().debug(`✅ INFO: Cache hit for ${description} at "${url}".`);
-        return JSON.parse(cachedData) as T;
+    const cachedData = await client.get(key);
+    if (cachedData?.value) {
+        logger.debug(`✅ INFO: Cache hit for ${description} at "${url}" (key: "${key}")`);
+        return JSON.parse(cachedData.value.toString()) as T;
     }
 
     // Cache miss :(
     logger.debug(`🔄 INFO: Cache miss for ${description} at "${url}". Fetching data...`);
     const response = await fetchWithLogging<T>(url, description, options);
     logger.debug(`🔄 INFO: Fetched data for ${description} at "${url}".`);
 
-    await cacheClient.setEx(cacheKey, cacheTTL || getNumericEnvVar(envVars.CacheTTL), JSON.stringify(response));
-    logger.debug(`✅ INFO: Cached response for ${description} at "${url}" for ${cacheTTL} seconds.`);
+    const ttl = cacheTTL || getNumericEnvVar(envVars.CacheTTL);
+    await client.set(key, JSON.stringify(response), { expires: ttl });
+    logger.debug(`✅ INFO: Cached response for ${description} at "${url}" for ${ttl} seconds. (key: "${key}")`);
     return response;
 }
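
The hashing step exists because Memcached rejects keys longer than 250 bytes (and keys containing spaces or control characters), whereas the old Redis keys embedded the full URL and serialized params. A minimal sketch of the same key-derivation idea in isolation; the helper name cacheKeyFor is illustrative, not part of the codebase:

```ts
import { createHash } from 'crypto';

// Illustrative helper: derive a short, fixed-length cache key from an
// arbitrarily long URL plus optional query parameters.
function cacheKeyFor(prefix: string, url: string, params?: Record<string, unknown>): string {
    const urlWithParams = `${url}${params ? JSON.stringify(params) : ''}`;
    // SHA-256 gives a 64-character hex digest, well under Memcached's 250-byte key limit
    // and free of spaces regardless of what the URL or params contain.
    const digest = createHash('sha256').update(urlWithParams).digest('hex');
    return `${prefix}:${digest}`;
}

// Same inputs always yield the same key; different params yield different keys.
console.log(cacheKeyFor('dwengo', 'https://example.org/api/learning-objects', { language: 'en' }));
```

The trade-off is that hashed keys are no longer human-readable, which is why the cache hit/miss log lines above now also print the derived key.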

View file

@@ -42,7 +42,7 @@ export const envVars: Record<string, EnvVar> = {
     LokiHost: { key: LOGGING_PREFIX + 'LOKI_HOST', defaultValue: 'http://localhost:3102' },
 
     CacheHost: { key: CACHE_PREFIX + 'HOST', defaultValue: 'localhost' },
-    CachePort: { key: CACHE_PREFIX + 'PORT', defaultValue: 6379 },
+    CachePort: { key: CACHE_PREFIX + 'PORT', defaultValue: 11211 },
     CacheTTL: { key: CACHE_PREFIX + 'TTL', defaultValue: 60 * 60 * 24 }, // 24 hours
     CacheKeyPrefix: { key: CACHE_PREFIX + 'KEY_PREFIX', defaultValue: 'dwengo' },
 } as const;

View file

@@ -143,6 +143,16 @@ services:
         volumes:
             - dwengo_grafana_data:/var/lib/grafana
 
+    caching:
+        image: memcached
+        restart: always
+        ports:
+            - '11211:11211'
+        command:
+            - --conn-limit=1024
+            - --memory-limit=64
+            - --threads=4
+
 volumes:
     dwengo_grafana_data:
     dwengo_letsencrypt:

View file

@@ -87,16 +87,17 @@ services:
         restart: unless-stopped
 
     caching:
-        image: redis:6.2-alpine
+        image: memcached
         restart: always
         ports:
-            - '6379:6379'
-        command: redis-server --save 20 1 --loglevel warning
-        volumes:
-            - dwengo_cache:/data
+            - '11211:11211'
+        command:
+            - --conn-limit=1024
+            - --memory-limit=2048
+            - -I 128m
+            - --threads=4
 
 volumes:
     dwengo_grafana_data:
     dwengo_loki_data:
     dwengo_postgres_data:
-    dwengo_cache:
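
Unlike the Redis service it replaces, the Memcached container keeps everything in RAM: there is no equivalent of redis-server --save, so the dwengo_cache volume is dropped and cached entries simply disappear on restart. The flags bound the cache instead: --conn-limit caps simultaneous connections, --memory-limit is the cache size in megabytes, -I raises the maximum item size (128m here, versus the 1 MB default), and --threads sets the number of worker threads. As a hedged smoke test, assuming the stack is up and port 11211 is published on the host as configured above:

```ts
import { Client } from 'memjs';

// Hypothetical check, not part of the codebase: write and read back one value
// through the published port to confirm the container answers.
async function smokeTest(): Promise<void> {
    const client = Client.create('localhost:11211');
    await client.set('smoke', Buffer.from('ok'), { expires: 10 });
    const reply = await client.get('smoke');
    console.log(reply?.value?.toString() === 'ok' ? 'Memcached is reachable' : 'Unexpected reply');
    client.close();
}

smokeTest().catch((err) => console.error('Memcached is not reachable:', err));
```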

package-lock.json (generated): 2166 changed lines

File diff suppressed because it is too large