feat: custom image proxy (#3056)

Ryan Cohen authored on 2022-10-18 14:40:24 +09:00 (committed by GitHub)
parent bfe56c3470, commit 500cd1f872
16 changed files with 499 additions and 50 deletions


@@ -17,6 +17,7 @@ import WebPushAgent from '@server/lib/notifications/agents/webpush';
 import { getSettings } from '@server/lib/settings';
 import logger from '@server/logger';
 import routes from '@server/routes';
+import imageproxy from '@server/routes/imageproxy';
 import { getAppVersion } from '@server/utils/appVersion';
 import restartFlag from '@server/utils/restartFlag';
 import { getClientIp } from '@supercharge/request-ip';
@@ -176,6 +177,9 @@ app
       next();
     });
     server.use('/api/v1', routes);
+    server.use('/imageproxy', imageproxy);
     server.get('*', (req, res) => handle(req, res));
     server.use(
       (


@@ -51,6 +51,11 @@ export interface CacheItem {
   };
 }
 
+export interface CacheResponse {
+  apiCaches: CacheItem[];
+  imageCache: Record<'tmdb', { size: number; imageCount: number }>;
+}
+
 export interface StatusResponse {
   version: string;
   commitTag: string;


@@ -1,4 +1,5 @@
 import downloadTracker from '@server/lib/downloadtracker';
+import ImageProxy from '@server/lib/imageproxy';
 import { plexFullScanner, plexRecentScanner } from '@server/lib/scanners/plex';
 import { radarrScanner } from '@server/lib/scanners/radarr';
 import { sonarrScanner } from '@server/lib/scanners/sonarr';
@@ -133,5 +134,21 @@ export const startJobs = (): void => {
     }),
   });
+
+  // Run image cache cleanup every 24 hours
+  scheduledJobs.push({
+    id: 'image-cache-cleanup',
+    name: 'Image Cache Cleanup',
+    type: 'process',
+    interval: 'long',
+    cronSchedule: jobs['image-cache-cleanup'].schedule,
+    job: schedule.scheduleJob(jobs['image-cache-cleanup'].schedule, () => {
+      logger.info('Starting scheduled job: Image Cache Cleanup', {
+        label: 'Jobs',
+      });
+      // Clean TMDB image cache
+      ImageProxy.clearCache('tmdb');
+    }),
+  });
 
   logger.info('Scheduled jobs loaded', { label: 'Jobs' });
 };
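
node-schedule reads these six-field cron expressions with a leading seconds field, so the default schedule defined later in this commit's settings defaults, '0 0 5 * * *', fires once per day at 05:00:00. A minimal standalone sketch of the same wiring, outside the scheduledJobs bookkeeping:

import schedule from 'node-schedule';
import ImageProxy from '@server/lib/imageproxy';

// sec min hour day-of-month month day-of-week
schedule.scheduleJob('0 0 5 * * *', () => {
  // Remove any cached TMDB files whose embedded expireAt timestamp has passed.
  void ImageProxy.clearCache('tmdb');
});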

server/lib/imageproxy.ts (new file, 268 lines)

@@ -0,0 +1,268 @@
import logger from '@server/logger';
import axios from 'axios';
import rateLimit, { type rateLimitOptions } from 'axios-rate-limit';
import { createHash } from 'crypto';
import { promises } from 'fs';
import path, { join } from 'path';

type ImageResponse = {
  meta: {
    revalidateAfter: number;
    curRevalidate: number;
    isStale: boolean;
    etag: string;
    extension: string;
    cacheKey: string;
    cacheMiss: boolean;
  };
  imageBuffer: Buffer;
};

class ImageProxy {
  public static async clearCache(key: string) {
    let deletedImages = 0;
    const cacheDirectory = path.join(
      __dirname,
      '../../config/cache/images/',
      key
    );

    const files = await promises.readdir(cacheDirectory);

    for (const file of files) {
      const filePath = path.join(cacheDirectory, file);
      const stat = await promises.lstat(filePath);

      if (stat.isDirectory()) {
        const imageFiles = await promises.readdir(filePath);

        for (const imageFile of imageFiles) {
          // Cached files are named `${maxAge}.${expireAt}.${etag}.${extension}`,
          // so the second segment is the expiry timestamp.
          const [, expireAtSt] = imageFile.split('.');
          const expireAt = Number(expireAtSt);
          const now = Date.now();

          if (now > expireAt) {
            await promises.rm(path.join(filePath, imageFile));
            deletedImages += 1;
          }
        }
      }
    }

    logger.info(`Cleared ${deletedImages} stale image(s) from cache`, {
      label: 'Image Cache',
    });
  }

  public static async getImageStats(
    key: string
  ): Promise<{ size: number; imageCount: number }> {
    const cacheDirectory = path.join(
      __dirname,
      '../../config/cache/images/',
      key
    );

    const imageTotalSize = await ImageProxy.getDirectorySize(cacheDirectory);
    const imageCount = await ImageProxy.getImageCount(cacheDirectory);

    return {
      size: imageTotalSize,
      imageCount,
    };
  }

  private static async getDirectorySize(dir: string): Promise<number> {
    const files = await promises.readdir(dir, {
      withFileTypes: true,
    });

    const paths = files.map(async (file) => {
      const path = join(dir, file.name);

      if (file.isDirectory()) return await ImageProxy.getDirectorySize(path);

      if (file.isFile()) {
        const { size } = await promises.stat(path);

        return size;
      }

      return 0;
    });

    return (await Promise.all(paths))
      .flat(Infinity)
      .reduce((i, size) => i + size, 0);
  }

  private static async getImageCount(dir: string) {
    const files = await promises.readdir(dir);

    return files.length;
  }

  private axios;
  private cacheVersion;
  private key;

  constructor(
    key: string,
    baseUrl: string,
    options: {
      cacheVersion?: number;
      rateLimitOptions?: rateLimitOptions;
    } = {}
  ) {
    this.cacheVersion = options.cacheVersion ?? 1;
    this.key = key;
    this.axios = axios.create({
      baseURL: baseUrl,
    });

    if (options.rateLimitOptions) {
      this.axios = rateLimit(this.axios, options.rateLimitOptions);
    }
  }

  public async getImage(path: string): Promise<ImageResponse> {
    const cacheKey = this.getCacheKey(path);

    const imageResponse = await this.get(cacheKey);

    if (!imageResponse) {
      const newImage = await this.set(path, cacheKey);

      if (!newImage) {
        throw new Error('Failed to load image');
      }

      return newImage;
    }

    // If the image is stale, we will revalidate it in the background.
    if (imageResponse.meta.isStale) {
      this.set(path, cacheKey);
    }

    return imageResponse;
  }

  private async get(cacheKey: string): Promise<ImageResponse | null> {
    try {
      const directory = join(this.getCacheDirectory(), cacheKey);
      const files = await promises.readdir(directory);

      const now = Date.now();

      for (const file of files) {
        const [maxAgeSt, expireAtSt, etag, extension] = file.split('.');
        const buffer = await promises.readFile(join(directory, file));
        const expireAt = Number(expireAtSt);
        const maxAge = Number(maxAgeSt);

        return {
          meta: {
            curRevalidate: maxAge,
            revalidateAfter: maxAge * 1000 + now,
            isStale: now > expireAt,
            etag,
            extension,
            cacheKey,
            cacheMiss: false,
          },
          imageBuffer: buffer,
        };
      }
    } catch (e) {
      // No files. Treat as empty cache.
    }

    return null;
  }

  private async set(
    path: string,
    cacheKey: string
  ): Promise<ImageResponse | null> {
    try {
      const directory = join(this.getCacheDirectory(), cacheKey);
      const response = await this.axios.get(path, {
        responseType: 'arraybuffer',
      });

      const buffer = Buffer.from(response.data, 'binary');
      const extension = path.split('.').pop() ?? '';
      const maxAge = Number(response.headers['cache-control'].split('=')[1]);
      const expireAt = Date.now() + maxAge * 1000;
      const etag = response.headers.etag.replace(/"/g, '');

      await this.writeToCacheDir(
        directory,
        extension,
        maxAge,
        expireAt,
        buffer,
        etag
      );

      return {
        meta: {
          curRevalidate: maxAge,
          revalidateAfter: expireAt,
          isStale: false,
          etag,
          extension,
          cacheKey,
          cacheMiss: true,
        },
        imageBuffer: buffer,
      };
    } catch (e) {
      logger.debug('Something went wrong caching image.', {
        label: 'Image Cache',
        errorMessage: e.message,
      });

      return null;
    }
  }

  private async writeToCacheDir(
    dir: string,
    extension: string,
    maxAge: number,
    expireAt: number,
    buffer: Buffer,
    etag: string
  ) {
    const filename = join(dir, `${maxAge}.${expireAt}.${etag}.${extension}`);

    await promises.rm(dir, { force: true, recursive: true }).catch(() => {
      // do nothing
    });

    await promises.mkdir(dir, { recursive: true });

    await promises.writeFile(filename, buffer);
  }

  private getCacheKey(path: string) {
    return this.getHash([this.key, this.cacheVersion, path]);
  }

  private getHash(items: (string | number | Buffer)[]) {
    const hash = createHash('sha256');

    for (const item of items) {
      if (typeof item === 'number') hash.update(String(item));
      else {
        hash.update(item);
      }
    }

    // See https://en.wikipedia.org/wiki/Base64#Filenames
    return hash.digest('base64').replace(/\//g, '-');
  }

  private getCacheDirectory() {
    return path.join(__dirname, '../../config/cache/images/', this.key);
  }
}

export default ImageProxy;
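
A minimal usage sketch of the class above; the instance options mirror the TMDB proxy created in the new route file, while the image path itself is only illustrative:

import ImageProxy from '@server/lib/imageproxy';

const proxy = new ImageProxy('tmdb', 'https://image.tmdb.org', {
  rateLimitOptions: { maxRequests: 20, maxRPS: 50 },
});

// The first call downloads the file and writes it under config/cache/images/tmdb/<cacheKey>/,
// named `${maxAge}.${expireAt}.${etag}.${extension}`; later calls are served from disk until stale.
const image = await proxy.getImage('/t/p/w300/example.jpg');
console.log(image.meta.cacheMiss, image.meta.etag, image.imageBuffer.length);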


@@ -247,7 +247,8 @@ export type JobId =
   | 'radarr-scan'
   | 'sonarr-scan'
   | 'download-sync'
-  | 'download-sync-reset';
+  | 'download-sync-reset'
+  | 'image-cache-cleanup';
 
 interface AllSettings {
   clientId: string;
@@ -414,6 +415,9 @@ class Settings {
         'download-sync-reset': {
           schedule: '0 0 1 * * *',
         },
+        'image-cache-cleanup': {
+          schedule: '0 0 5 * * *',
+        },
       },
     };
     if (initialSettings) {


@@ -0,0 +1,39 @@
import ImageProxy from '@server/lib/imageproxy';
import logger from '@server/logger';
import { Router } from 'express';

const router = Router();

const tmdbImageProxy = new ImageProxy('tmdb', 'https://image.tmdb.org', {
  rateLimitOptions: {
    maxRequests: 20,
    maxRPS: 50,
  },
});

/**
 * Image Proxy
 */
router.get('/*', async (req, res) => {
  const imagePath = req.path.replace('/image', '');
  try {
    const imageData = await tmdbImageProxy.getImage(imagePath);

    res.writeHead(200, {
      'Content-Type': `image/${imageData.meta.extension}`,
      'Content-Length': imageData.imageBuffer.length,
      'Cache-Control': `public, max-age=${imageData.meta.curRevalidate}`,
      'OS-Cache-Key': imageData.meta.cacheKey,
      'OS-Cache-Status': imageData.meta.cacheMiss ? 'MISS' : 'HIT',
    });

    res.end(imageData.imageBuffer);
  } catch (e) {
    logger.error('Failed to proxy image', {
      imagePath,
      errorMessage: e.message,
    });
    res.status(500).send();
  }
});

export default router;
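
Requesting any path under the mounted route returns the proxied bytes together with the custom cache headers set above; the poster path below is a placeholder, not one referenced by this commit:

const res = await fetch('/imageproxy/t/p/w300/example-poster.jpg');

console.log(res.headers.get('Content-Type')); // image/<extension of the requested file>
console.log(res.headers.get('OS-Cache-Status')); // 'MISS' on the first request, 'HIT' once cached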


@@ -14,9 +14,10 @@ import type {
 import { scheduledJobs } from '@server/job/schedule';
 import type { AvailableCacheIds } from '@server/lib/cache';
 import cacheManager from '@server/lib/cache';
+import ImageProxy from '@server/lib/imageproxy';
 import { Permission } from '@server/lib/permissions';
 import { plexFullScanner } from '@server/lib/scanners/plex';
-import type { MainSettings } from '@server/lib/settings';
+import type { JobId, MainSettings } from '@server/lib/settings';
 import { getSettings } from '@server/lib/settings';
 import logger from '@server/logger';
 import { isAuthenticated } from '@server/middleware/auth';
@@ -491,7 +492,7 @@ settingsRoutes.post<{ jobId: string }>('/jobs/:jobId/run', (req, res, next) => {
   });
 });
 
-settingsRoutes.post<{ jobId: string }>(
+settingsRoutes.post<{ jobId: JobId }>(
   '/jobs/:jobId/cancel',
   (req, res, next) => {
     const scheduledJob = scheduledJobs.find(
@@ -518,7 +519,7 @@ settingsRoutes.post<{ jobId: string }>(
   }
 );
 
-settingsRoutes.post<{ jobId: string }>(
+settingsRoutes.post<{ jobId: JobId }>(
   '/jobs/:jobId/schedule',
   (req, res, next) => {
     const scheduledJob = scheduledJobs.find(
@@ -553,16 +554,23 @@ settingsRoutes.post<{ jobId: string }>(
   }
 );
 
-settingsRoutes.get('/cache', (req, res) => {
-  const caches = cacheManager.getAllCaches();
+settingsRoutes.get('/cache', async (_req, res) => {
+  const cacheManagerCaches = cacheManager.getAllCaches();
 
-  return res.status(200).json(
-    Object.values(caches).map((cache) => ({
-      id: cache.id,
-      name: cache.name,
-      stats: cache.getStats(),
-    }))
-  );
+  const apiCaches = Object.values(cacheManagerCaches).map((cache) => ({
+    id: cache.id,
+    name: cache.name,
+    stats: cache.getStats(),
+  }));
+
+  const tmdbImageCache = await ImageProxy.getImageStats('tmdb');
+
+  return res.status(200).json({
+    apiCaches,
+    imageCache: {
+      tmdb: tmdbImageCache,
+    },
+  });
 });
 
 settingsRoutes.post<{ cacheId: AvailableCacheIds }>(
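
Combined with the CacheResponse interface added earlier, callers of the settings cache endpoint now receive both the API cache stats and the image cache stats. The endpoint path and import path below are assumptions based on how the routers are mounted in this commit, and the logged values are illustrative:

import type { CacheResponse } from '@server/interfaces/api/settingsInterfaces'; // assumed path

const data: CacheResponse = await (await fetch('/api/v1/settings/cache')).json();

data.apiCaches.forEach((cache) => console.log(cache.id, cache.name, cache.stats));
console.log(data.imageCache.tmdb); // e.g. { size: 10485760, imageCount: 123 }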