4 changes: 3 additions & 1 deletion frontend/nuxt.config.ts
@@ -8,6 +8,7 @@ import primevue from './setup/primevue';
import echarts from './setup/echarts';
import caching from './setup/caching';
import sitemap from './setup/sitemap';
import rateLimiter from './setup/rate-limiter';

const isProduction = process.env.NUXT_APP_ENV === 'production';
const isDevelopment = process.env.NODE_ENV === 'development';
@@ -52,7 +53,7 @@ export default defineNuxtConfig({
primevue,
echarts,
runtimeConfig: {
// These are are only available on the server-side and can be overridden by the .env file
// These are only available on the server-side and can be overridden by the .env file
appEnv: process.env.APP_ENV,
tinybirdBaseUrl: 'https://api.us-west-2.aws.tinybird.co',
tinybirdToken: '',
@@ -79,6 +80,7 @@
cmDbPassword: 'example',
cmDbDatabase: 'crowd-web',
dataCopilotDefaultSegmentId: '',
rateLimiter: rateLimiter,
// These are also exposed on the client-side
public: {
apiBase: '/api',
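The nuxt.config.ts change imports a `rateLimiter` object from `./setup/rate-limiter` and exposes it as server-only `runtimeConfig.rateLimiter`. That setup module is not part of this diff; as a rough sketch of what it might export, based on the `RateLimiterConfig` fixture used in `rate-limiter.test.ts` below (the field names mirror that fixture, while the environment variable names and import path are purely illustrative assumptions):

```ts
// frontend/setup/rate-limiter.ts — hypothetical sketch, not the module from this PR.
// Field names mirror the RateLimiterConfig fixture in rate-limiter.test.ts;
// the environment variable names are illustrative assumptions.
import type { RateLimiterConfig } from '../server/types/rate-limiter';

const rateLimiter: RateLimiterConfig = {
  enabled: process.env.RATE_LIMITER_ENABLED !== 'false', // assumed on/off toggle
  defaultLimit: { maxRequests: 100, windowSeconds: 60 },
  secret: process.env.RATE_LIMITER_SECRET ?? '',         // secret used when hashing client IPs
  redisDatabase: 0,
  rules: [],       // per-route / per-method overrides (shape not visible in this diff)
  exclusions: [],  // paths exempt from rate limiting (assumption)
};

export default rateLimiter;
```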
1 change: 1 addition & 0 deletions frontend/package.json
@@ -39,6 +39,7 @@
"@pinia/nuxt": "^0.11.2",
"@popperjs/core": "^2.11.8",
"@primevue/themes": "^4.4.1",
"@redis/client": "^5.9.0",
"@tanstack/vue-query": "^5.90.5",
"@types/jsonwebtoken": "^9.0.10",
"@vuelidate/core": "^2.0.3",
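The new `@redis/client` dependency is the core node-redis client package, which the middleware's `getRedisClient` helper presumably wraps. A minimal usage sketch for orientation (the connection values are placeholders):

```ts
// Minimal @redis/client usage sketch — connection values are placeholders.
import { createClient } from '@redis/client';

const client = createClient({
  url: 'redis://localhost:6379', // placeholder; the middleware reads config.redisUrl
  database: 0,                   // matches rateLimiter.redisDatabase in the runtime config
});

client.on('error', (err) => console.error('Redis error:', err));
await client.connect();

// The kind of simple commands a rate limiter typically relies on:
await client.incr('rate-limit:example');
await client.expire('rate-limit:example', 60);
```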
21 changes: 16 additions & 5 deletions frontend/pnpm-lock.yaml

Some generated files are not rendered by default.

141 changes: 141 additions & 0 deletions frontend/server/middleware/rate-limiter.test.ts
@@ -0,0 +1,141 @@
// Copyright (c) 2025 The Linux Foundation and each contributor.
// SPDX-License-Identifier: MIT

import type { H3Event } from 'h3';
import type { RedisClientType } from '@redis/client';
import { describe, it, expect, vi, beforeEach, afterEach, beforeAll } from 'vitest';
import type { RateLimiterConfig } from '~~/server/types/rate-limiter';

const checkRateLimitMock = vi.fn();
const mockSetResponseHeaders = vi.fn();
const mockCreateError = vi.fn((error) => error);

// Mock Nuxt/H3 global functions (auto-imported by Nuxt)
global.defineEventHandler = vi.fn((handler) => handler);
global.useRuntimeConfig = vi.fn();
global.setResponseHeaders = mockSetResponseHeaders;
global.createError = mockCreateError;

vi.mock('../utils/rate-limiter', () => ({
checkRateLimit: (...args: unknown[]) => checkRateLimitMock(...args),
}));

let handleRateLimiting: typeof import('./rate-limiter').handleRateLimiting;

beforeAll(async () => {
// Import after mocks are set so the module picks up the mocked dependencies.
({ handleRateLimiting } = await import('./rate-limiter'));
});

const baseConfig: RateLimiterConfig = {
enabled: true,
defaultLimit: { maxRequests: 100, windowSeconds: 60 },
secret: 'secret',
redisDatabase: 0,
rules: [],
exclusions: [],
};

const mockRedisClient = {} as unknown as RedisClientType;

function createEvent(): H3Event {
return {
path: '/api/test',
method: 'GET',
node: {
req: {
socket: {
remoteAddress: '127.0.0.1',
},
},
},
} as unknown as H3Event;
}

describe('handleRateLimiting', () => {
beforeEach(() => {
checkRateLimitMock.mockReset();
mockSetResponseHeaders.mockReset();
mockCreateError.mockReset().mockImplementation((error) => error);
});

afterEach(() => {
vi.clearAllMocks();
});

it('skips rate limiting when disabled', async () => {
const config = { ...baseConfig, enabled: false };
const event = createEvent();

await handleRateLimiting(event, config, mockRedisClient);

expect(checkRateLimitMock).not.toHaveBeenCalled();
expect(mockSetResponseHeaders).not.toHaveBeenCalled();
});

it('sets rate limit headers when request is allowed', async () => {
const config = { ...baseConfig };
const event = createEvent();
checkRateLimitMock.mockResolvedValue({
allowed: true,
limit: 10,
remaining: 9,
resetIn: 30,
current: 1,
});

await handleRateLimiting(event, config, mockRedisClient);

expect(mockSetResponseHeaders).toHaveBeenCalledWith(event, {
'X-RateLimit-Limit': '10',
'X-RateLimit-Remaining': '9',
'X-RateLimit-Reset': '30',
});
});

it('throws a 429 error and sets headers when request is blocked', async () => {
const config = { ...baseConfig };
const event = createEvent();
checkRateLimitMock.mockResolvedValue({
allowed: false,
limit: 5,
remaining: 0,
resetIn: 42,
current: 6,
});
mockCreateError.mockImplementation((error) => error);

await expect(handleRateLimiting(event, config, mockRedisClient)).rejects.toMatchObject({
statusCode: 429,
statusMessage: 'Too Many Requests',
});

expect(mockSetResponseHeaders).toHaveBeenCalledWith(event, {
'X-RateLimit-Limit': '5',
'X-RateLimit-Remaining': '0',
'X-RateLimit-Reset': '42',
});
});

it('rethrows 429 errors originating from checkRateLimit', async () => {
const config = { ...baseConfig };
const event = createEvent();
const rateLimitError = { statusCode: 429, message: 'Too many requests' };
checkRateLimitMock.mockRejectedValue(rateLimitError);

await expect(handleRateLimiting(event, config, mockRedisClient)).rejects.toBe(rateLimitError);
});

it('logs and continues when a non-429 error occurs', async () => {
const config = { ...baseConfig };
const event = createEvent();
const unexpectedError = new Error('redis unavailable');
checkRateLimitMock.mockRejectedValue(unexpectedError);
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {});

await handleRateLimiting(event, config, mockRedisClient);

expect(consoleSpy).toHaveBeenCalledWith('Rate limiter error:', unexpectedError);
expect(mockSetResponseHeaders).not.toHaveBeenCalled();
});
});
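The `RateLimiterConfig` type and the result object that `checkRateLimit` resolves with both come from modules that are not part of this diff. Reconstructed from the test fixtures above, they look roughly like this; the element types of `rules` and `exclusions` are not visible here and are left loose, and the comments are inferences rather than documentation from the PR:

```ts
// Approximate shapes inferred from the test fixtures — not the actual
// definitions in ~~/server/types/rate-limiter or ../utils/rate-limiter.
export interface RateLimiterConfig {
  enabled: boolean;
  defaultLimit: { maxRequests: number; windowSeconds: number };
  secret: string;          // used to hash client IPs (per the middleware's GDPR note)
  redisDatabase: number;
  rules: unknown[];        // per-route / per-method overrides; exact shape not shown in this diff
  exclusions: unknown[];   // excluded paths; exact shape not shown in this diff
}

// Shape of the object the tests have checkRateLimit resolve with.
export interface RateLimitResult {
  allowed: boolean;
  limit: number;     // maximum requests permitted in the window
  remaining: number; // requests left in the current window
  resetIn: number;   // seconds until the window resets
  current: number;   // requests counted so far in the current window
}
```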
76 changes: 76 additions & 0 deletions frontend/server/middleware/rate-limiter.ts
@@ -0,0 +1,76 @@
// Copyright (c) 2025 The Linux Foundation and each contributor.
// SPDX-License-Identifier: MIT

import type { H3Event } from 'h3';
import type { RedisClientType } from '@redis/client';
import { checkRateLimit } from '../utils/rate-limiter';
import { getRedisClient } from '../utils/redis-client';
import type { RateLimiterConfig } from '~~/server/types/rate-limiter';

/**
* Rate-limiting middleware that checks incoming requests against the configured rate limits and
* blocks those that exceed them. The defineEventHandler entrypoint stays thin and delegates the
* actual logic to handleRateLimiting, which makes testing easier by letting tests inject mocked
* dependencies and configuration.
*
* Features:
* - Uses Redis for distributed rate limiting
* - Hashes IP addresses for GDPR compliance
* - Supports per-route and per-method limits
* - Adds rate limit headers to responses
*/
export default defineEventHandler(async (event: H3Event) => {
const config = useRuntimeConfig();
const rateLimiterConfig = config.rateLimiter as RateLimiterConfig;

// getRedisClient memoizes the client instance, so it is safe to call it on every request.
const redisClient = await getRedisClient(config.redisUrl, rateLimiterConfig.redisDatabase, true);

await handleRateLimiting(event, rateLimiterConfig, redisClient);
});

/**
* Handles rate limiting for the given event and rate limiter configuration.
*
* @param event - The H3 event object for the incoming request.
* @param rateLimiterConfig - The rate limiter configuration to use.
* @param redisClient - The Redis client instance to use for rate limiting.
*/
export async function handleRateLimiting(
event: H3Event,
rateLimiterConfig: RateLimiterConfig,
redisClient: RedisClientType,
) {
// Skip rate limiting if disabled
if (!rateLimiterConfig.enabled) {
return;
}

try {
const result = await checkRateLimit(event, rateLimiterConfig, redisClient);

setResponseHeaders(event, {
['X-RateLimit-Limit']: result.limit.toString(),
['X-RateLimit-Remaining']: result.remaining.toString(),
['X-RateLimit-Reset']: result.resetIn.toString(),
});

// Block request if rate limit exceeded
if (!result.allowed) {
throw createError({
statusCode: 429,
statusMessage: 'Too Many Requests',
message: `Rate limit exceeded. Please wait ${result.resetIn} seconds before trying again.`,
});
}
} catch (error) {
// If it's already a 429 error, re-throw it
if (error && typeof error === 'object' && 'statusCode' in error && error.statusCode === 429) {
throw error;
}

// Log other errors but don't block the request. This way the app keeps working even if Redis
// is down or the rate limiter fails for some other reason.
console.error('Rate limiter error:', error);
}
}
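`checkRateLimit` itself lives in `../utils/rate-limiter` and is not shown in this diff. For orientation only, a fixed-window counter is one common way to produce the `{ allowed, limit, remaining, resetIn, current }` result the middleware consumes. The sketch below illustrates that pattern under the "hashes IP addresses for GDPR compliance" note; the key format and HMAC-based hashing are assumptions, rules and exclusions are ignored for brevity, and none of it should be read as the PR's actual implementation.

```ts
// Illustrative fixed-window rate limit check — NOT the implementation from this PR.
import { createHmac } from 'node:crypto';
import type { H3Event } from 'h3';
import type { RedisClientType } from '@redis/client';
import type { RateLimiterConfig } from '~~/server/types/rate-limiter';

export async function checkRateLimitSketch(
  event: H3Event,
  config: RateLimiterConfig,
  redis: RedisClientType,
) {
  const { maxRequests, windowSeconds } = config.defaultLimit;

  // Hash the client IP so no raw address is stored in Redis (assumed approach).
  const ip = event.node.req.socket.remoteAddress ?? 'unknown';
  const hashedIp = createHmac('sha256', config.secret).update(ip).digest('hex');
  const key = `rate-limit:${hashedIp}:${event.path}`; // hypothetical key format

  // Fixed window: count the request and start the expiry when the window opens.
  const current = await redis.incr(key);
  if (current === 1) {
    await redis.expire(key, windowSeconds);
  }
  const resetIn = Math.max(await redis.ttl(key), 0);

  return {
    allowed: current <= maxRequests,
    limit: maxRequests,
    remaining: Math.max(maxRequests - current, 0),
    resetIn,
    current,
  };
}
```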