Compare commits


1 Commit

Commit a1c48bb792: Merge pull request 'refactor(modules): mark DatabaseModule and CryptoModule as global and remove redundant imports' (#10) from dev into prod
Reviewed-on: #10
Date: 2026-01-14 22:52:03 +01:00
Checks (some checks failed):
  Backend Tests / test (push): cancelled
  Lint / lint (push): cancelled
  Deploy to Production / deploy (push): successful in 6m10s
30 changed files with 129 additions and 737 deletions

View File

@@ -1,12 +1,8 @@
name: Backend Tests
on:
push:
paths:
- 'backend/**'
pull_request:
paths:
- 'backend/**'
jobs:
test:
@@ -18,19 +14,9 @@ jobs:
version: 9
- uses: actions/setup-node@v4
with:
node-version: 20
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
- uses: actions/cache@v4
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
node-version: 22
cache: 'pnpm'
- name: Install dependencies
run: pnpm install --frozen-lockfile --prefer-offline
run: pnpm install
- name: Run Backend Tests
run: pnpm -F @memegoat/backend test

View File

@@ -1,63 +1,61 @@
name: Deploy to Production
on:
push:
branches:
- main
- prod
jobs:
validate:
name: Validate Build & Lint
deploy:
runs-on: ubuntu-latest
strategy:
matrix:
component: [backend, frontend, documentation]
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 9
uses: actions/checkout@v3
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version: 20
- name: Install pnpm
uses: pnpm/action-setup@v2
with:
version: 8
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITEA_ENV
- name: Setup pnpm cache
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install --frozen-lockfile --prefer-offline
run: pnpm install
- name: Lint ${{ matrix.component }}
run: pnpm -F @memegoat/${{ matrix.component }} lint
- name: Lint - Backend
run: pnpm run lint:back
- name: Build ${{ matrix.component }}
run: pnpm -F @memegoat/${{ matrix.component }} build
- name: Build - Backend
run: pnpm run build:back
env:
NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}
deploy:
name: Deploy to Production
needs: validate
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Lint - Frontend
run: pnpm run lint:front
- name: Build - Frontend
run: pnpm run build:front
- name: Lint - Documentation
run: pnpm run lint:docs
- name: Build - Documentation
run: pnpm run build:docs
- name: Deploy with Docker Compose
run: |

View File

@@ -1,23 +1,14 @@
name: Lint
on:
push:
paths:
- 'frontend/**'
- 'backend/**'
- 'documentation/**'
pull_request:
paths:
- 'frontend/**'
- 'backend/**'
- 'documentation/**'
jobs:
lint:
runs-on: ubuntu-latest
strategy:
matrix:
component: [backend, frontend, documentation]
steps:
- uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
@@ -25,19 +16,16 @@ jobs:
version: 9
- uses: actions/setup-node@v4
with:
node-version: 20
- name: Get pnpm store directory
id: pnpm-cache
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
- uses: actions/cache@v4
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
node-version: 22
cache: 'pnpm'
- name: Install dependencies
run: pnpm install --frozen-lockfile --prefer-offline
- name: Lint ${{ matrix.component }}
run: pnpm -F @memegoat/${{ matrix.component }} lint
run: pnpm install
- name: Lint Frontend
if: success() || failure()
run: pnpm -F @memegoat/frontend lint
- name: Lint Backend
if: success() || failure()
run: pnpm -F @memegoat/backend lint
- name: Lint Documentation
if: success() || failure()
run: pnpm -F @bypass/documentation lint

View File

@@ -1,225 +0,0 @@
{
"name": "@memegoat/source",
"version": "0.0.1",
"description": "",
"scripts": {
"build": "pnpm run build:back && pnpm run build:front && pnpm run build:docs",
"build:front": "pnpm run -F @memegoat/frontend build",
"build:back": "pnpm run -F @memegoat/backend build",
"build:docs": "pnpm run -F @memegoat/documentation build",
"lint": "pnpm run lint:back && pnpm run lint:front && pnpm run lint:docs",
"lint:back": "pnpm run -F @memegoat/backend lint",
"lint:front": "pnpm run -F @memegoat/frontend lint",
"lint:docs": "pnpm run -F @memegoat/documentation lint",
"test": "pnpm run test:back && pnpm run test:front",
"test:back": "pnpm run -F @memegoat/backend test",
"test:front": "pnpm run -F @memegoat/frontend test",
"format": "pnpm run format:back && pnpm run format:front && pnpm run format:docs",
"format:back": "pnpm run -F @memegoat/backend format",
"format:front": "pnpm run -F @memegoat/frontend format",
"format:docs": "pnpm run -F @memegoat/documentation format",
"upgrade": "pnpm dlx taze minor"
},
"keywords": [],
"author": {
"name": "Mathis HERRIOT",
"email": "mherriot.pro@proton.me",
"role": "Author"
},
"license": "AGPL-3.0-only",
"devDependencies": {
"@biomejs/biome": "2.3.11"
}
}
{
"name": "@memegoat/backend",
"version": "0.0.1",
"description": "",
"author": "",
"private": true,
"license": "UNLICENSED",
"files": [
"dist",
".migrations",
"drizzle.config.ts"
],
"scripts": {
"build": "nest build",
"lint": "biome check",
"lint:write": "biome check --write",
"format": "biome format --write",
"start": "nest start",
"start:dev": "nest start --watch",
"start:debug": "nest start --debug --watch",
"start:prod": "node dist/main",
"test": "jest",
"test:watch": "jest --watch",
"test:cov": "jest --coverage",
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
"test:e2e": "jest --config ./test/jest-e2e.json",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"db:studio": "drizzle-kit studio"
},
"dependencies": {
"@nestjs-modules/mailer": "^2.0.2",
"@nestjs/cache-manager": "^3.1.0",
"@nestjs/common": "^11.0.1",
"@nestjs/config": "^4.0.2",
"@nestjs/core": "^11.0.1",
"@nestjs/mapped-types": "^2.1.0",
"@nestjs/platform-express": "^11.0.1",
"@nestjs/schedule": "^6.1.0",
"@nestjs/throttler": "^6.5.0",
"@noble/post-quantum": "^0.5.4",
"@node-rs/argon2": "^2.0.2",
"@sentry/nestjs": "^10.32.1",
"@sentry/profiling-node": "^10.32.1",
"cache-manager": "^7.2.7",
"cache-manager-redis-yet": "^5.1.5",
"clamscan": "^2.4.0",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.3",
"dotenv": "^17.2.3",
"drizzle-orm": "^0.45.1",
"fluent-ffmpeg": "^2.1.3",
"helmet": "^8.1.0",
"iron-session": "^8.0.4",
"jose": "^6.1.3",
"minio": "^8.0.6",
"nodemailer": "^7.0.12",
"otplib": "^12.0.1",
"pg": "^8.16.3",
"qrcode": "^1.5.4",
"reflect-metadata": "^0.2.2",
"rxjs": "^7.8.1",
"sharp": "^0.34.5",
"uuid": "^13.0.0",
"zod": "^4.3.5",
"drizzle-kit": "^0.31.8"
},
"devDependencies": {
"@nestjs/cli": "^11.0.0",
"globals": "^16.0.0",
"jest": "^30.0.0",
"source-map-support": "^0.5.21",
"supertest": "^7.0.0",
"ts-jest": "^29.2.5",
"ts-loader": "^9.5.2",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0",
"tsx": "^4.21.0",
"typescript": "^5.7.3",
"typescript-eslint": "^8.20.0",
"@nestjs/schematics": "^11.0.0",
"@nestjs/testing": "^11.0.1",
"@types/express": "^5.0.0",
"@types/fluent-ffmpeg": "^2.1.28",
"@types/jest": "^30.0.0",
"@types/multer": "^2.0.0",
"@types/node": "^22.10.7",
"@types/nodemailer": "^7.0.4",
"@types/pg": "^8.16.0",
"@types/qrcode": "^1.5.6",
"@types/sharp": "^0.32.0",
"@types/supertest": "^6.0.2",
"@types/uuid": "^11.0.0",
"drizzle-kit": "^0.31.8"
},
"jest": {
"moduleFileExtensions": [
"js",
"json",
"ts"
],
"rootDir": "src",
"testRegex": ".*\\.spec\\.ts$",
"collectCoverageFrom": [
"**/*.(t|j)s"
],
"coverageDirectory": "../coverage",
"testEnvironment": "node",
"transformIgnorePatterns": [
"node_modules/(?!(.pnpm/)?(jose|@noble|uuid)/)"
],
"transform": {
"^.+\\.(t|j)sx?$": "ts-jest"
},
"moduleNameMapper": {
"^@noble/post-quantum/(.*)$": "<rootDir>/../node_modules/@noble/post-quantum/$1",
"^@noble/hashes/(.*)$": "<rootDir>/../node_modules/@noble/hashes/$1"
}
}
}
{
"name": "@memegoat/frontend",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "biome check",
"format": "biome format --write"
},
"dependencies": {
"@hookform/resolvers": "^5.2.2",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-aspect-ratio": "^1.1.8",
"@radix-ui/react-avatar": "^1.1.11",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-context-menu": "^2.2.16",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-hover-card": "^1.1.15",
"@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-menubar": "^1.1.16",
"@radix-ui/react-navigation-menu": "^1.2.14",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-radio-group": "^1.3.8",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-toggle": "^1.1.10",
"@radix-ui/react-toggle-group": "^1.1.11",
"@radix-ui/react-tooltip": "^1.2.8",
"axios": "^1.13.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"date-fns": "^4.1.0",
"embla-carousel-react": "^8.6.0",
"input-otp": "^1.4.2",
"lucide-react": "^0.562.0",
"next": "16.1.1",
"next-themes": "^0.4.6",
"react": "19.2.3",
"react-day-picker": "^9.13.0",
"react-dom": "19.2.3",
"react-hook-form": "^7.71.1",
"react-resizable-panels": "^4.4.1",
"recharts": "2.15.4",
"sonner": "^2.0.7",
"tailwind-merge": "^3.4.0",
"vaul": "^1.1.2",
"zod": "^4.3.5"
},
"devDependencies": {
"@biomejs/biome": "2.3.11",
"@tailwindcss/postcss": "^4",
"@types/node": "^20",
"@types/react": "^19",
"@types/react-dom": "^19",
"babel-plugin-react-compiler": "1.0.0",
"tailwindcss": "^4",
"tw-animate-css": "^1.4.0",
"typescript": "^5"
}
}

View File

@@ -1,50 +0,0 @@
# 🐐 Memegoat - Roadmap & Production Criteria
This document defines the objectives, technical criteria, and features to reach before the Memegoat project can be considered production-ready and compliant with European (GDPR) and French regulations.
## 1. 🏗️ Architecture & Infrastructure
- [x] NestJS backend (TypeScript)
- [x] PostgreSQL database with Drizzle ORM
- [x] S3-compatible object storage (MinIO)
- [x] Emailing service (Nodemailer / SMTPS)
- [x] Technical documentation & API reference (`docs.memegoat.fr`)
- [x] Health checks (`/health`)
- [x] Environment variable management (validated with Zod)
- [ ] CI/CD (Build, Lint, Test, Deploy)
## 2. 🔐 Security & Authentication
- [x] Password hashing (Argon2id)
- [x] Robust session management (JWT with refresh token and rotation)
- [x] Working RBAC (Role-Based Access Control)
- [x] API key system (hashed in the database)
- [x] Two-factor authentication (2FA / TOTP)
- [x] Rate limiting (Throttler)
- [x] Strict input validation (DTOs + ValidationPipe)
- [x] Protection against OWASP vulnerabilities (Helmet, CORS)
## 3. ⚖️ GDPR Compliance (EU & France)
- [x] Native encryption of personal data (PII) via PGP (pgcrypto)
- [x] Blind indexing of the email address for search/uniqueness (see the sketch after this roadmap)
- [x] Complete audit logging for sensitive actions
- [x] Consent management (versioned ToS / privacy policy)
- [x] Right to erasure: deletion flow (soft delete -> permanent purge)
- [x] Right to portability: user data export (JSON)
- [x] Automatic purge of stale data (reports, expired sessions)
- [x] IP address anonymization (hashing) in logs
## 4. 🖼️ Core Features (Media & Gallery)
- [x] Browsing (trends, recent, favorites)
- [x] Search by tags, categories, author, text
- [x] Favorites management
- [x] Secure upload via S3 (presigned URLs)
- [x] Antivirus scanning (ClamAV) and media processing (WebP, WebM, AVIF, AV1)
- [x] Size and format limits for incoming files (configurable)
- [x] Report system and moderation workflow
- [ ] SEO: dynamic meta tags and semantic slugs
## 5. ✅ Quality & Robustness
- [ ] Unit test coverage (Jest) > 80%
- [ ] Integration and E2E tests
- [x] Centralized error handling (NestJS filters)
- [ ] Monitoring and centralized logs (e.g., Sentry, ELK/Loki)
- [ ] Performance: caching (Redis) for trends and frequent searches
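
Two of the checked items above, Argon2id password hashing and blind indexing of the email, surface later in this diff as `findByEmailHash(emailHash)` in AuthService. As a minimal sketch only: a blind index is typically an HMAC of the normalized value under a dedicated key, so the same address always produces the same hash and can be used for lookups and uniqueness checks without a plaintext column. The key variable and normalization below are assumptions, not code from the repository.

```ts
import { createHmac } from "node:crypto";

// Illustrative blind-index helper (assumed key handling): deterministic per
// e-mail, so it supports equality lookups without storing the address in clear.
const BLIND_INDEX_KEY = process.env.EMAIL_BLIND_INDEX_KEY ?? "dev-only-key"; // assumption

export function emailBlindIndex(email: string): string {
  const normalized = email.trim().toLowerCase(); // normalize before hashing
  return createHmac("sha256", BLIND_INDEX_KEY).update(normalized).digest("hex");
}

// Usage in the spirit of the AuthService hunk further down:
// const user = await usersService.findByEmailHash(emailBlindIndex(dto.email));
```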

View File

@@ -1,5 +1,4 @@
# syntax=docker/dockerfile:1
FROM node:22-alpine AS base
FROM node:22-slim AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable && corepack prepare pnpm@latest --activate
@@ -10,17 +9,10 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
COPY backend/package.json ./backend/
COPY frontend/package.json ./frontend/
COPY documentation/package.json ./documentation/
# Use the pnpm cache and a frozen install
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
pnpm install --frozen-lockfile
RUN pnpm install --no-frozen-lockfile
COPY . .
# Second pass with the cache for scripts/links
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
pnpm install --frozen-lockfile
# Reinstall after COPY to make sure all lifecycle scripts and links are correct
RUN pnpm install --no-frozen-lockfile
RUN pnpm run --filter @memegoat/backend build
RUN pnpm deploy --filter=@memegoat/backend --prod --legacy /app
RUN cp -r backend/dist /app/dist

View File

@@ -1,6 +1,6 @@
{
"name": "@memegoat/backend",
"version": "0.0.0",
"version": "0.0.1",
"description": "",
"author": "",
"private": true,

View File

@@ -12,7 +12,6 @@ import { AuthModule } from "./auth/auth.module";
import { CategoriesModule } from "./categories/categories.module";
import { CommonModule } from "./common/common.module";
import { CrawlerDetectionMiddleware } from "./common/middlewares/crawler-detection.middleware";
import { HTTPLoggerMiddleware } from "./common/middlewares/http-logger.middleware";
import { validateEnv } from "./config/env.schema";
import { ContentsModule } from "./contents/contents.module";
import { CryptoModule } from "./crypto/crypto.module";
@@ -77,8 +76,6 @@ import { UsersModule } from "./users/users.module";
})
export class AppModule implements NestModule {
configure(consumer: MiddlewareConsumer) {
consumer
.apply(HTTPLoggerMiddleware, CrawlerDetectionMiddleware)
.forRoutes("*");
consumer.apply(CrawlerDetectionMiddleware).forRoutes("*");
}
}
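
The configure() block now applies only CrawlerDetectionMiddleware (the HTTP logger is removed, consistent with the deleted http-logger.middleware further down). The crawler middleware itself is not part of this diff; purely as an illustration of the kind of check such a middleware usually performs (presumably feeding decisions like serving the generateBotHtml output shown later), a user-agent based sketch could look like the following. The class name, regex, and request flag are assumptions.

```ts
import { Injectable, NestMiddleware } from "@nestjs/common";
import { NextFunction, Request, Response } from "express";

// Illustrative only: the real CrawlerDetectionMiddleware is not shown in this diff.
const BOT_UA = /bot|crawler|spider|facebookexternalhit|twitterbot|slackbot/i; // assumed list

@Injectable()
export class CrawlerDetectionSketch implements NestMiddleware {
  use(req: Request & { isCrawler?: boolean }, _res: Response, next: NextFunction): void {
    const userAgent = req.get("user-agent") ?? "";
    req.isCrawler = BOT_UA.test(userAgent); // downstream handlers can branch on this flag
    next();
  }
}
```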

View File

@@ -110,7 +110,6 @@ export class AuthService {
const user = await this.usersService.findByEmailHash(emailHash);
if (!user) {
this.logger.warn(`Login failed: user not found for email hash`);
throw new UnauthorizedException("Invalid credentials");
}
@@ -120,12 +119,10 @@ export class AuthService {
);
if (!isPasswordValid) {
this.logger.warn(`Login failed: invalid password for user ${user.uuid}`);
throw new UnauthorizedException("Invalid credentials");
}
if (user.isTwoFactorEnabled) {
this.logger.log(`2FA required for user ${user.uuid}`);
return {
message: "2FA required",
requires2FA: true,
@@ -144,7 +141,6 @@ export class AuthService {
ip,
);
this.logger.log(`User ${user.uuid} logged in successfully`);
return {
message: "User logged in successfully",
access_token: accessToken,
@@ -169,9 +165,6 @@ export class AuthService {
const isValid = authenticator.verify({ token, secret });
if (!isValid) {
this.logger.warn(
`2FA verification failed for user ${userId}: invalid token`,
);
throw new UnauthorizedException("Invalid 2FA token");
}
@@ -186,7 +179,6 @@ export class AuthService {
ip,
);
this.logger.log(`User ${userId} logged in successfully via 2FA`);
return {
message: "User logged in successfully (2FA)",
access_token: accessToken,
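
The 2FA branch above ends in otplib's `authenticator.verify({ token, secret })`, visible earlier in this hunk. For context, a self-contained round trip with the same API (secret handling here is illustrative; the real service stores the secret per user):

```ts
import { authenticator } from "otplib";

// Stand-alone TOTP round trip using the same otplib calls as AuthService.
const secret = authenticator.generateSecret(); // persisted per user in the real app
const token = authenticator.generate(secret);  // what the user's authenticator app shows

console.log(authenticator.verify({ token, secret }));           // true while the code is current
console.log(authenticator.verify({ token: "000000", secret })); // false (barring a collision)
```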

View File

@@ -9,14 +9,6 @@ import {
import * as Sentry from "@sentry/nestjs";
import { Request, Response } from "express";
interface RequestWithUser extends Request {
user?: {
sub?: string;
username?: string;
id?: string;
};
}
@Catch()
export class AllExceptionsFilter implements ExceptionFilter {
private readonly logger = new Logger("ExceptionFilter");
@@ -24,7 +16,7 @@ export class AllExceptionsFilter implements ExceptionFilter {
catch(exception: unknown, host: ArgumentsHost) {
const ctx = host.switchToHttp();
const response = ctx.getResponse<Response>();
const request = ctx.getRequest<RequestWithUser>();
const request = ctx.getRequest<Request>();
const status =
exception instanceof HttpException
@@ -36,9 +28,6 @@ export class AllExceptionsFilter implements ExceptionFilter {
? exception.getResponse()
: "Internal server error";
const userId = request.user?.sub || request.user?.id;
const userPart = userId ? `[User: ${userId}] ` : "";
const errorResponse = {
statusCode: status,
timestamp: new Date().toISOString(),
@@ -53,12 +42,12 @@ export class AllExceptionsFilter implements ExceptionFilter {
if (status === HttpStatus.INTERNAL_SERVER_ERROR) {
Sentry.captureException(exception);
this.logger.error(
`${userPart}${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
`${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
exception instanceof Error ? exception.stack : "",
);
} else {
this.logger.warn(
`${userPart}${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
`${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
);
}

View File

@@ -33,6 +33,4 @@ export interface IStorageService {
sourceBucketName?: string,
destinationBucketName?: string,
): Promise<string>;
getPublicUrl(storageKey: string): string;
}

View File

@@ -1,37 +0,0 @@
import { createHash } from "node:crypto";
import { Injectable, Logger, NestMiddleware } from "@nestjs/common";
import { NextFunction, Request, Response } from "express";
@Injectable()
export class HTTPLoggerMiddleware implements NestMiddleware {
private readonly logger = new Logger("HTTP");
use(request: Request, response: Response, next: NextFunction): void {
const { method, originalUrl, ip } = request;
const userAgent = request.get("user-agent") || "";
const startTime = Date.now();
response.on("finish", () => {
const { statusCode } = response;
const contentLength = response.get("content-length");
const duration = Date.now() - startTime;
const hashedIp = createHash("sha256")
.update(ip as string)
.digest("hex");
const message = `${method} ${originalUrl} ${statusCode} ${contentLength || 0} - ${userAgent} ${hashedIp} +${duration}ms`;
if (statusCode >= 500) {
return this.logger.error(message);
}
if (statusCode >= 400) {
return this.logger.warn(message);
}
return this.logger.log(message);
});
next();
}
}

View File

@@ -33,7 +33,6 @@ export const envSchema = z.object({
MAIL_FROM: z.string().email(),
DOMAIN_NAME: z.string(),
API_URL: z.string().url().optional(),
// Sentry
SENTRY_DSN: z.string().optional(),
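
Only a slice of envSchema appears here, but app.module.ts imports validateEnv from this same config file. Its body is not in the diff; as an assumption about its typical shape, it parses process.env against the schema once at bootstrap and fails fast on bad configuration:

```ts
import { z } from "zod";

// Assumed shape of a validateEnv helper (the real implementation is not shown):
// parse the raw environment and throw a readable error if anything is invalid.
export function validateEnvSketch<T extends z.ZodTypeAny>(
  schema: T,
  env: NodeJS.ProcessEnv = process.env,
): z.infer<T> {
  const result = schema.safeParse(env);
  if (!result.success) {
    throw new Error(`Invalid environment configuration: ${result.error.message}`);
  }
  return result.data;
}
```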

View File

@@ -30,7 +30,6 @@ describe("ContentsService", () => {
const mockS3Service = {
getUploadUrl: jest.fn(),
uploadFile: jest.fn(),
getPublicUrl: jest.fn(),
};
const mockMediaService = {

View File

@@ -100,7 +100,6 @@ export class ContentsService {
// 3. Upload to S3
const key = `contents/${userId}/${Date.now()}-${uuidv4()}.${processed.extension}`;
await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
this.logger.log(`File uploaded successfully to S3: ${key}`);
// 4. Create the database record
return await this.create(userId, {
@@ -129,11 +128,11 @@ export class ContentsService {
const processedData = data.map((content) => ({
...content,
url: this.s3Service.getPublicUrl(content.storageKey),
url: this.getFileUrl(content.storageKey),
author: {
...content.author,
avatarUrl: content.author?.avatarUrl
? this.s3Service.getPublicUrl(content.author.avatarUrl)
? this.getFileUrl(content.author.avatarUrl)
: null,
},
}));
@@ -190,18 +189,18 @@ export class ContentsService {
return {
...content,
url: this.s3Service.getPublicUrl(content.storageKey),
url: this.getFileUrl(content.storageKey),
author: {
...content.author,
avatarUrl: content.author?.avatarUrl
? this.s3Service.getPublicUrl(content.author.avatarUrl)
? this.getFileUrl(content.author.avatarUrl)
: null,
},
};
}
generateBotHtml(content: { title: string; storageKey: string }): string {
const imageUrl = this.s3Service.getPublicUrl(content.storageKey);
const imageUrl = this.getFileUrl(content.storageKey);
return `<!DOCTYPE html>
<html>
<head>
@@ -222,6 +221,19 @@ export class ContentsService {
</html>`;
}
getFileUrl(storageKey: string): string {
const endpoint = this.configService.get("S3_ENDPOINT");
const port = this.configService.get("S3_PORT");
const protocol =
this.configService.get("S3_USE_SSL") === true ? "https" : "http";
const bucket = this.configService.get("S3_BUCKET_NAME");
if (endpoint === "localhost" || endpoint === "127.0.0.1") {
return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
}
return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
}
private generateSlug(text: string): string {
return text
.toLowerCase()
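
The new getFileUrl helper builds public object URLs straight from the S3 settings instead of going through the /media proxy route that this changeset deletes. A quick sketch of what the two branches produce (endpoint, port, and bucket values below are placeholders, not project configuration):

```ts
// Same branching as the getFileUrl added above, extracted as a pure function
// so the local and remote cases are easy to compare.
function fileUrl(
  cfg: { endpoint: string; port?: number; useSsl: boolean; bucket: string },
  key: string,
): string {
  const protocol = cfg.useSsl ? "https" : "http";
  if (cfg.endpoint === "localhost" || cfg.endpoint === "127.0.0.1") {
    return `${protocol}://${cfg.endpoint}:${cfg.port}/${cfg.bucket}/${key}`;
  }
  return `${protocol}://${cfg.endpoint}/${cfg.bucket}/${key}`;
}

// Local MinIO: http://localhost:9000/memegoat/contents/u1/demo.webp
console.log(fileUrl({ endpoint: "localhost", port: 9000, useSsl: false, bucket: "memegoat" }, "contents/u1/demo.webp"));
// Remote endpoint: https://s3.example.com/memegoat/contents/u1/demo.webp
console.log(fileUrl({ endpoint: "s3.example.com", useSsl: true, bucket: "memegoat" }, "contents/u1/demo.webp"));
```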

View File

@@ -1,61 +0,0 @@
import { Readable } from "node:stream";
import { NotFoundException } from "@nestjs/common";
import { Test, TestingModule } from "@nestjs/testing";
import type { Response } from "express";
import { S3Service } from "../s3/s3.service";
import { MediaController } from "./media.controller";
describe("MediaController", () => {
let controller: MediaController;
const mockS3Service = {
getFileInfo: jest.fn(),
getFile: jest.fn(),
};
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
controllers: [MediaController],
providers: [{ provide: S3Service, useValue: mockS3Service }],
}).compile();
controller = module.get<MediaController>(MediaController);
});
it("should be defined", () => {
expect(controller).toBeDefined();
});
describe("getFile", () => {
it("should stream the file and set headers with path containing slashes", async () => {
const res = {
setHeader: jest.fn(),
} as unknown as Response;
const stream = new Readable();
stream.pipe = jest.fn();
const key = "contents/user-id/test.webp";
mockS3Service.getFileInfo.mockResolvedValue({
size: 100,
metaData: { "content-type": "image/webp" },
});
mockS3Service.getFile.mockResolvedValue(stream);
await controller.getFile(key, res);
expect(mockS3Service.getFileInfo).toHaveBeenCalledWith(key);
expect(res.setHeader).toHaveBeenCalledWith("Content-Type", "image/webp");
expect(res.setHeader).toHaveBeenCalledWith("Content-Length", 100);
expect(stream.pipe).toHaveBeenCalledWith(res);
});
it("should throw NotFoundException if file is not found", async () => {
mockS3Service.getFileInfo.mockRejectedValue(new Error("Not found"));
const res = {} as unknown as Response;
await expect(controller.getFile("invalid", res)).rejects.toThrow(
NotFoundException,
);
});
});
});

View File

@@ -1,30 +0,0 @@
import { Controller, Get, NotFoundException, Param, Res } from "@nestjs/common";
import type { Response } from "express";
import type { BucketItemStat } from "minio";
import { S3Service } from "../s3/s3.service";
@Controller("media")
export class MediaController {
constructor(private readonly s3Service: S3Service) {}
@Get("*key")
async getFile(@Param("key") key: string, @Res() res: Response) {
try {
const stats = (await this.s3Service.getFileInfo(key)) as BucketItemStat;
const stream = await this.s3Service.getFile(key);
const contentType =
stats.metaData?.["content-type"] ||
stats.metadata?.["content-type"] ||
"application/octet-stream";
res.setHeader("Content-Type", contentType);
res.setHeader("Content-Length", stats.size);
res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
stream.pipe(res);
} catch (_error) {
throw new NotFoundException("Fichier non trouvé");
}
}
}

View File

@@ -1,13 +1,9 @@
import { Module } from "@nestjs/common";
import { S3Module } from "../s3/s3.module";
import { MediaController } from "./media.controller";
import { MediaService } from "./media.service";
import { ImageProcessorStrategy } from "./strategies/image-processor.strategy";
import { VideoProcessorStrategy } from "./strategies/video-processor.strategy";
@Module({
imports: [S3Module],
controllers: [MediaController],
providers: [MediaService, ImageProcessorStrategy, VideoProcessorStrategy],
exports: [MediaService],
})

View File

@@ -7,7 +7,7 @@ jest.mock("minio");
describe("S3Service", () => {
let service: S3Service;
let configService: ConfigService;
let _configService: ConfigService;
// biome-ignore lint/suspicious/noExplicitAny: Fine for testing purposes
let minioClient: any;
@@ -42,7 +42,7 @@ describe("S3Service", () => {
}).compile();
service = module.get<S3Service>(S3Service);
configService = module.get<ConfigService>(ConfigService);
_configService = module.get<ConfigService>(ConfigService);
});
it("should be defined", () => {
@@ -185,39 +185,35 @@ describe("S3Service", () => {
});
});
describe("getPublicUrl", () => {
it("should use API_URL if provided", () => {
(configService.get as jest.Mock).mockImplementation((key: string) => {
if (key === "API_URL") return "https://api.test.com";
return null;
});
const url = service.getPublicUrl("test.webp");
expect(url).toBe("https://api.test.com/media/test.webp");
describe("moveFile", () => {
it("should move file within default bucket", async () => {
const source = "source.txt";
const dest = "dest.txt";
await service.moveFile(source, dest);
expect(minioClient.copyObject).toHaveBeenCalledWith(
"memegoat",
dest,
"/memegoat/source.txt",
expect.any(Minio.CopyConditions),
);
expect(minioClient.removeObject).toHaveBeenCalledWith("memegoat", source);
});
it("should use DOMAIN_NAME and PORT for localhost", () => {
(configService.get as jest.Mock).mockImplementation(
(key: string, def: unknown) => {
if (key === "API_URL") return null;
if (key === "DOMAIN_NAME") return "localhost";
if (key === "PORT") return 3000;
return def;
},
);
const url = service.getPublicUrl("test.webp");
expect(url).toBe("http://localhost:3000/media/test.webp");
});
it("should move file between different buckets", async () => {
const source = "source.txt";
const dest = "dest.txt";
const sBucket = "source-bucket";
const dBucket = "dest-bucket";
await service.moveFile(source, dest, sBucket, dBucket);
it("should use api.DOMAIN_NAME for production", () => {
(configService.get as jest.Mock).mockImplementation(
(key: string, def: unknown) => {
if (key === "API_URL") return null;
if (key === "DOMAIN_NAME") return "memegoat.fr";
return def;
},
expect(minioClient.copyObject).toHaveBeenCalledWith(
dBucket,
dest,
`/${sBucket}/${source}`,
expect.any(Minio.CopyConditions),
);
const url = service.getPublicUrl("test.webp");
expect(url).toBe("https://api.memegoat.fr/media/test.webp");
expect(minioClient.removeObject).toHaveBeenCalledWith(sBucket, source);
});
});
});

View File

@@ -54,7 +54,6 @@ export class S3Service implements OnModuleInit, IStorageService {
...metaData,
"Content-Type": mimeType,
});
this.logger.log(`File uploaded successfully: ${fileName} to ${bucketName}`);
return fileName;
} catch (error) {
this.logger.error(`Error uploading file to ${bucketName}: ${error.message}`);
@@ -114,7 +113,6 @@ export class S3Service implements OnModuleInit, IStorageService {
async deleteFile(fileName: string, bucketName: string = this.bucketName) {
try {
await this.minioClient.removeObject(bucketName, fileName);
this.logger.log(`File deleted successfully: ${fileName} from ${bucketName}`);
} catch (error) {
this.logger.error(
`Error deleting file from ${bucketName}: ${error.message}`,
@@ -157,22 +155,4 @@ export class S3Service implements OnModuleInit, IStorageService {
throw error;
}
}
getPublicUrl(storageKey: string): string {
const apiUrl = this.configService.get<string>("API_URL");
const domain = this.configService.get<string>("DOMAIN_NAME", "localhost");
const port = this.configService.get<number>("PORT", 3000);
let baseUrl: string;
if (apiUrl) {
baseUrl = apiUrl.replace(/\/$/, "");
} else if (domain === "localhost" || domain === "127.0.0.1") {
baseUrl = `http://${domain}:${port}`;
} else {
baseUrl = `https://api.${domain}`;
}
return `${baseUrl}/media/${storageKey}`;
}
}
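
For contrast with the new getFileUrl, the getPublicUrl removed here resolved a base URL in three steps (explicit API_URL, localhost plus port, otherwise api.<domain>) and always pointed at the now-deleted /media route. Condensed as a pure function, with the same outputs the deleted spec expected:

```ts
// Condensed restatement of the deleted S3Service.getPublicUrl, for comparison only.
function publicUrl(
  cfg: { apiUrl?: string; domain?: string; port?: number },
  storageKey: string,
): string {
  const domain = cfg.domain ?? "localhost";
  const port = cfg.port ?? 3000;
  let baseUrl: string;
  if (cfg.apiUrl) {
    baseUrl = cfg.apiUrl.replace(/\/$/, "");
  } else if (domain === "localhost" || domain === "127.0.0.1") {
    baseUrl = `http://${domain}:${port}`;
  } else {
    baseUrl = `https://api.${domain}`;
  }
  return `${baseUrl}/media/${storageKey}`;
}

console.log(publicUrl({ apiUrl: "https://api.test.com" }, "test.webp"));  // https://api.test.com/media/test.webp
console.log(publicUrl({ domain: "localhost", port: 3000 }, "test.webp")); // http://localhost:3000/media/test.webp
console.log(publicUrl({ domain: "memegoat.fr" }, "test.webp"));           // https://api.memegoat.fr/media/test.webp
```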

View File

@@ -58,7 +58,6 @@ describe("UsersService", () => {
const mockS3Service = {
uploadFile: jest.fn(),
getPublicUrl: jest.fn(),
};
const mockConfigService = {

View File

@@ -6,6 +6,7 @@ import {
Injectable,
Logger,
} from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import type { Cache } from "cache-manager";
import { v4 as uuidv4 } from "uuid";
import { RbacService } from "../auth/rbac.service";
@@ -27,6 +28,7 @@ export class UsersService {
private readonly rbacService: RbacService,
@Inject(MediaService) private readonly mediaService: IMediaService,
@Inject(S3Service) private readonly s3Service: IStorageService,
private readonly configService: ConfigService,
) {}
private async clearUserCache(username?: string) {
@@ -58,9 +60,7 @@ export class UsersService {
return {
...user,
avatarUrl: user.avatarUrl
? this.s3Service.getPublicUrl(user.avatarUrl)
: null,
avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
role: roles.includes("admin") ? "admin" : "user",
roles,
};
@@ -74,9 +74,7 @@ export class UsersService {
const processedData = data.map((user) => ({
...user,
avatarUrl: user.avatarUrl
? this.s3Service.getPublicUrl(user.avatarUrl)
: null,
avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
}));
return { data: processedData, totalCount };
@@ -88,9 +86,7 @@ export class UsersService {
return {
...user,
avatarUrl: user.avatarUrl
? this.s3Service.getPublicUrl(user.avatarUrl)
: null,
avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
};
}
@@ -143,7 +139,6 @@ export class UsersService {
// 3. Upload to S3
const key = `avatars/${uuid}/${Date.now()}-${uuidv4()}.${processed.extension}`;
await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
this.logger.log(`Avatar uploaded successfully to S3: ${key}`);
// 4. Update the database record
const user = await this.update(uuid, { avatarUrl: key });
@@ -198,4 +193,17 @@ export class UsersService {
async remove(uuid: string) {
return await this.usersRepository.softDeleteUserAndContents(uuid);
}
private getFileUrl(storageKey: string): string {
const endpoint = this.configService.get("S3_ENDPOINT");
const port = this.configService.get("S3_PORT");
const protocol =
this.configService.get("S3_USE_SSL") === true ? "https" : "http";
const bucket = this.configService.get("S3_BUCKET_NAME");
if (endpoint === "localhost" || endpoint === "127.0.0.1") {
return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
}
return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
}
}
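
UsersService now carries a private getFileUrl identical to the one added to ContentsService earlier in this diff. Should the duplication ever need to go, the logic is small enough for a single injectable provider; the class below is only a sketch of that shape (the name and module wiring are not from the repository).

```ts
import { Injectable } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";

// Hypothetical shared provider: one home for the S3 public-URL rule that
// ContentsService and UsersService each implement privately in this changeset.
@Injectable()
export class FileUrlService {
  constructor(private readonly configService: ConfigService) {}

  getFileUrl(storageKey: string): string {
    const endpoint = this.configService.get("S3_ENDPOINT");
    const port = this.configService.get("S3_PORT");
    const protocol = this.configService.get("S3_USE_SSL") === true ? "https" : "http";
    const bucket = this.configService.get("S3_BUCKET_NAME");
    if (endpoint === "localhost" || endpoint === "127.0.0.1") {
      return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
    }
    return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
  }
}
```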

View File

@@ -1,4 +1,4 @@
# syntax=docker/dockerfile:1
# syntax=docker.io/docker/dockerfile:1
FROM node:22-alpine AS base
ENV PNPM_HOME="/pnpm"
@@ -11,20 +11,11 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
COPY backend/package.json ./backend/
COPY frontend/package.json ./frontend/
COPY documentation/package.json ./documentation/
# Mount the pnpm cache
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
pnpm install --frozen-lockfile
RUN pnpm install --no-frozen-lockfile
COPY . .
# Second pass with the cache for scripts/links
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
pnpm install --frozen-lockfile
# Build with the Next.js cache
RUN --mount=type=cache,id=next-docs-cache,target=/usr/src/app/documentation/.next/cache \
pnpm run --filter @memegoat/documentation build
# Reinstall after COPY to make sure all lifecycle scripts and links are correct
RUN pnpm install --no-frozen-lockfile
RUN pnpm run --filter @memegoat/documentation build
FROM node:22-alpine AS runner
WORKDIR /app

View File

@@ -1,4 +1,4 @@
# syntax=docker/dockerfile:1
# syntax=docker.io/docker/dockerfile:1
FROM node:22-alpine AS base
ENV PNPM_HOME="/pnpm"
@@ -11,20 +11,11 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
COPY backend/package.json ./backend/
COPY frontend/package.json ./frontend/
COPY documentation/package.json ./documentation/
# Mount the pnpm cache
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
pnpm install --frozen-lockfile
RUN pnpm install --no-frozen-lockfile
COPY . .
# Second pass with the cache for scripts/links
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
pnpm install --frozen-lockfile
# Build with the Next.js cache
RUN --mount=type=cache,id=next-cache,target=/usr/src/app/frontend/.next/cache \
pnpm run --filter @memegoat/frontend build
# Reinstall after COPY to make sure all lifecycle scripts and links are correct
RUN pnpm install --no-frozen-lockfile
RUN pnpm run --filter @memegoat/frontend build
FROM node:22-alpine AS runner
WORKDIR /app

View File

@@ -1,6 +1,6 @@
{
"name": "@memegoat/frontend",
"version": "0.0.0",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "next dev",

View File

@@ -63,7 +63,7 @@ export default function HelpPage() {
<p className="text-muted-foreground">
N'hésitez pas à nous contacter sur nos réseaux sociaux ou par email.
</p>
<p className="font-semibold text-primary">contact@memegoat.fr</p>
<p className="font-semibold text-primary">contact@memegoat.local</p>
</div>
</div>
);

View File

@@ -228,7 +228,7 @@ export function AppSidebar() {
<span className="truncate font-semibold">
{user.displayName || user.username}
</span>
<span className="truncate text-xs">{user.role}</span>
<span className="truncate text-xs">{user.email}</span>
</div>
<ChevronRight className="ml-auto size-4 group-data-[collapsible=icon]:hidden" />
</SidebarMenuButton>

View File

@@ -1,30 +0,0 @@
Frontend implementation:
# Requirements
- Responsive in all formats (Tailwind CSS)
- Accessibility (A11Y)
- Real implementation only
- Site in French
- SEO done properly: robots.txt, sitemap.xml, etc.
- Use of shadcn/ui components
- A custom error page
- Use of the following Next.js features:
- Nested routes
- Dynamic routes
- Route groups
- Private folders
- Parallel and intercepted routes
- Prefetching pages
- Streaming pages
- Server and Client Components
- Cache Components
- Image optimization
- Incremental Static Regeneration
- Custom hooks
- Axios
The whole application is built on a dashboard/sidebar layout that integrates the routing.
The main page is the content browsing page.
In desktop mode, the sidebar sits on the left, the infinitely scrolling content in the middle, and the search parameters on the right.
In mobile mode, the sidebar is collapsed and the search parameters are exposed as a floating filter icon in the top right.

View File

@@ -1,10 +1,8 @@
{
"name": "@memegoat/source",
"version": "0.0.0",
"version": "0.0.1",
"description": "",
"scripts": {
"version:get": "cmake -P version.cmake GET",
"version:set": "cmake -P version.cmake SET",
"build": "pnpm run build:back && pnpm run build:front && pnpm run build:docs",
"build:front": "pnpm run -F @memegoat/frontend build",
"build:back": "pnpm run -F @memegoat/backend build",

View File

@@ -1,84 +0,0 @@
# version.cmake - Script to manage the SemVer version in one central place
# Usage: cmake -P version.cmake [GET|SET] [new_version]
set(PACKAGE_JSON_FILES
"${CMAKE_CURRENT_LIST_DIR}/package.json"
"${CMAKE_CURRENT_LIST_DIR}/backend/package.json"
"${CMAKE_CURRENT_LIST_DIR}/frontend/package.json"
)
# Function to read the version from the root package.json
function(get_current_version OUT_VAR)
file(READ "${CMAKE_CURRENT_LIST_DIR}/package.json" ROOT_JSON)
string(JSON CURRENT_VERSION GET "${ROOT_JSON}" "version")
set(${OUT_VAR} ${CURRENT_VERSION} PARENT_SCOPE)
endfunction()
# Function to create a git tag
function(create_git_tag VERSION)
find_package(Git QUIET)
if(GIT_FOUND)
execute_process(
COMMAND ${GIT_EXECUTABLE} tag -a "v${VERSION}" -m "Release v${VERSION}"
WORKING_DIRECTORY "${CMAKE_CURRENT_LIST_DIR}"
RESULT_VARIABLE TAG_RESULT
)
if(TAG_RESULT EQUAL 0)
message(STATUS "Tag v${VERSION} créé avec succès")
else()
message(WARNING "Échec de la création du tag v${VERSION}. Il existe peut-être déjà.")
endif()
else()
message(WARNING "Git non trouvé, impossible de créer le tag.")
endif()
endfunction()
# Function to update the version in every package.json file
function(set_new_version NEW_VERSION)
foreach(JSON_FILE ${PACKAGE_JSON_FILES})
if(EXISTS "${JSON_FILE}")
message(STATUS "Mise à jour de ${JSON_FILE} vers la version ${NEW_VERSION}")
file(READ "${JSON_FILE}" CONTENT)
# Use string(JSON ...) to edit the version when available (CMake >= 3.19)
# Otherwise a simple regex is enough for package.json
string(REGEX REPLACE "\"version\": \"[^\"]+\"" "\"version\": \"${NEW_VERSION}\"" NEW_CONTENT "${CONTENT}")
file(WRITE "${JSON_FILE}" "${NEW_CONTENT}")
else()
message(WARNING "Fichier non trouvé: ${JSON_FILE}")
endif()
endforeach()
# Ask the user whether to tag (or do it by default if specified)
create_git_tag(${NEW_VERSION})
endfunction()
# Main logic
set(ARG_OFFSET 0)
while(ARG_OFFSET LESS CMAKE_ARGC)
if("${CMAKE_ARGV${ARG_OFFSET}}" STREQUAL "-P")
math(EXPR COMMAND_INDEX "${ARG_OFFSET} + 2")
math(EXPR VERSION_INDEX "${ARG_OFFSET} + 3")
break()
endif()
math(EXPR ARG_OFFSET "${ARG_OFFSET} + 1")
endwhile()
if(NOT DEFINED COMMAND_INDEX OR COMMAND_INDEX GREATER_EQUAL CMAKE_ARGC)
message(FATAL_ERROR "Usage: cmake -P version.cmake [GET|SET] [new_version]")
endif()
set(COMMAND "${CMAKE_ARGV${COMMAND_INDEX}}")
if("${COMMAND}" STREQUAL "GET")
get_current_version(VERSION)
message("${VERSION}")
elseif("${COMMAND}" STREQUAL "SET")
if(VERSION_INDEX GREATER_EQUAL CMAKE_ARGC)
message(FATAL_ERROR "Veuillez spécifier la nouvelle version: cmake -P version.cmake SET 0.0.0")
endif()
set(NEW_VERSION "${CMAKE_ARGV${VERSION_INDEX}}")
set_new_version("${NEW_VERSION}")
else()
message(FATAL_ERROR "Commande inconnue: ${COMMAND}. Utilisez GET ou SET.")
endif()