Compare commits


1 Commit

Author SHA1 Message Date
a1c48bb792 Merge pull request 'refactor(modules): mark DatabaseModule and CryptoModule as global and remove redundant imports' (#10) from dev into prod
Some checks failed
Backend Tests / test (push) Has been cancelled
Lint / lint (push) Has been cancelled
Deploy to Production / deploy (push) Successful in 6m10s
Reviewed-on: #10
2026-01-14 22:52:03 +01:00
30 changed files with 129 additions and 737 deletions
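
The commit message refers to marking DatabaseModule and CryptoModule as global NestJS modules; those two files are not among the hunks shown below, so the following is only a minimal sketch of what such a change usually looks like (the CryptoService provider name is assumed for illustration, not taken from the repository):

    import { Global, Module } from "@nestjs/common";
    import { CryptoService } from "./crypto.service"; // assumed provider name

    // Marking the module @Global() makes its exported providers available
    // application-wide once the root AppModule has imported it a single time.
    @Global()
    @Module({
      providers: [CryptoService],
      exports: [CryptoService],
    })
    export class CryptoModule {}

Feature modules that previously listed CryptoModule in their own imports arrays can then drop that entry and keep injecting CryptoService, which is presumably what "remove redundant imports" refers to.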

View File

@@ -1,12 +1,8 @@
 name: Backend Tests
 on:
   push:
     paths:
       - 'backend/**'
-  pull_request:
-    paths:
-      - 'backend/**'
 jobs:
   test:
@@ -18,19 +14,9 @@ jobs:
           version: 9
       - uses: actions/setup-node@v4
         with:
-          node-version: 20
-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
-      - uses: actions/cache@v4
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
+          node-version: 22
+          cache: 'pnpm'
       - name: Install dependencies
-        run: pnpm install --frozen-lockfile --prefer-offline
+        run: pnpm install
       - name: Run Backend Tests
         run: pnpm -F @memegoat/backend test

View File

@@ -1,63 +1,61 @@
 name: Deploy to Production
 on:
   push:
     branches:
-      - main
+      - prod
 jobs:
-  validate:
-    name: Validate Build & Lint
+  deploy:
     runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        component: [backend, frontend, documentation]
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Install pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          version: 9
+        uses: actions/checkout@v3
       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v3
         with:
           node-version: 20
+      - name: Install pnpm
+        uses: pnpm/action-setup@v2
+        with:
+          version: 8
       - name: Get pnpm store directory
-        id: pnpm-cache
         shell: bash
         run: |
-          echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
+          echo "STORE_PATH=$(pnpm store path --silent)" >> $GITEA_ENV
       - name: Setup pnpm cache
-        uses: actions/cache@v4
+        uses: actions/cache@v3
         with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
+          path: ${{ env.STORE_PATH }}
           key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
           restore-keys: |
             ${{ runner.os }}-pnpm-store-
       - name: Install dependencies
-        run: pnpm install --frozen-lockfile --prefer-offline
-      - name: Lint ${{ matrix.component }}
-        run: pnpm -F @memegoat/${{ matrix.component }} lint
-      - name: Build ${{ matrix.component }}
-        run: pnpm -F @memegoat/${{ matrix.component }} build
+        run: pnpm install
+      - name: Lint - Backend
+        run: pnpm run lint:back
+      - name: Build - Backend
+        run: pnpm run build:back
         env:
           NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}
-  deploy:
-    name: Deploy to Production
-    needs: validate
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
+      - name: Lint - Frontend
+        run: pnpm run lint:front
+      - name: Build - Frontend
+        run: pnpm run build:front
+      - name: Lint - Documentation
+        run: pnpm run lint:docs
+      - name: Build - Documentation
+        run: pnpm run build:docs
       - name: Deploy with Docker Compose
         run: |

View File

@@ -1,23 +1,14 @@
 name: Lint
 on:
   push:
     paths:
       - 'frontend/**'
       - 'backend/**'
       - 'documentation/**'
-  pull_request:
-    paths:
-      - 'frontend/**'
-      - 'backend/**'
-      - 'documentation/**'
 jobs:
   lint:
     runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        component: [backend, frontend, documentation]
     steps:
       - uses: actions/checkout@v4
       - uses: pnpm/action-setup@v4
@@ -25,19 +16,16 @@ jobs:
           version: 9
       - uses: actions/setup-node@v4
         with:
-          node-version: 20
-      - name: Get pnpm store directory
-        id: pnpm-cache
-        shell: bash
-        run: |
-          echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
-      - uses: actions/cache@v4
-        with:
-          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
-          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-store-
       - name: Install dependencies
-        run: pnpm install --frozen-lockfile --prefer-offline
-      - name: Lint ${{ matrix.component }}
-        run: pnpm -F @memegoat/${{ matrix.component }} lint
+        run: pnpm install
+      - name: Lint Frontend
+        if: success() || failure()
+        run: pnpm -F @memegoat/frontend lint
+      - name: Lint Backend
+        if: success() || failure()
+        run: pnpm -F @memegoat/backend lint
+      - name: Lint Documentation
+        if: success() || failure()
+        run: pnpm -F @bypass/documentation lint

View File

@@ -1,225 +0,0 @@
{
"name": "@memegoat/source",
"version": "0.0.1",
"description": "",
"scripts": {
"build": "pnpm run build:back && pnpm run build:front && pnpm run build:docs",
"build:front": "pnpm run -F @memegoat/frontend build",
"build:back": "pnpm run -F @memegoat/backend build",
"build:docs": "pnpm run -F @memegoat/documentation build",
"lint": "pnpm run lint:back && pnpm run lint:front && pnpm run lint:docs",
"lint:back": "pnpm run -F @memegoat/backend lint",
"lint:front": "pnpm run -F @memegoat/frontend lint",
"lint:docs": "pnpm run -F @memegoat/documentation lint",
"test": "pnpm run test:back && pnpm run test:front",
"test:back": "pnpm run -F @memegoat/backend test",
"test:front": "pnpm run -F @memegoat/frontend test",
"format": "pnpm run format:back && pnpm run format:front && pnpm run format:docs",
"format:back": "pnpm run -F @memegoat/backend format",
"format:front": "pnpm run -F @memegoat/frontend format",
"format:docs": "pnpm run -F @memegoat/documentation format",
"upgrade": "pnpm dlx taze minor"
},
"keywords": [],
"author": {
"name": "Mathis HERRIOT",
"email": "mherriot.pro@proton.me",
"role": "Author"
},
"license": "AGPL-3.0-only",
"devDependencies": {
"@biomejs/biome": "2.3.11"
}
}
{
"name": "@memegoat/backend",
"version": "0.0.1",
"description": "",
"author": "",
"private": true,
"license": "UNLICENSED",
"files": [
"dist",
".migrations",
"drizzle.config.ts"
],
"scripts": {
"build": "nest build",
"lint": "biome check",
"lint:write": "biome check --write",
"format": "biome format --write",
"start": "nest start",
"start:dev": "nest start --watch",
"start:debug": "nest start --debug --watch",
"start:prod": "node dist/main",
"test": "jest",
"test:watch": "jest --watch",
"test:cov": "jest --coverage",
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
"test:e2e": "jest --config ./test/jest-e2e.json",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"db:studio": "drizzle-kit studio"
},
"dependencies": {
"@nestjs-modules/mailer": "^2.0.2",
"@nestjs/cache-manager": "^3.1.0",
"@nestjs/common": "^11.0.1",
"@nestjs/config": "^4.0.2",
"@nestjs/core": "^11.0.1",
"@nestjs/mapped-types": "^2.1.0",
"@nestjs/platform-express": "^11.0.1",
"@nestjs/schedule": "^6.1.0",
"@nestjs/throttler": "^6.5.0",
"@noble/post-quantum": "^0.5.4",
"@node-rs/argon2": "^2.0.2",
"@sentry/nestjs": "^10.32.1",
"@sentry/profiling-node": "^10.32.1",
"cache-manager": "^7.2.7",
"cache-manager-redis-yet": "^5.1.5",
"clamscan": "^2.4.0",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.3",
"dotenv": "^17.2.3",
"drizzle-orm": "^0.45.1",
"fluent-ffmpeg": "^2.1.3",
"helmet": "^8.1.0",
"iron-session": "^8.0.4",
"jose": "^6.1.3",
"minio": "^8.0.6",
"nodemailer": "^7.0.12",
"otplib": "^12.0.1",
"pg": "^8.16.3",
"qrcode": "^1.5.4",
"reflect-metadata": "^0.2.2",
"rxjs": "^7.8.1",
"sharp": "^0.34.5",
"uuid": "^13.0.0",
"zod": "^4.3.5",
"drizzle-kit": "^0.31.8"
},
"devDependencies": {
"@nestjs/cli": "^11.0.0",
"globals": "^16.0.0",
"jest": "^30.0.0",
"source-map-support": "^0.5.21",
"supertest": "^7.0.0",
"ts-jest": "^29.2.5",
"ts-loader": "^9.5.2",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0",
"tsx": "^4.21.0",
"typescript": "^5.7.3",
"typescript-eslint": "^8.20.0",
"@nestjs/schematics": "^11.0.0",
"@nestjs/testing": "^11.0.1",
"@types/express": "^5.0.0",
"@types/fluent-ffmpeg": "^2.1.28",
"@types/jest": "^30.0.0",
"@types/multer": "^2.0.0",
"@types/node": "^22.10.7",
"@types/nodemailer": "^7.0.4",
"@types/pg": "^8.16.0",
"@types/qrcode": "^1.5.6",
"@types/sharp": "^0.32.0",
"@types/supertest": "^6.0.2",
"@types/uuid": "^11.0.0",
"drizzle-kit": "^0.31.8"
},
"jest": {
"moduleFileExtensions": [
"js",
"json",
"ts"
],
"rootDir": "src",
"testRegex": ".*\\.spec\\.ts$",
"collectCoverageFrom": [
"**/*.(t|j)s"
],
"coverageDirectory": "../coverage",
"testEnvironment": "node",
"transformIgnorePatterns": [
"node_modules/(?!(.pnpm/)?(jose|@noble|uuid)/)"
],
"transform": {
"^.+\\.(t|j)sx?$": "ts-jest"
},
"moduleNameMapper": {
"^@noble/post-quantum/(.*)$": "<rootDir>/../node_modules/@noble/post-quantum/$1",
"^@noble/hashes/(.*)$": "<rootDir>/../node_modules/@noble/hashes/$1"
}
}
}
{
"name": "@memegoat/frontend",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "biome check",
"format": "biome format --write"
},
"dependencies": {
"@hookform/resolvers": "^5.2.2",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-aspect-ratio": "^1.1.8",
"@radix-ui/react-avatar": "^1.1.11",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-context-menu": "^2.2.16",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-hover-card": "^1.1.15",
"@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-menubar": "^1.1.16",
"@radix-ui/react-navigation-menu": "^1.2.14",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-radio-group": "^1.3.8",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-toggle": "^1.1.10",
"@radix-ui/react-toggle-group": "^1.1.11",
"@radix-ui/react-tooltip": "^1.2.8",
"axios": "^1.13.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"date-fns": "^4.1.0",
"embla-carousel-react": "^8.6.0",
"input-otp": "^1.4.2",
"lucide-react": "^0.562.0",
"next": "16.1.1",
"next-themes": "^0.4.6",
"react": "19.2.3",
"react-day-picker": "^9.13.0",
"react-dom": "19.2.3",
"react-hook-form": "^7.71.1",
"react-resizable-panels": "^4.4.1",
"recharts": "2.15.4",
"sonner": "^2.0.7",
"tailwind-merge": "^3.4.0",
"vaul": "^1.1.2",
"zod": "^4.3.5"
},
"devDependencies": {
"@biomejs/biome": "2.3.11",
"@tailwindcss/postcss": "^4",
"@types/node": "^20",
"@types/react": "^19",
"@types/react-dom": "^19",
"babel-plugin-react-compiler": "1.0.0",
"tailwindcss": "^4",
"tw-animate-css": "^1.4.0",
"typescript": "^5"
}
}

View File

@@ -1,50 +0,0 @@
# 🐐 Memegoat - Roadmap & Critères de Production
Ce document définit les objectifs, les critères techniques et les fonctionnalités à atteindre pour que le projet Memegoat soit considéré comme prêt pour la production et conforme aux normes européennes (RGPD) et françaises.
## 1. 🏗️ Architecture & Infrastructure
- [x] Backend NestJS (TypeScript)
- [x] Base de données PostgreSQL avec Drizzle ORM
- [x] Stockage d'objets compatible S3 (MinIO)
- [x] Service d'Emailing (Nodemailer / SMTPS)
- [x] Documentation Technique & Référence API (`docs.memegoat.fr`)
- [x] Health Checks (`/health`)
- [x] Gestion des variables d'environnement (Validation avec Zod)
- [ ] CI/CD (Build, Lint, Test, Deploy)
## 2. 🔐 Sécurité & Authentification
- [x] Hachage des mots de passe (Argon2id)
- [x] Gestion des sessions robuste (JWT avec Refresh Token et Rotation)
- [x] RBAC (Role Based Access Control) fonctionnel
- [x] Système de Clés API (Hachées en base)
- [x] Double Authentification (2FA / TOTP)
- [x] Limitation de débit (Rate Limiting / Throttler)
- [x] Validation stricte des entrées (DTOs + ValidationPipe)
- [x] Protection contre les vulnérabilités OWASP (Helmet, CORS)
## 3. ⚖️ Conformité RGPD (EU & France)
- [x] Chiffrement natif des données personnelles (PII) via PGP (pgcrypto)
- [x] Hachage aveugle (Blind Indexing) pour l'email (recherche/unicité)
- [x] Journalisation d'audit complète (Audit Logs) pour les actions sensibles
- [x] Gestion du consentement (Versionnage CGU/Politique de Confidentialité)
- [x] Droit à l'effacement : Flux de suppression (Soft Delete -> Purge définitive)
- [x] Droit à la portabilité : Export des données utilisateur (JSON)
- [x] Purge automatique des données obsolètes (Signalements, Sessions expirées)
- [x] Anonymisation des adresses IP (Hachage) dans les logs
## 4. 🖼️ Fonctionnalités Coeur (Media & Galerie)
- [x] Exploration (Trends, Recent, Favoris)
- [x] Recherche par Tags, Catégories, Auteur, Texte
- [x] Gestion des Favoris
- [x] Upload sécurisé via S3 (URLs présignées)
- [x] Scan Antivirus (ClamAV) et traitement des médias (WebP, WebM, AVIF, AV1)
- [x] Limitation de la taille et des formats de fichiers entrants (Configurable)
- [x] Système de Signalement (Reports) et workflow de modération
- [ ] SEO : Metatags dynamiques et slugs sémantiques
## 5. ✅ Qualité & Robustesse
- [ ] Couverture de tests unitaires (Jest) > 80%
- [ ] Tests d'intégration et E2E
- [x] Gestion centralisée des erreurs (Filters NestJS)
- [ ] Monitoring et centralisation des logs (ex: Sentry, ELK/Loki)
- [ ] Performance : Cache (Redis) pour les tendances et recherches fréquentes

View File

@@ -1,5 +1,4 @@
-# syntax=docker/dockerfile:1
-FROM node:22-alpine AS base
+FROM node:22-slim AS base
 ENV PNPM_HOME="/pnpm"
 ENV PATH="$PNPM_HOME:$PATH"
 RUN corepack enable && corepack prepare pnpm@latest --activate
@@ -10,17 +9,10 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-# Utilisation du cache pour pnpm et installation figée
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
-    pnpm install --frozen-lockfile
+RUN pnpm install --no-frozen-lockfile
 COPY . .
-# On réinstalle après COPY pour s'assurer que tous les scripts de cycle de vie et les liens sont corrects
-# Deuxième passe avec cache pour les scripts/liens
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
-    pnpm install --frozen-lockfile
+RUN pnpm install --no-frozen-lockfile
 RUN pnpm run --filter @memegoat/backend build
 RUN pnpm deploy --filter=@memegoat/backend --prod --legacy /app
 RUN cp -r backend/dist /app/dist

View File

@@ -1,6 +1,6 @@
 {
   "name": "@memegoat/backend",
-  "version": "0.0.0",
+  "version": "0.0.1",
   "description": "",
   "author": "",
   "private": true,

View File

@@ -12,7 +12,6 @@ import { AuthModule } from "./auth/auth.module";
 import { CategoriesModule } from "./categories/categories.module";
 import { CommonModule } from "./common/common.module";
 import { CrawlerDetectionMiddleware } from "./common/middlewares/crawler-detection.middleware";
-import { HTTPLoggerMiddleware } from "./common/middlewares/http-logger.middleware";
 import { validateEnv } from "./config/env.schema";
 import { ContentsModule } from "./contents/contents.module";
 import { CryptoModule } from "./crypto/crypto.module";
@@ -77,8 +76,6 @@ import { UsersModule } from "./users/users.module";
 })
 export class AppModule implements NestModule {
   configure(consumer: MiddlewareConsumer) {
-    consumer
-      .apply(HTTPLoggerMiddleware, CrawlerDetectionMiddleware)
-      .forRoutes("*");
+    consumer.apply(CrawlerDetectionMiddleware).forRoutes("*");
   }
 }

View File

@@ -110,7 +110,6 @@ export class AuthService {
     const user = await this.usersService.findByEmailHash(emailHash);
     if (!user) {
-      this.logger.warn(`Login failed: user not found for email hash`);
       throw new UnauthorizedException("Invalid credentials");
     }
@@ -120,12 +119,10 @@ export class AuthService {
     );
     if (!isPasswordValid) {
-      this.logger.warn(`Login failed: invalid password for user ${user.uuid}`);
       throw new UnauthorizedException("Invalid credentials");
     }
     if (user.isTwoFactorEnabled) {
-      this.logger.log(`2FA required for user ${user.uuid}`);
       return {
         message: "2FA required",
         requires2FA: true,
@@ -144,7 +141,6 @@ export class AuthService {
       ip,
     );
-    this.logger.log(`User ${user.uuid} logged in successfully`);
     return {
       message: "User logged in successfully",
       access_token: accessToken,
@@ -169,9 +165,6 @@ export class AuthService {
     const isValid = authenticator.verify({ token, secret });
     if (!isValid) {
-      this.logger.warn(
-        `2FA verification failed for user ${userId}: invalid token`,
-      );
       throw new UnauthorizedException("Invalid 2FA token");
     }
@@ -186,7 +179,6 @@ export class AuthService {
       ip,
     );
-    this.logger.log(`User ${userId} logged in successfully via 2FA`);
     return {
       message: "User logged in successfully (2FA)",
       access_token: accessToken,

View File

@@ -9,14 +9,6 @@ import {
 import * as Sentry from "@sentry/nestjs";
 import { Request, Response } from "express";

-interface RequestWithUser extends Request {
-  user?: {
-    sub?: string;
-    username?: string;
-    id?: string;
-  };
-}
-
 @Catch()
 export class AllExceptionsFilter implements ExceptionFilter {
   private readonly logger = new Logger("ExceptionFilter");
@@ -24,7 +16,7 @@ export class AllExceptionsFilter implements ExceptionFilter {
   catch(exception: unknown, host: ArgumentsHost) {
     const ctx = host.switchToHttp();
     const response = ctx.getResponse<Response>();
-    const request = ctx.getRequest<RequestWithUser>();
+    const request = ctx.getRequest<Request>();
     const status =
       exception instanceof HttpException
@@ -36,9 +28,6 @@ export class AllExceptionsFilter implements ExceptionFilter {
         ? exception.getResponse()
         : "Internal server error";
-    const userId = request.user?.sub || request.user?.id;
-    const userPart = userId ? `[User: ${userId}] ` : "";
     const errorResponse = {
       statusCode: status,
       timestamp: new Date().toISOString(),
@@ -53,12 +42,12 @@ export class AllExceptionsFilter implements ExceptionFilter {
     if (status === HttpStatus.INTERNAL_SERVER_ERROR) {
       Sentry.captureException(exception);
       this.logger.error(
-        `${userPart}${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
+        `${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
         exception instanceof Error ? exception.stack : "",
       );
     } else {
       this.logger.warn(
-        `${userPart}${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
+        `${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
       );
     }

View File

@@ -33,6 +33,4 @@ export interface IStorageService {
     sourceBucketName?: string,
     destinationBucketName?: string,
   ): Promise<string>;
-
-  getPublicUrl(storageKey: string): string;
 }

View File

@@ -1,37 +0,0 @@
import { createHash } from "node:crypto";
import { Injectable, Logger, NestMiddleware } from "@nestjs/common";
import { NextFunction, Request, Response } from "express";
@Injectable()
export class HTTPLoggerMiddleware implements NestMiddleware {
private readonly logger = new Logger("HTTP");
use(request: Request, response: Response, next: NextFunction): void {
const { method, originalUrl, ip } = request;
const userAgent = request.get("user-agent") || "";
const startTime = Date.now();
response.on("finish", () => {
const { statusCode } = response;
const contentLength = response.get("content-length");
const duration = Date.now() - startTime;
const hashedIp = createHash("sha256")
.update(ip as string)
.digest("hex");
const message = `${method} ${originalUrl} ${statusCode} ${contentLength || 0} - ${userAgent} ${hashedIp} +${duration}ms`;
if (statusCode >= 500) {
return this.logger.error(message);
}
if (statusCode >= 400) {
return this.logger.warn(message);
}
return this.logger.log(message);
});
next();
}
}

View File

@@ -33,7 +33,6 @@ export const envSchema = z.object({
   MAIL_FROM: z.string().email(),
   DOMAIN_NAME: z.string(),
-  API_URL: z.string().url().optional(),

   // Sentry
   SENTRY_DSN: z.string().optional(),

View File

@@ -30,7 +30,6 @@ describe("ContentsService", () => {
   const mockS3Service = {
     getUploadUrl: jest.fn(),
     uploadFile: jest.fn(),
-    getPublicUrl: jest.fn(),
   };
const mockMediaService = { const mockMediaService = {

View File

@@ -100,7 +100,6 @@ export class ContentsService {
     // 3. Upload vers S3
     const key = `contents/${userId}/${Date.now()}-${uuidv4()}.${processed.extension}`;
     await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
-    this.logger.log(`File uploaded successfully to S3: ${key}`);

     // 4. Création en base de données
     return await this.create(userId, {
@@ -129,11 +128,11 @@ export class ContentsService {
     const processedData = data.map((content) => ({
       ...content,
-      url: this.s3Service.getPublicUrl(content.storageKey),
+      url: this.getFileUrl(content.storageKey),
       author: {
         ...content.author,
         avatarUrl: content.author?.avatarUrl
-          ? this.s3Service.getPublicUrl(content.author.avatarUrl)
+          ? this.getFileUrl(content.author.avatarUrl)
           : null,
       },
     }));
@@ -190,18 +189,18 @@ export class ContentsService {
     return {
       ...content,
-      url: this.s3Service.getPublicUrl(content.storageKey),
+      url: this.getFileUrl(content.storageKey),
       author: {
         ...content.author,
         avatarUrl: content.author?.avatarUrl
-          ? this.s3Service.getPublicUrl(content.author.avatarUrl)
+          ? this.getFileUrl(content.author.avatarUrl)
           : null,
       },
     };
   }

   generateBotHtml(content: { title: string; storageKey: string }): string {
-    const imageUrl = this.s3Service.getPublicUrl(content.storageKey);
+    const imageUrl = this.getFileUrl(content.storageKey);
     return `<!DOCTYPE html>
 <html>
   <head>
@@ -222,6 +221,19 @@ export class ContentsService {
 </html>`;
   }

+  getFileUrl(storageKey: string): string {
+    const endpoint = this.configService.get("S3_ENDPOINT");
+    const port = this.configService.get("S3_PORT");
+    const protocol =
+      this.configService.get("S3_USE_SSL") === true ? "https" : "http";
+    const bucket = this.configService.get("S3_BUCKET_NAME");
+    if (endpoint === "localhost" || endpoint === "127.0.0.1") {
+      return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
+    }
+    return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
+  }
+
   private generateSlug(text: string): string {
     return text
       .toLowerCase()

View File

@@ -1,61 +0,0 @@
import { Readable } from "node:stream";
import { NotFoundException } from "@nestjs/common";
import { Test, TestingModule } from "@nestjs/testing";
import type { Response } from "express";
import { S3Service } from "../s3/s3.service";
import { MediaController } from "./media.controller";
describe("MediaController", () => {
let controller: MediaController;
const mockS3Service = {
getFileInfo: jest.fn(),
getFile: jest.fn(),
};
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
controllers: [MediaController],
providers: [{ provide: S3Service, useValue: mockS3Service }],
}).compile();
controller = module.get<MediaController>(MediaController);
});
it("should be defined", () => {
expect(controller).toBeDefined();
});
describe("getFile", () => {
it("should stream the file and set headers with path containing slashes", async () => {
const res = {
setHeader: jest.fn(),
} as unknown as Response;
const stream = new Readable();
stream.pipe = jest.fn();
const key = "contents/user-id/test.webp";
mockS3Service.getFileInfo.mockResolvedValue({
size: 100,
metaData: { "content-type": "image/webp" },
});
mockS3Service.getFile.mockResolvedValue(stream);
await controller.getFile(key, res);
expect(mockS3Service.getFileInfo).toHaveBeenCalledWith(key);
expect(res.setHeader).toHaveBeenCalledWith("Content-Type", "image/webp");
expect(res.setHeader).toHaveBeenCalledWith("Content-Length", 100);
expect(stream.pipe).toHaveBeenCalledWith(res);
});
it("should throw NotFoundException if file is not found", async () => {
mockS3Service.getFileInfo.mockRejectedValue(new Error("Not found"));
const res = {} as unknown as Response;
await expect(controller.getFile("invalid", res)).rejects.toThrow(
NotFoundException,
);
});
});
});

View File

@@ -1,30 +0,0 @@
import { Controller, Get, NotFoundException, Param, Res } from "@nestjs/common";
import type { Response } from "express";
import type { BucketItemStat } from "minio";
import { S3Service } from "../s3/s3.service";
@Controller("media")
export class MediaController {
constructor(private readonly s3Service: S3Service) {}
@Get("*key")
async getFile(@Param("key") key: string, @Res() res: Response) {
try {
const stats = (await this.s3Service.getFileInfo(key)) as BucketItemStat;
const stream = await this.s3Service.getFile(key);
const contentType =
stats.metaData?.["content-type"] ||
stats.metadata?.["content-type"] ||
"application/octet-stream";
res.setHeader("Content-Type", contentType);
res.setHeader("Content-Length", stats.size);
res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
stream.pipe(res);
} catch (_error) {
throw new NotFoundException("Fichier non trouvé");
}
}
}

View File

@@ -1,13 +1,9 @@
 import { Module } from "@nestjs/common";
-import { S3Module } from "../s3/s3.module";
-import { MediaController } from "./media.controller";
 import { MediaService } from "./media.service";
 import { ImageProcessorStrategy } from "./strategies/image-processor.strategy";
 import { VideoProcessorStrategy } from "./strategies/video-processor.strategy";

 @Module({
-  imports: [S3Module],
-  controllers: [MediaController],
   providers: [MediaService, ImageProcessorStrategy, VideoProcessorStrategy],
   exports: [MediaService],
 })

View File

@@ -7,7 +7,7 @@ jest.mock("minio");
describe("S3Service", () => { describe("S3Service", () => {
let service: S3Service; let service: S3Service;
let configService: ConfigService; let _configService: ConfigService;
// biome-ignore lint/suspicious/noExplicitAny: Fine for testing purposes // biome-ignore lint/suspicious/noExplicitAny: Fine for testing purposes
let minioClient: any; let minioClient: any;
@@ -42,7 +42,7 @@ describe("S3Service", () => {
     }).compile();

     service = module.get<S3Service>(S3Service);
-    configService = module.get<ConfigService>(ConfigService);
+    _configService = module.get<ConfigService>(ConfigService);
   });

   it("should be defined", () => {
@@ -185,39 +185,35 @@ describe("S3Service", () => {
     });
   });

-  describe("getPublicUrl", () => {
-    it("should use API_URL if provided", () => {
-      (configService.get as jest.Mock).mockImplementation((key: string) => {
-        if (key === "API_URL") return "https://api.test.com";
-        return null;
-      });
-      const url = service.getPublicUrl("test.webp");
-      expect(url).toBe("https://api.test.com/media/test.webp");
+  describe("moveFile", () => {
+    it("should move file within default bucket", async () => {
+      const source = "source.txt";
+      const dest = "dest.txt";
+      await service.moveFile(source, dest);
+
+      expect(minioClient.copyObject).toHaveBeenCalledWith(
+        "memegoat",
+        dest,
+        "/memegoat/source.txt",
+        expect.any(Minio.CopyConditions),
+      );
+      expect(minioClient.removeObject).toHaveBeenCalledWith("memegoat", source);
     });

-    it("should use DOMAIN_NAME and PORT for localhost", () => {
-      (configService.get as jest.Mock).mockImplementation(
-        (key: string, def: unknown) => {
-          if (key === "API_URL") return null;
-          if (key === "DOMAIN_NAME") return "localhost";
-          if (key === "PORT") return 3000;
-          return def;
-        },
-      );
-      const url = service.getPublicUrl("test.webp");
-      expect(url).toBe("http://localhost:3000/media/test.webp");
-    });
-
-    it("should use api.DOMAIN_NAME for production", () => {
-      (configService.get as jest.Mock).mockImplementation(
-        (key: string, def: unknown) => {
-          if (key === "API_URL") return null;
-          if (key === "DOMAIN_NAME") return "memegoat.fr";
-          return def;
-        },
-      );
-      const url = service.getPublicUrl("test.webp");
-      expect(url).toBe("https://api.memegoat.fr/media/test.webp");
+    it("should move file between different buckets", async () => {
+      const source = "source.txt";
+      const dest = "dest.txt";
+      const sBucket = "source-bucket";
+      const dBucket = "dest-bucket";
+      await service.moveFile(source, dest, sBucket, dBucket);
+
+      expect(minioClient.copyObject).toHaveBeenCalledWith(
+        dBucket,
+        dest,
+        `/${sBucket}/${source}`,
+        expect.any(Minio.CopyConditions),
+      );
+      expect(minioClient.removeObject).toHaveBeenCalledWith(sBucket, source);
     });
   });
 });

View File

@@ -54,7 +54,6 @@ export class S3Service implements OnModuleInit, IStorageService {
         ...metaData,
         "Content-Type": mimeType,
       });
-      this.logger.log(`File uploaded successfully: ${fileName} to ${bucketName}`);
       return fileName;
     } catch (error) {
       this.logger.error(`Error uploading file to ${bucketName}: ${error.message}`);
@@ -114,7 +113,6 @@ export class S3Service implements OnModuleInit, IStorageService {
   async deleteFile(fileName: string, bucketName: string = this.bucketName) {
     try {
       await this.minioClient.removeObject(bucketName, fileName);
-      this.logger.log(`File deleted successfully: ${fileName} from ${bucketName}`);
     } catch (error) {
       this.logger.error(
         `Error deleting file from ${bucketName}: ${error.message}`,
@@ -157,22 +155,4 @@ export class S3Service implements OnModuleInit, IStorageService {
       throw error;
     }
   }
-
-  getPublicUrl(storageKey: string): string {
-    const apiUrl = this.configService.get<string>("API_URL");
-    const domain = this.configService.get<string>("DOMAIN_NAME", "localhost");
-    const port = this.configService.get<number>("PORT", 3000);
-
-    let baseUrl: string;
-    if (apiUrl) {
-      baseUrl = apiUrl.replace(/\/$/, "");
-    } else if (domain === "localhost" || domain === "127.0.0.1") {
-      baseUrl = `http://${domain}:${port}`;
-    } else {
-      baseUrl = `https://api.${domain}`;
-    }
-
-    return `${baseUrl}/media/${storageKey}`;
-  }
 }

View File

@@ -58,7 +58,6 @@ describe("UsersService", () => {
     const mockS3Service = {
       uploadFile: jest.fn(),
-      getPublicUrl: jest.fn(),
     };
const mockConfigService = { const mockConfigService = {

View File

@@ -6,6 +6,7 @@ import {
   Injectable,
   Logger,
 } from "@nestjs/common";
+import { ConfigService } from "@nestjs/config";
 import type { Cache } from "cache-manager";
 import { v4 as uuidv4 } from "uuid";
 import { RbacService } from "../auth/rbac.service";
@@ -27,6 +28,7 @@ export class UsersService {
     private readonly rbacService: RbacService,
     @Inject(MediaService) private readonly mediaService: IMediaService,
     @Inject(S3Service) private readonly s3Service: IStorageService,
+    private readonly configService: ConfigService,
   ) {}

   private async clearUserCache(username?: string) {
@@ -58,9 +60,7 @@ export class UsersService {
     return {
       ...user,
-      avatarUrl: user.avatarUrl
-        ? this.s3Service.getPublicUrl(user.avatarUrl)
-        : null,
+      avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
       role: roles.includes("admin") ? "admin" : "user",
       roles,
     };
@@ -74,9 +74,7 @@ export class UsersService {
     const processedData = data.map((user) => ({
       ...user,
-      avatarUrl: user.avatarUrl
-        ? this.s3Service.getPublicUrl(user.avatarUrl)
-        : null,
+      avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
     }));

     return { data: processedData, totalCount };
@@ -88,9 +86,7 @@ export class UsersService {
     return {
       ...user,
-      avatarUrl: user.avatarUrl
-        ? this.s3Service.getPublicUrl(user.avatarUrl)
-        : null,
+      avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
     };
   }
@@ -143,7 +139,6 @@ export class UsersService {
     // 3. Upload vers S3
     const key = `avatars/${uuid}/${Date.now()}-${uuidv4()}.${processed.extension}`;
     await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
-    this.logger.log(`Avatar uploaded successfully to S3: ${key}`);

     // 4. Mise à jour de la base de données
     const user = await this.update(uuid, { avatarUrl: key });
@@ -198,4 +193,17 @@ export class UsersService {
   async remove(uuid: string) {
     return await this.usersRepository.softDeleteUserAndContents(uuid);
   }
+
+  private getFileUrl(storageKey: string): string {
+    const endpoint = this.configService.get("S3_ENDPOINT");
+    const port = this.configService.get("S3_PORT");
+    const protocol =
+      this.configService.get("S3_USE_SSL") === true ? "https" : "http";
+    const bucket = this.configService.get("S3_BUCKET_NAME");
+    if (endpoint === "localhost" || endpoint === "127.0.0.1") {
+      return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
+    }
+    return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
+  }
 }

View File

@@ -1,4 +1,4 @@
-# syntax=docker/dockerfile:1
+# syntax=docker.io/docker/dockerfile:1
 FROM node:22-alpine AS base

 ENV PNPM_HOME="/pnpm"
@@ -11,20 +11,11 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-# Montage du cache pnpm
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
-    pnpm install --frozen-lockfile
+RUN pnpm install --no-frozen-lockfile
 COPY . .
-# On réinstalle après COPY pour s'assurer que tous les scripts de cycle de vie et les liens sont corrects
-# Deuxième passe avec cache pour les scripts/liens
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
-    pnpm install --frozen-lockfile
-# Build avec cache Next.js
-RUN --mount=type=cache,id=next-docs-cache,target=/usr/src/app/documentation/.next/cache \
-    pnpm run --filter @memegoat/documentation build
+RUN pnpm install --no-frozen-lockfile
+RUN pnpm run --filter @memegoat/documentation build

 FROM node:22-alpine AS runner
 WORKDIR /app

View File

@@ -1,4 +1,4 @@
-# syntax=docker/dockerfile:1
+# syntax=docker.io/docker/dockerfile:1
 FROM node:22-alpine AS base

 ENV PNPM_HOME="/pnpm"
@@ -11,20 +11,11 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-# Montage du cache pnpm
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
-    pnpm install --frozen-lockfile
+RUN pnpm install --no-frozen-lockfile
 COPY . .
-# On réinstalle après COPY pour s'assurer que tous les scripts de cycle de vie et les liens sont corrects
-# Deuxième passe avec cache pour les scripts/liens
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
-    pnpm install --frozen-lockfile
-# Build avec cache Next.js
-RUN --mount=type=cache,id=next-cache,target=/usr/src/app/frontend/.next/cache \
-    pnpm run --filter @memegoat/frontend build
+RUN pnpm install --no-frozen-lockfile
+RUN pnpm run --filter @memegoat/frontend build

 FROM node:22-alpine AS runner
 WORKDIR /app

View File

@@ -1,6 +1,6 @@
 {
   "name": "@memegoat/frontend",
-  "version": "0.0.0",
+  "version": "0.0.1",
   "private": true,
   "scripts": {
     "dev": "next dev",

View File

@@ -63,7 +63,7 @@ export default function HelpPage() {
           <p className="text-muted-foreground">
             N'hésitez pas à nous contacter sur nos réseaux sociaux ou par email.
           </p>
-          <p className="font-semibold text-primary">contact@memegoat.fr</p>
+          <p className="font-semibold text-primary">contact@memegoat.local</p>
         </div>
       </div>
     );

View File

@@ -228,7 +228,7 @@ export function AppSidebar() {
               <span className="truncate font-semibold">
                 {user.displayName || user.username}
               </span>
-              <span className="truncate text-xs">{user.role}</span>
+              <span className="truncate text-xs">{user.email}</span>
             </div>
             <ChevronRight className="ml-auto size-4 group-data-[collapsible=icon]:hidden" />
           </SidebarMenuButton>

View File

@@ -1,30 +0,0 @@
Réalisation du frontend :
# Exigences
- Responsive dans tout les formats tailwindcss
- Accessibilité A11Y
- Implémentation réel uniquement
- Site en français
- SEO parfaitement réalisé, robot.txt, sitemap.xml...
- Utilisation des composants shadcn/ui
- Réalisation d'une page d'erreur customisé
- Utilisation des fonctionalités de NextJS suivantes :
- Nested routes
- Dynamic routes
- Route groups
- Private folders
- Parralel and intercepted routes
- Prefetching pages
- Streaming pages
- Server and Client Components
- Cache Components
- Image optimization
- Incremental Static Regeneration
- Custom hooks
- Axios
Toute l'application est basé sur un système dashboard/sidebar intégrant le routing.
La page principale est la page de navigation du contennu.
En mode desktop nous retrouvons la sidebar à gauche, le contennu en scroll infini au milieu et les paramètres de recherche sur la droite.
En mode mobile la sidebar est replié, les paramètres de recherche sont représenté comme une icône de filtrage flotante en haut à droite

View File

@@ -1,10 +1,8 @@
 {
   "name": "@memegoat/source",
-  "version": "0.0.0",
+  "version": "0.0.1",
   "description": "",
   "scripts": {
-    "version:get": "cmake -P version.cmake GET",
-    "version:set": "cmake -P version.cmake SET",
     "build": "pnpm run build:back && pnpm run build:front && pnpm run build:docs",
     "build:front": "pnpm run -F @memegoat/frontend build",
     "build:back": "pnpm run -F @memegoat/backend build",

View File

@@ -1,84 +0,0 @@
# version.cmake - Script pour gérer la version SemVer de manière centralisée
# Usage: cmake -P version.cmake [GET|SET] [new_version]
set(PACKAGE_JSON_FILES
"${CMAKE_CURRENT_LIST_DIR}/package.json"
"${CMAKE_CURRENT_LIST_DIR}/backend/package.json"
"${CMAKE_CURRENT_LIST_DIR}/frontend/package.json"
)
# Fonction pour lire la version depuis le package.json racine
function(get_current_version OUT_VAR)
file(READ "${CMAKE_CURRENT_LIST_DIR}/package.json" ROOT_JSON)
string(JSON CURRENT_VERSION GET "${ROOT_JSON}" "version")
set(${OUT_VAR} ${CURRENT_VERSION} PARENT_SCOPE)
endfunction()
# Fonction pour créer un tag git
function(create_git_tag VERSION)
find_package(Git QUIET)
if(GIT_FOUND)
execute_process(
COMMAND ${GIT_EXECUTABLE} tag -a "v${VERSION}" -m "Release v${VERSION}"
WORKING_DIRECTORY "${CMAKE_CURRENT_LIST_DIR}"
RESULT_VARIABLE TAG_RESULT
)
if(TAG_RESULT EQUAL 0)
message(STATUS "Tag v${VERSION} créé avec succès")
else()
message(WARNING "Échec de la création du tag v${VERSION}. Il existe peut-être déjà.")
endif()
else()
message(WARNING "Git non trouvé, impossible de créer le tag.")
endif()
endfunction()
# Fonction pour mettre à jour la version dans tous les fichiers package.json
function(set_new_version NEW_VERSION)
foreach(JSON_FILE ${PACKAGE_JSON_FILES})
if(EXISTS "${JSON_FILE}")
message(STATUS "Mise à jour de ${JSON_FILE} vers la version ${NEW_VERSION}")
file(READ "${JSON_FILE}" CONTENT)
# Utilisation de string(JSON ...) pour modifier la version si disponible (CMake >= 3.19)
# Sinon on peut utiliser une regex simple pour package.json
string(REGEX REPLACE "\"version\": \"[^\"]+\"" "\"version\": \"${NEW_VERSION}\"" NEW_CONTENT "${CONTENT}")
file(WRITE "${JSON_FILE}" "${NEW_CONTENT}")
else()
message(WARNING "Fichier non trouvé: ${JSON_FILE}")
endif()
endforeach()
# Demander à l'utilisateur s'il veut tagger (ou le faire par défaut si spécifié)
create_git_tag(${NEW_VERSION})
endfunction()
# Logique principale
set(ARG_OFFSET 0)
while(ARG_OFFSET LESS CMAKE_ARGC)
if("${CMAKE_ARGV${ARG_OFFSET}}" STREQUAL "-P")
math(EXPR COMMAND_INDEX "${ARG_OFFSET} + 2")
math(EXPR VERSION_INDEX "${ARG_OFFSET} + 3")
break()
endif()
math(EXPR ARG_OFFSET "${ARG_OFFSET} + 1")
endwhile()
if(NOT DEFINED COMMAND_INDEX OR COMMAND_INDEX GREATER_EQUAL CMAKE_ARGC)
message(FATAL_ERROR "Usage: cmake -P version.cmake [GET|SET] [new_version]")
endif()
set(COMMAND "${CMAKE_ARGV${COMMAND_INDEX}}")
if("${COMMAND}" STREQUAL "GET")
get_current_version(VERSION)
message("${VERSION}")
elseif("${COMMAND}" STREQUAL "SET")
if(VERSION_INDEX GREATER_EQUAL CMAKE_ARGC)
message(FATAL_ERROR "Veuillez spécifier la nouvelle version: cmake -P version.cmake SET 0.0.0")
endif()
set(NEW_VERSION "${CMAKE_ARGV${VERSION_INDEX}}")
set_new_version("${NEW_VERSION}")
else()
message(FATAL_ERROR "Commande inconnue: ${COMMAND}. Utilisez GET ou SET.")
endif()