Compare commits

...

22 Commits

Author SHA1 Message Date
Mathis HERRIOT
597a4d615e Change branching scheme: switch to main and unify versions via CMake
All checks were successful
Lint / lint (backend) (push) Successful in 1m18s
Backend Tests / test (push) Successful in 1m18s
Lint / lint (documentation) (push) Successful in 1m18s
Lint / lint (frontend) (push) Successful in 1m15s
2026-01-20 10:39:53 +01:00
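(The unified versioning added here is driven by two root package.json scripts visible further down, `version:get` and `version:set`, which delegate to `version.cmake`; assuming pnpm forwards the extra argument, a call such as `pnpm run version:set 0.1.0` — the number is illustrative — would rewrite the version field of the root, backend and frontend package.json files and attempt to create a `v0.1.0` git tag.)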
Mathis HERRIOT
2df45af305 style(logging): reformat hashed IP computation for improved readability
All checks were successful
Lint / lint (documentation) (push) Successful in 1m18s
Lint / lint (backend) (push) Successful in 1m21s
Backend Tests / test (push) Successful in 1m23s
Lint / lint (frontend) (push) Successful in 1m10s
Lint / lint (backend) (pull_request) Successful in 1m20s
Lint / lint (documentation) (pull_request) Successful in 1m22s
Backend Tests / test (pull_request) Successful in 1m24s
Lint / lint (frontend) (pull_request) Successful in 1m10s
2026-01-20 10:01:40 +01:00
Mathis HERRIOT
863a4bf528 style(app): reformat middleware configuration for improved readability
Some checks failed
Lint / lint (backend) (push) Failing after 52s
Backend Tests / test (push) Successful in 1m15s
Lint / lint (frontend) (push) Successful in 1m10s
Lint / lint (documentation) (push) Successful in 2m39s
2026-01-20 09:58:10 +01:00
Mathis HERRIOT
9a1cdb05a4 fix(auth): adjust 2FA verification log formatting for consistency 2026-01-20 09:57:59 +01:00
Mathis HERRIOT
28caf92f9a fix(media): update S3 file info type casting for stricter type safety
Replace `any` with `BucketItemStat` for `getFileInfo` response in MediaController to ensure accurate type definition.
2026-01-20 09:57:38 +01:00
Mathis HERRIOT
8b2728dc5a test(s3): update mock implementation types for stricter type safety
Refactor mock implementations in S3 service tests to replace `any` with `unknown` for improved type safety and consistency.
2026-01-20 09:57:27 +01:00
Mathis HERRIOT
3bbbbc307f test(media): fix type casting in MediaController unit tests
Update type casting for `Response` object in MediaController tests to use `unknown as Response` for stricter type safety. Remove unused `s3Service` variable for cleanup.
2026-01-20 09:57:11 +01:00
Mathis HERRIOT
f080919563 fix(logging): resolve type issue in hashed IP logging
Ensure `ip` parameter is explicitly cast to string before creating a SHA-256 hash to prevent runtime errors.
2026-01-20 09:56:44 +01:00
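A minimal TypeScript sketch of the pattern this fix describes (the helper name and sample address are illustrative; the real change sits in the http-logger.middleware.ts diff further down):

    import { createHash } from "node:crypto";

    // Express exposes `request.ip` as `string | undefined`; the `as string` cast
    // satisfies the string signature of `update()`, which is the type error the fix targets.
    function hashIp(ip: string | undefined): string {
      return createHash("sha256")
        .update(ip as string)
        .digest("hex");
    }

    console.log(hashIp("203.0.113.7")); // illustrative address, prints a 64-char hex digest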
Mathis HERRIOT
edc1ab2438 feat(logging): introduce HTTP logging middleware
Some checks failed
Lint / lint (backend) (push) Failing after 2m22s
Backend Tests / test (push) Successful in 2m47s
Lint / lint (documentation) (push) Successful in 1m11s
Lint / lint (frontend) (push) Successful in 1m9s
Add middleware to log HTTP request and response details, including method, URL, status, duration, user agent, and hashed IP address. Logs categorized by severity based on response status code.
2026-01-20 09:45:06 +01:00
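For reference, the middleware (full source in the http-logger.middleware.ts diff below) formats each entry as `${method} ${originalUrl} ${statusCode} ${contentLength || 0} - ${userAgent} ${hashedIp} +${duration}ms`, so a successful request is reported roughly as `GET /contents?page=1 200 1532 - Mozilla/5.0 9f86d081…0f00a08 +42ms` (all values illustrative), with 4xx responses logged as warnings and 5xx responses as errors.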
Mathis HERRIOT
01b66d6f2f feat(logging): enhance exception filter with user context in logs
Integrate user context (`userId`) into exception filter logging for improved traceability. Adjust log messages to include `[User: <ID>]` when user data is available.
2026-01-20 09:44:57 +01:00
Mathis HERRIOT
9a70dd02bb feat(s3): add detailed logging for upload and delete operations 2026-01-20 09:44:45 +01:00
Mathis HERRIOT
e285a4e634 feat(auth): add detailed logging for login and 2FA operations
Introduce warnings for failed login attempts and invalid 2FA tokens. Add logs for successful logins and 2FA requirements to improve authentication traceability.
2026-01-20 09:44:12 +01:00
Mathis HERRIOT
f247a01ac7 feat(middleware): add HTTP logging middleware to application configuration 2026-01-20 09:43:52 +01:00
Mathis HERRIOT
bb640cd8f9 ci(workflows): remove Next.js build caching from deployment workflow 2026-01-20 09:31:30 +01:00
Mathis HERRIOT
c1118e9f25 test(s3): fix formatting of mock implementation in unit tests
All checks were successful
Backend Tests / test (push) Successful in 1m10s
Lint / lint (backend) (push) Successful in 1m7s
Lint / lint (documentation) (push) Successful in 1m8s
Lint / lint (frontend) (push) Successful in 1m6s
Backend Tests / test (pull_request) Successful in 1m10s
Lint / lint (backend) (pull_request) Successful in 1m7s
Lint / lint (documentation) (pull_request) Successful in 1m6s
Lint / lint (frontend) (pull_request) Successful in 1m7s
2026-01-15 00:44:55 +01:00
Mathis HERRIOT
eae1f84b92 ci(docker): optimize Dockerfiles with pnpm and build cache integration
Switch to `node:22-alpine` for smaller base images. Introduce pnpm cache mounts and utilize `--frozen-lockfile` for faster and more reliable builds. Add Next.js build cache optimizations for `frontend` and `documentation`.
2026-01-15 00:44:44 +01:00
Mathis HERRIOT
8d27532dc0 feat(s3): enhance logging and public URL generation
Some checks failed
Backend Tests / test (push) Successful in 1m11s
Lint / lint (backend) (push) Failing after 46s
Lint / lint (documentation) (push) Successful in 1m7s
Lint / lint (frontend) (push) Has been cancelled
Add detailed logging for S3 uploads in user and content services. Improve public URL generation logic in `S3Service` by providing better handling for `API_URL`, `DOMAIN_NAME`, and `PORT`. Update relevant tests to cover all scenarios.
2026-01-15 00:40:36 +01:00
Mathis HERRIOT
f79507730e ci(workflows): improve caching and optimize dependency installation
Add Next.js build cache to deployment workflow for improved performance. Update all workflows to use `pnpm install --frozen-lockfile --prefer-offline` for faster and more reliable dependency management.
2026-01-15 00:39:56 +01:00
Mathis HERRIOT
7048c2731e fix(media): correct route param handling in media controller
All checks were successful
Backend Tests / test (push) Successful in 1m48s
Lint / lint (backend) (push) Successful in 1m7s
Lint / lint (documentation) (push) Successful in 1m7s
Lint / lint (frontend) (push) Successful in 1m8s
Backend Tests / test (pull_request) Successful in 1m10s
Lint / lint (backend) (pull_request) Successful in 1m8s
Lint / lint (documentation) (pull_request) Successful in 1m7s
Lint / lint (frontend) (pull_request) Successful in 1m9s
Adjust `@Get` decorator route pattern to properly handle file keys with special characters.
2026-01-14 23:51:24 +01:00
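Concretely, the adjustment (full controller in the media.controller.ts diff below) is the named wildcard in the route decorator, which lets a key such as `contents/user-id/test.webp` reach the handler with its slashes intact; a trimmed sketch, with the streaming body elided:

    import { Controller, Get, Param, Res } from "@nestjs/common";
    import type { Response } from "express";

    @Controller("media")
    export class MediaController {
      // "*key" captures the remainder of the path, slashes included
      @Get("*key")
      async getFile(@Param("key") key: string, @Res() res: Response) {
        // stream the object from S3 here (see the real controller further down)
      }
    }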
Mathis HERRIOT
d74fd15036 ci(workflows): enhance workflows with matrix builds and caching optimizations
Refactor GitHub Actions workflows to introduce matrix builds for `backend`, `frontend`, and `documentation` components. Upgrade actions versions, add pull request triggers, and improve caching with pnpm store integration. Adjust Node.js version to 20 and enforce `--frozen-lockfile` for dependency installation.
2026-01-14 23:51:07 +01:00
Mathis HERRIOT
86a697c392 Merge remote-tracking branch 'origin/dev' into dev
Some checks failed
Backend Tests / test (push) Has been cancelled
Lint / lint (push) Has been cancelled
2026-01-14 23:14:03 +01:00
Mathis HERRIOT
38adbb6e77 feat(media): add public URL generation for media files and improve S3 integration
Introduce `getPublicUrl` in `S3Service` for generating public URLs. Replace custom file URL generation logic across services with the new method. Add media controller for file streaming and update related tests. Adjust frontend to display user roles instead of email in the sidebar. Update environment schema to include optional `API_URL`. Fix help page contact email.
2026-01-14 23:13:28 +01:00
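In practice, the services now delegate URL building to the shared helper: per the S3Service unit tests further down, a storage key such as `test.webp` resolves to `https://api.test.com/media/test.webp` when `API_URL` is set, to `http://localhost:3000/media/test.webp` in local development, and to `https://api.memegoat.fr/media/test.webp` when only `DOMAIN_NAME` is configured.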
30 changed files with 738 additions and 130 deletions

View File

@@ -1,8 +1,12 @@
 name: Backend Tests
 on:
   push:
     paths:
       - 'backend/**'
+  pull_request:
+    paths:
+      - 'backend/**'
 jobs:
   test:
@@ -14,9 +18,19 @@ jobs:
           version: 9
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
-          cache: 'pnpm'
+          node-version: 20
+      - name: Get pnpm store directory
+        id: pnpm-cache
+        shell: bash
+        run: |
+          echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
+      - uses: actions/cache@v4
+        with:
+          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
+          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-store-
       - name: Install dependencies
-        run: pnpm install
+        run: pnpm install --frozen-lockfile --prefer-offline
       - name: Run Backend Tests
         run: pnpm -F @memegoat/backend test

View File

@@ -1,61 +1,63 @@
 name: Deploy to Production
 on:
   push:
     branches:
-      - prod
+      - main
 jobs:
-  deploy:
+  validate:
+    name: Validate Build & Lint
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        component: [backend, frontend, documentation]
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 9
       - name: Setup Node.js
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
           node-version: 20
-      - name: Install pnpm
-        uses: pnpm/action-setup@v2
-        with:
-          version: 8
       - name: Get pnpm store directory
+        id: pnpm-cache
         shell: bash
         run: |
-          echo "STORE_PATH=$(pnpm store path --silent)" >> $GITEA_ENV
+          echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
       - name: Setup pnpm cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
-          path: ${{ env.STORE_PATH }}
+          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
           key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
           restore-keys: |
             ${{ runner.os }}-pnpm-store-
       - name: Install dependencies
-        run: pnpm install
+        run: pnpm install --frozen-lockfile --prefer-offline
-      - name: Lint - Backend
-        run: pnpm run lint:back
-      - name: Build - Backend
-        run: pnpm run build:back
+      - name: Lint ${{ matrix.component }}
+        run: pnpm -F @memegoat/${{ matrix.component }} lint
+      - name: Build ${{ matrix.component }}
+        run: pnpm -F @memegoat/${{ matrix.component }} build
         env:
           NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}
-      - name: Lint - Frontend
-        run: pnpm run lint:front
-      - name: Build - Frontend
-        run: pnpm run build:front
-      - name: Lint - Documentation
-        run: pnpm run lint:docs
-      - name: Build - Documentation
-        run: pnpm run build:docs
+  deploy:
+    name: Deploy to Production
+    needs: validate
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
       - name: Deploy with Docker Compose
         run: |

View File

@@ -1,14 +1,23 @@
 name: Lint
 on:
   push:
     paths:
       - 'frontend/**'
       - 'backend/**'
       - 'documentation/**'
+  pull_request:
+    paths:
+      - 'frontend/**'
+      - 'backend/**'
+      - 'documentation/**'
 jobs:
   lint:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        component: [backend, frontend, documentation]
     steps:
       - uses: actions/checkout@v4
       - uses: pnpm/action-setup@v4
@@ -16,16 +25,19 @@ jobs:
           version: 9
       - uses: actions/setup-node@v4
         with:
-          node-version: 22
-          cache: 'pnpm'
+          node-version: 20
+      - name: Get pnpm store directory
+        id: pnpm-cache
+        shell: bash
+        run: |
+          echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
+      - uses: actions/cache@v4
+        with:
+          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
+          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-store-
       - name: Install dependencies
-        run: pnpm install
+        run: pnpm install --frozen-lockfile --prefer-offline
-      - name: Lint Frontend
-        if: success() || failure()
-        run: pnpm -F @memegoat/frontend lint
-      - name: Lint Backend
-        if: success() || failure()
-        run: pnpm -F @memegoat/backend lint
-      - name: Lint Documentation
-        if: success() || failure()
-        run: pnpm -F @bypass/documentation lint
+      - name: Lint ${{ matrix.component }}
+        run: pnpm -F @memegoat/${{ matrix.component }} lint

225
.output.txt Normal file
View File

@@ -0,0 +1,225 @@
{
"name": "@memegoat/source",
"version": "0.0.1",
"description": "",
"scripts": {
"build": "pnpm run build:back && pnpm run build:front && pnpm run build:docs",
"build:front": "pnpm run -F @memegoat/frontend build",
"build:back": "pnpm run -F @memegoat/backend build",
"build:docs": "pnpm run -F @memegoat/documentation build",
"lint": "pnpm run lint:back && pnpm run lint:front && pnpm run lint:docs",
"lint:back": "pnpm run -F @memegoat/backend lint",
"lint:front": "pnpm run -F @memegoat/frontend lint",
"lint:docs": "pnpm run -F @memegoat/documentation lint",
"test": "pnpm run test:back && pnpm run test:front",
"test:back": "pnpm run -F @memegoat/backend test",
"test:front": "pnpm run -F @memegoat/frontend test",
"format": "pnpm run format:back && pnpm run format:front && pnpm run format:docs",
"format:back": "pnpm run -F @memegoat/backend format",
"format:front": "pnpm run -F @memegoat/frontend format",
"format:docs": "pnpm run -F @memegoat/documentation format",
"upgrade": "pnpm dlx taze minor"
},
"keywords": [],
"author": {
"name": "Mathis HERRIOT",
"email": "mherriot.pro@proton.me",
"role": "Author"
},
"license": "AGPL-3.0-only",
"devDependencies": {
"@biomejs/biome": "2.3.11"
}
}
{
"name": "@memegoat/backend",
"version": "0.0.1",
"description": "",
"author": "",
"private": true,
"license": "UNLICENSED",
"files": [
"dist",
".migrations",
"drizzle.config.ts"
],
"scripts": {
"build": "nest build",
"lint": "biome check",
"lint:write": "biome check --write",
"format": "biome format --write",
"start": "nest start",
"start:dev": "nest start --watch",
"start:debug": "nest start --debug --watch",
"start:prod": "node dist/main",
"test": "jest",
"test:watch": "jest --watch",
"test:cov": "jest --coverage",
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
"test:e2e": "jest --config ./test/jest-e2e.json",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"db:studio": "drizzle-kit studio"
},
"dependencies": {
"@nestjs-modules/mailer": "^2.0.2",
"@nestjs/cache-manager": "^3.1.0",
"@nestjs/common": "^11.0.1",
"@nestjs/config": "^4.0.2",
"@nestjs/core": "^11.0.1",
"@nestjs/mapped-types": "^2.1.0",
"@nestjs/platform-express": "^11.0.1",
"@nestjs/schedule": "^6.1.0",
"@nestjs/throttler": "^6.5.0",
"@noble/post-quantum": "^0.5.4",
"@node-rs/argon2": "^2.0.2",
"@sentry/nestjs": "^10.32.1",
"@sentry/profiling-node": "^10.32.1",
"cache-manager": "^7.2.7",
"cache-manager-redis-yet": "^5.1.5",
"clamscan": "^2.4.0",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.3",
"dotenv": "^17.2.3",
"drizzle-orm": "^0.45.1",
"fluent-ffmpeg": "^2.1.3",
"helmet": "^8.1.0",
"iron-session": "^8.0.4",
"jose": "^6.1.3",
"minio": "^8.0.6",
"nodemailer": "^7.0.12",
"otplib": "^12.0.1",
"pg": "^8.16.3",
"qrcode": "^1.5.4",
"reflect-metadata": "^0.2.2",
"rxjs": "^7.8.1",
"sharp": "^0.34.5",
"uuid": "^13.0.0",
"zod": "^4.3.5",
"drizzle-kit": "^0.31.8"
},
"devDependencies": {
"@nestjs/cli": "^11.0.0",
"globals": "^16.0.0",
"jest": "^30.0.0",
"source-map-support": "^0.5.21",
"supertest": "^7.0.0",
"ts-jest": "^29.2.5",
"ts-loader": "^9.5.2",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0",
"tsx": "^4.21.0",
"typescript": "^5.7.3",
"typescript-eslint": "^8.20.0",
"@nestjs/schematics": "^11.0.0",
"@nestjs/testing": "^11.0.1",
"@types/express": "^5.0.0",
"@types/fluent-ffmpeg": "^2.1.28",
"@types/jest": "^30.0.0",
"@types/multer": "^2.0.0",
"@types/node": "^22.10.7",
"@types/nodemailer": "^7.0.4",
"@types/pg": "^8.16.0",
"@types/qrcode": "^1.5.6",
"@types/sharp": "^0.32.0",
"@types/supertest": "^6.0.2",
"@types/uuid": "^11.0.0",
"drizzle-kit": "^0.31.8"
},
"jest": {
"moduleFileExtensions": [
"js",
"json",
"ts"
],
"rootDir": "src",
"testRegex": ".*\\.spec\\.ts$",
"collectCoverageFrom": [
"**/*.(t|j)s"
],
"coverageDirectory": "../coverage",
"testEnvironment": "node",
"transformIgnorePatterns": [
"node_modules/(?!(.pnpm/)?(jose|@noble|uuid)/)"
],
"transform": {
"^.+\\.(t|j)sx?$": "ts-jest"
},
"moduleNameMapper": {
"^@noble/post-quantum/(.*)$": "<rootDir>/../node_modules/@noble/post-quantum/$1",
"^@noble/hashes/(.*)$": "<rootDir>/../node_modules/@noble/hashes/$1"
}
}
}
{
"name": "@memegoat/frontend",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "biome check",
"format": "biome format --write"
},
"dependencies": {
"@hookform/resolvers": "^5.2.2",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-aspect-ratio": "^1.1.8",
"@radix-ui/react-avatar": "^1.1.11",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-context-menu": "^2.2.16",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-hover-card": "^1.1.15",
"@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-menubar": "^1.1.16",
"@radix-ui/react-navigation-menu": "^1.2.14",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-radio-group": "^1.3.8",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tabs": "^1.1.13",
"@radix-ui/react-toggle": "^1.1.10",
"@radix-ui/react-toggle-group": "^1.1.11",
"@radix-ui/react-tooltip": "^1.2.8",
"axios": "^1.13.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"date-fns": "^4.1.0",
"embla-carousel-react": "^8.6.0",
"input-otp": "^1.4.2",
"lucide-react": "^0.562.0",
"next": "16.1.1",
"next-themes": "^0.4.6",
"react": "19.2.3",
"react-day-picker": "^9.13.0",
"react-dom": "19.2.3",
"react-hook-form": "^7.71.1",
"react-resizable-panels": "^4.4.1",
"recharts": "2.15.4",
"sonner": "^2.0.7",
"tailwind-merge": "^3.4.0",
"vaul": "^1.1.2",
"zod": "^4.3.5"
},
"devDependencies": {
"@biomejs/biome": "2.3.11",
"@tailwindcss/postcss": "^4",
"@types/node": "^20",
"@types/react": "^19",
"@types/react-dom": "^19",
"babel-plugin-react-compiler": "1.0.0",
"tailwindcss": "^4",
"tw-animate-css": "^1.4.0",
"typescript": "^5"
}
}

50
ROADMAP.md Normal file
View File

@@ -0,0 +1,50 @@
# 🐐 Memegoat - Roadmap & Production Criteria
This document defines the goals, technical criteria, and features to be met for the Memegoat project to be considered production-ready and compliant with European (GDPR) and French regulations.
## 1. 🏗️ Architecture & Infrastructure
- [x] NestJS backend (TypeScript)
- [x] PostgreSQL database with Drizzle ORM
- [x] S3-compatible object storage (MinIO)
- [x] Emailing service (Nodemailer / SMTPS)
- [x] Technical documentation & API reference (`docs.memegoat.fr`)
- [x] Health checks (`/health`)
- [x] Environment variable management (validated with Zod)
- [ ] CI/CD (Build, Lint, Test, Deploy)
## 2. 🔐 Security & Authentication
- [x] Password hashing (Argon2id)
- [x] Robust session management (JWT with refresh token and rotation)
- [x] Working RBAC (Role Based Access Control)
- [x] API key system (hashed in the database)
- [x] Two-factor authentication (2FA / TOTP)
- [x] Rate limiting (Throttler)
- [x] Strict input validation (DTOs + ValidationPipe)
- [x] Protection against OWASP vulnerabilities (Helmet, CORS)
## 3. ⚖️ GDPR Compliance (EU & France)
- [x] Native encryption of personal data (PII) via PGP (pgcrypto)
- [x] Blind indexing (hashing) of the email address (lookup/uniqueness)
- [x] Full audit logging for sensitive actions
- [x] Consent management (versioned ToS/Privacy Policy)
- [x] Right to erasure: deletion flow (soft delete -> permanent purge)
- [x] Right to data portability: user data export (JSON)
- [x] Automatic purge of stale data (reports, expired sessions)
- [x] IP address anonymization (hashing) in logs
## 4. 🖼️ Core Features (Media & Gallery)
- [x] Browsing (trends, recent, favorites)
- [x] Search by tags, categories, author, text
- [x] Favorites management
- [x] Secure upload via S3 (presigned URLs)
- [x] Antivirus scan (ClamAV) and media processing (WebP, WebM, AVIF, AV1)
- [x] Limits on incoming file size and formats (configurable)
- [x] Reporting system and moderation workflow
- [ ] SEO: dynamic meta tags and semantic slugs
## 5. ✅ Quality & Robustness
- [ ] Unit test coverage (Jest) > 80%
- [ ] Integration and E2E tests
- [x] Centralized error handling (NestJS filters)
- [ ] Monitoring and log centralization (e.g. Sentry, ELK/Loki)
- [ ] Performance: caching (Redis) for trends and frequent searches

View File

@@ -1,4 +1,5 @@
-FROM node:22-slim AS base
+# syntax=docker/dockerfile:1
+FROM node:22-alpine AS base
 ENV PNPM_HOME="/pnpm"
 ENV PATH="$PNPM_HOME:$PATH"
 RUN corepack enable && corepack prepare pnpm@latest --activate
@@ -9,10 +10,17 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-RUN pnpm install --no-frozen-lockfile
+# Use the pnpm cache and a frozen install
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
 COPY . .
-# Reinstall after COPY to make sure all lifecycle scripts and links are correct
-RUN pnpm install --no-frozen-lockfile
+# Second pass with the cache for scripts/links
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
 RUN pnpm run --filter @memegoat/backend build
 RUN pnpm deploy --filter=@memegoat/backend --prod --legacy /app
 RUN cp -r backend/dist /app/dist

View File

@@ -1,6 +1,6 @@
 {
   "name": "@memegoat/backend",
-  "version": "0.0.1",
+  "version": "0.0.0",
   "description": "",
   "author": "",
   "private": true,

View File

@@ -12,6 +12,7 @@ import { AuthModule } from "./auth/auth.module";
 import { CategoriesModule } from "./categories/categories.module";
 import { CommonModule } from "./common/common.module";
 import { CrawlerDetectionMiddleware } from "./common/middlewares/crawler-detection.middleware";
+import { HTTPLoggerMiddleware } from "./common/middlewares/http-logger.middleware";
 import { validateEnv } from "./config/env.schema";
 import { ContentsModule } from "./contents/contents.module";
 import { CryptoModule } from "./crypto/crypto.module";
@@ -76,6 +77,8 @@ import { UsersModule } from "./users/users.module";
 })
 export class AppModule implements NestModule {
   configure(consumer: MiddlewareConsumer) {
-    consumer.apply(CrawlerDetectionMiddleware).forRoutes("*");
+    consumer
+      .apply(HTTPLoggerMiddleware, CrawlerDetectionMiddleware)
+      .forRoutes("*");
   }
 }

View File

@@ -110,6 +110,7 @@ export class AuthService {
     const user = await this.usersService.findByEmailHash(emailHash);
     if (!user) {
+      this.logger.warn(`Login failed: user not found for email hash`);
       throw new UnauthorizedException("Invalid credentials");
     }
@@ -119,10 +120,12 @@
     );
     if (!isPasswordValid) {
+      this.logger.warn(`Login failed: invalid password for user ${user.uuid}`);
       throw new UnauthorizedException("Invalid credentials");
     }
     if (user.isTwoFactorEnabled) {
+      this.logger.log(`2FA required for user ${user.uuid}`);
       return {
         message: "2FA required",
         requires2FA: true,
@@ -141,6 +144,7 @@
       ip,
     );
+    this.logger.log(`User ${user.uuid} logged in successfully`);
     return {
       message: "User logged in successfully",
       access_token: accessToken,
@@ -165,6 +169,9 @@
     const isValid = authenticator.verify({ token, secret });
     if (!isValid) {
+      this.logger.warn(
+        `2FA verification failed for user ${userId}: invalid token`,
+      );
       throw new UnauthorizedException("Invalid 2FA token");
     }
@@ -179,6 +186,7 @@
       ip,
     );
+    this.logger.log(`User ${userId} logged in successfully via 2FA`);
     return {
       message: "User logged in successfully (2FA)",
       access_token: accessToken,

View File

@@ -9,6 +9,14 @@ import {
 import * as Sentry from "@sentry/nestjs";
 import { Request, Response } from "express";
+
+interface RequestWithUser extends Request {
+  user?: {
+    sub?: string;
+    username?: string;
+    id?: string;
+  };
+}
 @Catch()
 export class AllExceptionsFilter implements ExceptionFilter {
   private readonly logger = new Logger("ExceptionFilter");
@@ -16,7 +24,7 @@ export class AllExceptionsFilter implements ExceptionFilter {
   catch(exception: unknown, host: ArgumentsHost) {
     const ctx = host.switchToHttp();
     const response = ctx.getResponse<Response>();
-    const request = ctx.getRequest<Request>();
+    const request = ctx.getRequest<RequestWithUser>();
     const status =
       exception instanceof HttpException
@@ -28,6 +36,9 @@ export class AllExceptionsFilter implements ExceptionFilter {
         ? exception.getResponse()
         : "Internal server error";
+    const userId = request.user?.sub || request.user?.id;
+    const userPart = userId ? `[User: ${userId}] ` : "";
+
     const errorResponse = {
       statusCode: status,
       timestamp: new Date().toISOString(),
@@ -42,12 +53,12 @@ export class AllExceptionsFilter implements ExceptionFilter {
     if (status === HttpStatus.INTERNAL_SERVER_ERROR) {
       Sentry.captureException(exception);
       this.logger.error(
-        `${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
+        `${userPart}${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
         exception instanceof Error ? exception.stack : "",
       );
     } else {
       this.logger.warn(
-        `${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
+        `${userPart}${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
       );
     }

View File

@@ -33,4 +33,6 @@ export interface IStorageService {
     sourceBucketName?: string,
     destinationBucketName?: string,
   ): Promise<string>;
+
+  getPublicUrl(storageKey: string): string;
 }

View File

@@ -0,0 +1,37 @@
import { createHash } from "node:crypto";
import { Injectable, Logger, NestMiddleware } from "@nestjs/common";
import { NextFunction, Request, Response } from "express";

@Injectable()
export class HTTPLoggerMiddleware implements NestMiddleware {
  private readonly logger = new Logger("HTTP");

  use(request: Request, response: Response, next: NextFunction): void {
    const { method, originalUrl, ip } = request;
    const userAgent = request.get("user-agent") || "";
    const startTime = Date.now();

    response.on("finish", () => {
      const { statusCode } = response;
      const contentLength = response.get("content-length");
      const duration = Date.now() - startTime;
      const hashedIp = createHash("sha256")
        .update(ip as string)
        .digest("hex");

      const message = `${method} ${originalUrl} ${statusCode} ${contentLength || 0} - ${userAgent} ${hashedIp} +${duration}ms`;

      if (statusCode >= 500) {
        return this.logger.error(message);
      }
      if (statusCode >= 400) {
        return this.logger.warn(message);
      }
      return this.logger.log(message);
    });

    next();
  }
}

View File

@@ -33,6 +33,7 @@ export const envSchema = z.object({
   MAIL_FROM: z.string().email(),
   DOMAIN_NAME: z.string(),
+  API_URL: z.string().url().optional(),
   // Sentry
   SENTRY_DSN: z.string().optional(),

View File

@@ -30,6 +30,7 @@ describe("ContentsService", () => {
   const mockS3Service = {
     getUploadUrl: jest.fn(),
     uploadFile: jest.fn(),
+    getPublicUrl: jest.fn(),
   };
   const mockMediaService = {

View File

@@ -100,6 +100,7 @@ export class ContentsService {
     // 3. Upload to S3
     const key = `contents/${userId}/${Date.now()}-${uuidv4()}.${processed.extension}`;
     await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
+    this.logger.log(`File uploaded successfully to S3: ${key}`);
     // 4. Create the database record
     return await this.create(userId, {
@@ -128,11 +129,11 @@
     const processedData = data.map((content) => ({
       ...content,
-      url: this.getFileUrl(content.storageKey),
+      url: this.s3Service.getPublicUrl(content.storageKey),
       author: {
         ...content.author,
         avatarUrl: content.author?.avatarUrl
-          ? this.getFileUrl(content.author.avatarUrl)
+          ? this.s3Service.getPublicUrl(content.author.avatarUrl)
           : null,
       },
     }));
@@ -189,18 +190,18 @@
     return {
       ...content,
-      url: this.getFileUrl(content.storageKey),
+      url: this.s3Service.getPublicUrl(content.storageKey),
       author: {
         ...content.author,
         avatarUrl: content.author?.avatarUrl
-          ? this.getFileUrl(content.author.avatarUrl)
+          ? this.s3Service.getPublicUrl(content.author.avatarUrl)
           : null,
       },
     };
   }
   generateBotHtml(content: { title: string; storageKey: string }): string {
-    const imageUrl = this.getFileUrl(content.storageKey);
+    const imageUrl = this.s3Service.getPublicUrl(content.storageKey);
     return `<!DOCTYPE html>
<html>
  <head>
@@ -221,19 +222,6 @@
 </html>`;
   }
-  getFileUrl(storageKey: string): string {
-    const endpoint = this.configService.get("S3_ENDPOINT");
-    const port = this.configService.get("S3_PORT");
-    const protocol =
-      this.configService.get("S3_USE_SSL") === true ? "https" : "http";
-    const bucket = this.configService.get("S3_BUCKET_NAME");
-    if (endpoint === "localhost" || endpoint === "127.0.0.1") {
-      return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
-    }
-    return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
-  }
   private generateSlug(text: string): string {
     return text
       .toLowerCase()

View File

@@ -0,0 +1,61 @@
import { Readable } from "node:stream";
import { NotFoundException } from "@nestjs/common";
import { Test, TestingModule } from "@nestjs/testing";
import type { Response } from "express";
import { S3Service } from "../s3/s3.service";
import { MediaController } from "./media.controller";

describe("MediaController", () => {
  let controller: MediaController;

  const mockS3Service = {
    getFileInfo: jest.fn(),
    getFile: jest.fn(),
  };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [MediaController],
      providers: [{ provide: S3Service, useValue: mockS3Service }],
    }).compile();

    controller = module.get<MediaController>(MediaController);
  });

  it("should be defined", () => {
    expect(controller).toBeDefined();
  });

  describe("getFile", () => {
    it("should stream the file and set headers with path containing slashes", async () => {
      const res = {
        setHeader: jest.fn(),
      } as unknown as Response;
      const stream = new Readable();
      stream.pipe = jest.fn();
      const key = "contents/user-id/test.webp";

      mockS3Service.getFileInfo.mockResolvedValue({
        size: 100,
        metaData: { "content-type": "image/webp" },
      });
      mockS3Service.getFile.mockResolvedValue(stream);

      await controller.getFile(key, res);

      expect(mockS3Service.getFileInfo).toHaveBeenCalledWith(key);
      expect(res.setHeader).toHaveBeenCalledWith("Content-Type", "image/webp");
      expect(res.setHeader).toHaveBeenCalledWith("Content-Length", 100);
      expect(stream.pipe).toHaveBeenCalledWith(res);
    });

    it("should throw NotFoundException if file is not found", async () => {
      mockS3Service.getFileInfo.mockRejectedValue(new Error("Not found"));
      const res = {} as unknown as Response;

      await expect(controller.getFile("invalid", res)).rejects.toThrow(
        NotFoundException,
      );
    });
  });
});

View File

@@ -0,0 +1,30 @@
import { Controller, Get, NotFoundException, Param, Res } from "@nestjs/common";
import type { Response } from "express";
import type { BucketItemStat } from "minio";
import { S3Service } from "../s3/s3.service";

@Controller("media")
export class MediaController {
  constructor(private readonly s3Service: S3Service) {}

  @Get("*key")
  async getFile(@Param("key") key: string, @Res() res: Response) {
    try {
      const stats = (await this.s3Service.getFileInfo(key)) as BucketItemStat;
      const stream = await this.s3Service.getFile(key);

      const contentType =
        stats.metaData?.["content-type"] ||
        stats.metadata?.["content-type"] ||
        "application/octet-stream";

      res.setHeader("Content-Type", contentType);
      res.setHeader("Content-Length", stats.size);
      res.setHeader("Cache-Control", "public, max-age=31536000, immutable");

      stream.pipe(res);
    } catch (_error) {
      throw new NotFoundException("Fichier non trouvé");
    }
  }
}

View File

@@ -1,9 +1,13 @@
 import { Module } from "@nestjs/common";
+import { S3Module } from "../s3/s3.module";
+import { MediaController } from "./media.controller";
 import { MediaService } from "./media.service";
 import { ImageProcessorStrategy } from "./strategies/image-processor.strategy";
 import { VideoProcessorStrategy } from "./strategies/video-processor.strategy";
 @Module({
+  imports: [S3Module],
+  controllers: [MediaController],
   providers: [MediaService, ImageProcessorStrategy, VideoProcessorStrategy],
   exports: [MediaService],
 })

View File

@@ -7,7 +7,7 @@ jest.mock("minio");
describe("S3Service", () => { describe("S3Service", () => {
let service: S3Service; let service: S3Service;
let _configService: ConfigService; let configService: ConfigService;
// biome-ignore lint/suspicious/noExplicitAny: Fine for testing purposes // biome-ignore lint/suspicious/noExplicitAny: Fine for testing purposes
let minioClient: any; let minioClient: any;
@@ -42,7 +42,7 @@ describe("S3Service", () => {
}).compile(); }).compile();
service = module.get<S3Service>(S3Service); service = module.get<S3Service>(S3Service);
_configService = module.get<ConfigService>(ConfigService); configService = module.get<ConfigService>(ConfigService);
}); });
it("should be defined", () => { it("should be defined", () => {
@@ -185,35 +185,39 @@ describe("S3Service", () => {
}); });
}); });
describe("moveFile", () => { describe("getPublicUrl", () => {
it("should move file within default bucket", async () => { it("should use API_URL if provided", () => {
const source = "source.txt"; (configService.get as jest.Mock).mockImplementation((key: string) => {
const dest = "dest.txt"; if (key === "API_URL") return "https://api.test.com";
await service.moveFile(source, dest); return null;
});
expect(minioClient.copyObject).toHaveBeenCalledWith( const url = service.getPublicUrl("test.webp");
"memegoat", expect(url).toBe("https://api.test.com/media/test.webp");
dest,
"/memegoat/source.txt",
expect.any(Minio.CopyConditions),
);
expect(minioClient.removeObject).toHaveBeenCalledWith("memegoat", source);
}); });
it("should move file between different buckets", async () => { it("should use DOMAIN_NAME and PORT for localhost", () => {
const source = "source.txt"; (configService.get as jest.Mock).mockImplementation(
const dest = "dest.txt"; (key: string, def: unknown) => {
const sBucket = "source-bucket"; if (key === "API_URL") return null;
const dBucket = "dest-bucket"; if (key === "DOMAIN_NAME") return "localhost";
await service.moveFile(source, dest, sBucket, dBucket); if (key === "PORT") return 3000;
return def;
expect(minioClient.copyObject).toHaveBeenCalledWith( },
dBucket,
dest,
`/${sBucket}/${source}`,
expect.any(Minio.CopyConditions),
); );
expect(minioClient.removeObject).toHaveBeenCalledWith(sBucket, source); const url = service.getPublicUrl("test.webp");
expect(url).toBe("http://localhost:3000/media/test.webp");
});
it("should use api.DOMAIN_NAME for production", () => {
(configService.get as jest.Mock).mockImplementation(
(key: string, def: unknown) => {
if (key === "API_URL") return null;
if (key === "DOMAIN_NAME") return "memegoat.fr";
return def;
},
);
const url = service.getPublicUrl("test.webp");
expect(url).toBe("https://api.memegoat.fr/media/test.webp");
}); });
}); });
}); });

View File

@@ -54,6 +54,7 @@ export class S3Service implements OnModuleInit, IStorageService {
         ...metaData,
         "Content-Type": mimeType,
       });
+      this.logger.log(`File uploaded successfully: ${fileName} to ${bucketName}`);
       return fileName;
     } catch (error) {
       this.logger.error(`Error uploading file to ${bucketName}: ${error.message}`);
@@ -113,6 +114,7 @@
   async deleteFile(fileName: string, bucketName: string = this.bucketName) {
     try {
       await this.minioClient.removeObject(bucketName, fileName);
+      this.logger.log(`File deleted successfully: ${fileName} from ${bucketName}`);
     } catch (error) {
       this.logger.error(
         `Error deleting file from ${bucketName}: ${error.message}`,
@@ -155,4 +157,22 @@
       throw error;
     }
   }
+
+  getPublicUrl(storageKey: string): string {
+    const apiUrl = this.configService.get<string>("API_URL");
+    const domain = this.configService.get<string>("DOMAIN_NAME", "localhost");
+    const port = this.configService.get<number>("PORT", 3000);
+
+    let baseUrl: string;
+    if (apiUrl) {
+      baseUrl = apiUrl.replace(/\/$/, "");
+    } else if (domain === "localhost" || domain === "127.0.0.1") {
+      baseUrl = `http://${domain}:${port}`;
+    } else {
+      baseUrl = `https://api.${domain}`;
+    }
+
+    return `${baseUrl}/media/${storageKey}`;
+  }
 }

View File

@@ -58,6 +58,7 @@ describe("UsersService", () => {
   const mockS3Service = {
     uploadFile: jest.fn(),
+    getPublicUrl: jest.fn(),
   };
   const mockConfigService = {

View File

@@ -6,7 +6,6 @@ import {
   Injectable,
   Logger,
 } from "@nestjs/common";
-import { ConfigService } from "@nestjs/config";
 import type { Cache } from "cache-manager";
 import { v4 as uuidv4 } from "uuid";
 import { RbacService } from "../auth/rbac.service";
@@ -28,7 +27,6 @@
     private readonly rbacService: RbacService,
     @Inject(MediaService) private readonly mediaService: IMediaService,
     @Inject(S3Service) private readonly s3Service: IStorageService,
-    private readonly configService: ConfigService,
   ) {}
   private async clearUserCache(username?: string) {
@@ -60,7 +58,9 @@
     return {
       ...user,
-      avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
+      avatarUrl: user.avatarUrl
+        ? this.s3Service.getPublicUrl(user.avatarUrl)
+        : null,
       role: roles.includes("admin") ? "admin" : "user",
       roles,
     };
@@ -74,7 +74,9 @@
     const processedData = data.map((user) => ({
       ...user,
-      avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
+      avatarUrl: user.avatarUrl
+        ? this.s3Service.getPublicUrl(user.avatarUrl)
+        : null,
     }));
     return { data: processedData, totalCount };
@@ -86,7 +88,9 @@
     return {
       ...user,
-      avatarUrl: user.avatarUrl ? this.getFileUrl(user.avatarUrl) : null,
+      avatarUrl: user.avatarUrl
+        ? this.s3Service.getPublicUrl(user.avatarUrl)
+        : null,
     };
   }
@@ -139,6 +143,7 @@
     // 3. Upload to S3
     const key = `avatars/${uuid}/${Date.now()}-${uuidv4()}.${processed.extension}`;
     await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
+    this.logger.log(`Avatar uploaded successfully to S3: ${key}`);
     // 4. Update the database
     const user = await this.update(uuid, { avatarUrl: key });
@@ -193,17 +198,4 @@
   async remove(uuid: string) {
     return await this.usersRepository.softDeleteUserAndContents(uuid);
   }
-
-  private getFileUrl(storageKey: string): string {
-    const endpoint = this.configService.get("S3_ENDPOINT");
-    const port = this.configService.get("S3_PORT");
-    const protocol =
-      this.configService.get("S3_USE_SSL") === true ? "https" : "http";
-    const bucket = this.configService.get("S3_BUCKET_NAME");
-
-    if (endpoint === "localhost" || endpoint === "127.0.0.1") {
-      return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
-    }
-    return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
-  }
 }

View File

@@ -1,4 +1,4 @@
-# syntax=docker.io/docker/dockerfile:1
+# syntax=docker/dockerfile:1
 FROM node:22-alpine AS base
 ENV PNPM_HOME="/pnpm"
@@ -11,11 +11,20 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-RUN pnpm install --no-frozen-lockfile
+# Mount the pnpm cache
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
 COPY . .
-# Reinstall after COPY to make sure all lifecycle scripts and links are correct
-RUN pnpm install --no-frozen-lockfile
-RUN pnpm run --filter @memegoat/documentation build
+# Second pass with the cache for scripts/links
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+# Build with the Next.js cache
+RUN --mount=type=cache,id=next-docs-cache,target=/usr/src/app/documentation/.next/cache \
+    pnpm run --filter @memegoat/documentation build
 FROM node:22-alpine AS runner
 WORKDIR /app

View File

@@ -1,4 +1,4 @@
-# syntax=docker.io/docker/dockerfile:1
+# syntax=docker/dockerfile:1
 FROM node:22-alpine AS base
 ENV PNPM_HOME="/pnpm"
@@ -11,11 +11,20 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-RUN pnpm install --no-frozen-lockfile
+# Mount the pnpm cache
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
 COPY . .
-# Reinstall after COPY to make sure all lifecycle scripts and links are correct
-RUN pnpm install --no-frozen-lockfile
-RUN pnpm run --filter @memegoat/frontend build
+# Second pass with the cache for scripts/links
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+# Build with the Next.js cache
+RUN --mount=type=cache,id=next-cache,target=/usr/src/app/frontend/.next/cache \
+    pnpm run --filter @memegoat/frontend build
 FROM node:22-alpine AS runner
 WORKDIR /app

View File

@@ -1,6 +1,6 @@
 {
   "name": "@memegoat/frontend",
-  "version": "0.0.1",
+  "version": "0.0.0",
   "private": true,
   "scripts": {
     "dev": "next dev",

View File

@@ -63,7 +63,7 @@ export default function HelpPage() {
<p className="text-muted-foreground"> <p className="text-muted-foreground">
N'hésitez pas à nous contacter sur nos réseaux sociaux ou par email. N'hésitez pas à nous contacter sur nos réseaux sociaux ou par email.
</p> </p>
<p className="font-semibold text-primary">contact@memegoat.local</p> <p className="font-semibold text-primary">contact@memegoat.fr</p>
</div> </div>
</div> </div>
); );

View File

@@ -228,7 +228,7 @@ export function AppSidebar() {
<span className="truncate font-semibold"> <span className="truncate font-semibold">
{user.displayName || user.username} {user.displayName || user.username}
</span> </span>
<span className="truncate text-xs">{user.email}</span> <span className="truncate text-xs">{user.role}</span>
</div> </div>
<ChevronRight className="ml-auto size-4 group-data-[collapsible=icon]:hidden" /> <ChevronRight className="ml-auto size-4 group-data-[collapsible=icon]:hidden" />
</SidebarMenuButton> </SidebarMenuButton>

30
frontend/todo.md Normal file
View File

@@ -0,0 +1,30 @@
Frontend implementation:
# Requirements
- Responsive at all breakpoints (Tailwind CSS)
- Accessibility (A11Y)
- Real implementation only
- Site in French
- Fully implemented SEO: robots.txt, sitemap.xml, etc.
- Use of shadcn/ui components
- A custom error page
- Use of the following Next.js features:
- Nested routes
- Dynamic routes
- Route groups
- Private folders
- Parallel and intercepted routes
- Prefetching pages
- Streaming pages
- Server and Client Components
- Cache Components
- Image optimization
- Incremental Static Regeneration
- Custom hooks
- Axios
The whole application is built around a dashboard/sidebar layout that integrates the routing.
The main page is the content browsing page.
On desktop, the sidebar sits on the left, the infinitely scrolling content in the middle, and the search parameters on the right.
On mobile, the sidebar is collapsed and the search parameters are represented by a floating filter icon at the top right.

View File

@@ -1,8 +1,10 @@
 {
   "name": "@memegoat/source",
-  "version": "0.0.1",
+  "version": "0.0.0",
   "description": "",
   "scripts": {
+    "version:get": "cmake -P version.cmake GET",
+    "version:set": "cmake -P version.cmake SET",
     "build": "pnpm run build:back && pnpm run build:front && pnpm run build:docs",
     "build:front": "pnpm run -F @memegoat/frontend build",
     "build:back": "pnpm run -F @memegoat/backend build",

84
version.cmake Normal file
View File

@@ -0,0 +1,84 @@
# version.cmake - Script to manage the SemVer version centrally
# Usage: cmake -P version.cmake [GET|SET] [new_version]
set(PACKAGE_JSON_FILES
"${CMAKE_CURRENT_LIST_DIR}/package.json"
"${CMAKE_CURRENT_LIST_DIR}/backend/package.json"
"${CMAKE_CURRENT_LIST_DIR}/frontend/package.json"
)
# Function to read the version from the root package.json
function(get_current_version OUT_VAR)
file(READ "${CMAKE_CURRENT_LIST_DIR}/package.json" ROOT_JSON)
string(JSON CURRENT_VERSION GET "${ROOT_JSON}" "version")
set(${OUT_VAR} ${CURRENT_VERSION} PARENT_SCOPE)
endfunction()
# Function to create a git tag
function(create_git_tag VERSION)
find_package(Git QUIET)
if(GIT_FOUND)
execute_process(
COMMAND ${GIT_EXECUTABLE} tag -a "v${VERSION}" -m "Release v${VERSION}"
WORKING_DIRECTORY "${CMAKE_CURRENT_LIST_DIR}"
RESULT_VARIABLE TAG_RESULT
)
if(TAG_RESULT EQUAL 0)
message(STATUS "Tag v${VERSION} créé avec succès")
else()
message(WARNING "Échec de la création du tag v${VERSION}. Il existe peut-être déjà.")
endif()
else()
message(WARNING "Git non trouvé, impossible de créer le tag.")
endif()
endfunction()
# Function to update the version in all package.json files
function(set_new_version NEW_VERSION)
foreach(JSON_FILE ${PACKAGE_JSON_FILES})
if(EXISTS "${JSON_FILE}")
message(STATUS "Mise à jour de ${JSON_FILE} vers la version ${NEW_VERSION}")
file(READ "${JSON_FILE}" CONTENT)
# Use string(JSON ...) to modify the version when available (CMake >= 3.19)
# Otherwise a simple regex works for package.json
string(REGEX REPLACE "\"version\": \"[^\"]+\"" "\"version\": \"${NEW_VERSION}\"" NEW_CONTENT "${CONTENT}")
file(WRITE "${JSON_FILE}" "${NEW_CONTENT}")
else()
message(WARNING "Fichier non trouvé: ${JSON_FILE}")
endif()
endforeach()
# Ask the user whether to tag (or do it by default if specified)
create_git_tag(${NEW_VERSION})
endfunction()
# Main logic
set(ARG_OFFSET 0)
while(ARG_OFFSET LESS CMAKE_ARGC)
if("${CMAKE_ARGV${ARG_OFFSET}}" STREQUAL "-P")
math(EXPR COMMAND_INDEX "${ARG_OFFSET} + 2")
math(EXPR VERSION_INDEX "${ARG_OFFSET} + 3")
break()
endif()
math(EXPR ARG_OFFSET "${ARG_OFFSET} + 1")
endwhile()
if(NOT DEFINED COMMAND_INDEX OR COMMAND_INDEX GREATER_EQUAL CMAKE_ARGC)
message(FATAL_ERROR "Usage: cmake -P version.cmake [GET|SET] [new_version]")
endif()
set(COMMAND "${CMAKE_ARGV${COMMAND_INDEX}}")
if("${COMMAND}" STREQUAL "GET")
get_current_version(VERSION)
message("${VERSION}")
elseif("${COMMAND}" STREQUAL "SET")
if(VERSION_INDEX GREATER_EQUAL CMAKE_ARGC)
message(FATAL_ERROR "Veuillez spécifier la nouvelle version: cmake -P version.cmake SET 0.0.0")
endif()
set(NEW_VERSION "${CMAKE_ARGV${VERSION_INDEX}}")
set_new_version("${NEW_VERSION}")
else()
message(FATAL_ERROR "Commande inconnue: ${COMMAND}. Utilisez GET ou SET.")
endif()