Compare commits
186 Commits
a40bf9a18e
...
v0.0.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
597a4d615e
|
||
|
|
2df45af305
|
||
|
|
863a4bf528
|
||
|
|
9a1cdb05a4
|
||
|
|
28caf92f9a
|
||
|
|
8b2728dc5a
|
||
|
|
3bbbbc307f
|
||
|
|
f080919563
|
||
|
|
edc1ab2438
|
||
|
|
01b66d6f2f
|
||
|
|
9a70dd02bb
|
||
|
|
e285a4e634
|
||
|
|
f247a01ac7
|
||
|
|
bb640cd8f9
|
||
|
|
c1118e9f25
|
||
|
|
eae1f84b92
|
||
|
|
8d27532dc0
|
||
|
|
f79507730e
|
||
|
|
7048c2731e
|
||
|
|
d74fd15036
|
||
|
|
86a697c392
|
||
|
|
38adbb6e77
|
||
| 594a387712 | |||
|
|
4ca15b578d
|
||
| 2912231769 | |||
|
|
db17994bb5
|
||
|
|
f57e028178
|
||
|
|
e84aa8a8db
|
||
|
|
c6b23de481
|
||
|
|
0611ef715c
|
||
|
|
0a1391674f
|
||
|
|
2fedaca502
|
||
|
|
a6837ff7fb
|
||
|
|
74b61004e7
|
||
|
|
760343da76
|
||
|
|
14f8b8b63d
|
||
|
|
50a186da1d
|
||
|
|
3908989b39
|
||
|
|
02d70f27ea
|
||
|
|
65f8860cc0
|
||
|
|
0e9edd4bfc
|
||
|
|
6ce58d1639
|
||
|
|
47d6fcb6a0
|
||
|
|
d7c2a965a0
|
||
|
|
fb7ddde42e
|
||
|
|
026aebaee3
|
||
|
|
a30113e8e2
|
||
| f10c444957 | |||
|
|
975e29dea1
|
||
|
|
a4ce48a91c
|
||
|
|
ff6fc1c6b3
|
||
|
|
5671ba60a6
|
||
|
|
5f2672021e
|
||
| 17c2cea366 | |||
| 5665fcd98f | |||
| cb6d87eafd | |||
| 48ebc7dc36 | |||
| dbfd14b57a | |||
| 570576435c | |||
| 7c3f4050c5 | |||
| c19d86a0cb | |||
| 6d2e1ead05 | |||
| 6756cf6bc7 | |||
| 6aaf53c90b | |||
| ccec39bfa0 | |||
| a06fdbf21e | |||
| de537e5947 | |||
|
|
0cb361afb8
|
||
| 9097a3e9b5 | |||
|
|
24eb99093c
|
||
|
|
75ac95cadb
|
||
|
|
35abd0496e
|
||
|
|
03e5915fcc
|
||
|
|
77ac960411
|
||
|
|
8425ffe4fc
|
||
|
|
b81835661c
|
||
|
|
fbc231dc9a
|
||
|
|
37a23390d5
|
||
|
|
bd9dd140ab
|
||
|
|
5b6e0143b6
|
||
|
|
214bf077e5
|
||
|
|
bb9ae058db
|
||
|
|
0b07320974
|
||
|
|
0c045e8d3c
|
||
|
|
8ffeaeba05
|
||
|
|
9e37272bff
|
||
|
|
7cb5ff487d
|
||
|
|
0cef694f2b
|
||
|
|
5c4badb837
|
||
|
|
b53c51b825
|
||
|
|
76de69fc64
|
||
|
|
ec8eb8d43a
|
||
|
|
514bd354bf
|
||
|
|
9c45bf11e4
|
||
|
|
5a22ad7480
|
||
|
|
9654553940
|
||
|
|
a5a8626f5d
|
||
|
|
64adc80062
|
||
|
|
702868dec2
|
||
|
|
399bdab86c
|
||
|
|
cc2823db7d
|
||
|
|
6254c136d1
|
||
|
|
3828f170e2
|
||
|
|
ec771eb074
|
||
|
|
77263aead9
|
||
|
|
ab74dc3b30
|
||
|
|
acd53eff6a
|
||
|
|
91e23c2c02
|
||
|
|
f508e8ee6d
|
||
|
|
3c02bd6023
|
||
|
|
6e823743fc
|
||
|
|
99a350aa05
|
||
|
|
8b51b84d44
|
||
|
|
0af6f6b52a
|
||
|
|
382e39ebd0
|
||
|
|
65b7cba6b1
|
||
|
|
f7d85108e1
|
||
|
|
d5775a821e
|
||
|
|
add7cab7df
|
||
|
|
da5f18bf92
|
||
|
|
a0836c8392
|
||
|
|
9963046e41
|
||
|
|
dde1bf522f
|
||
|
|
dd875fe1ea
|
||
|
|
92ea36545a
|
||
|
|
912394477b
|
||
|
|
fe309bc1e3
|
||
|
|
342e9b99da
|
||
|
|
e210f1f95f
|
||
|
|
2218768adb
|
||
|
|
705f1ad6e0
|
||
|
|
42805e371e
|
||
|
|
9406ed9350
|
||
|
|
9ab737b8c7
|
||
|
|
b3035eb2ab
|
||
|
|
a6fdbdb06d
|
||
|
|
48b233eae4
|
||
|
|
89bd9d65e7
|
||
|
|
8cf1699717
|
||
| 93b86a6b7a | |||
| 3363ef52ef | |||
| 06d2a65567 | |||
| fd32a14221 | |||
| e3f9197abb | |||
| cee4d41ef0 | |||
|
|
187c51f932
|
||
|
|
c1bc68e3e3
|
||
|
|
810acd8ed4
|
||
|
|
adceada1b6
|
||
|
|
dfba0c0adb
|
||
|
|
6074917bfb
|
||
|
|
86543eeb4f
|
||
|
|
38e97741e0
|
||
|
|
bfce5b2964
|
||
|
|
b22129c4dd
|
||
|
|
cadc497dec
|
||
|
|
0b84e0aecc
|
||
|
|
ac5cb96f97
|
||
|
|
2389d2c2c6
|
||
|
|
694031c05b
|
||
|
|
cbf7bfcb0a
|
||
|
|
9fb890699a
|
||
|
|
9439c004e2
|
||
|
|
27954daf64
|
||
|
|
7001082fb2
|
||
|
|
04ca5090df
|
||
|
|
19ceac1303
|
||
|
|
381ca24501
|
||
|
|
eefe2906ed
|
||
|
|
8ee0491c96
|
||
|
|
73aea94d88
|
||
|
|
7761e26d32
|
||
|
|
6c4f1694ba
|
||
|
|
0a84ad1595
|
||
|
|
43b4334971
|
||
|
|
07f905d7c9
|
||
|
|
72f3bb7723
|
||
|
|
fd7409fe09
|
||
|
|
e8617b8042
|
||
|
|
824cdbe2b0
|
||
|
|
7941779451
|
||
|
|
f8a27f868c
|
||
|
|
cabefe3186
|
||
|
|
4d776c5c16
|
||
|
|
91179199f7
|
||
|
|
c1acc9f16b
|
7
.dockerignore
Normal file
7
.dockerignore
Normal file
@@ -0,0 +1,7 @@
|
||||
node_modules
|
||||
.git
|
||||
.gitignore
|
||||
.next
|
||||
dist
|
||||
.env
|
||||
*.log
|
||||
48
.env.example
Normal file
48
.env.example
Normal file
@@ -0,0 +1,48 @@
|
||||
# Global
|
||||
NODE_ENV=development
|
||||
|
||||
# Backend
|
||||
BACKEND_PORT=3001
|
||||
|
||||
# Frontend
|
||||
FRONTEND_PORT=3000
|
||||
|
||||
# Database (PostgreSQL)
|
||||
POSTGRES_HOST=db
|
||||
POSTGRES_PORT=5432
|
||||
POSTGRES_DB=app
|
||||
POSTGRES_USER=app
|
||||
POSTGRES_PASSWORD=app
|
||||
|
||||
# Redis
|
||||
REDIS_HOST=redis
|
||||
REDIS_PORT=6379
|
||||
|
||||
# Storage (S3/MinIO)
|
||||
S3_ENDPOINT=s3
|
||||
S3_PORT=9000
|
||||
S3_ACCESS_KEY=minioadmin
|
||||
S3_SECRET_KEY=minioadmin
|
||||
S3_BUCKET_NAME=memegoat
|
||||
|
||||
# Security
|
||||
JWT_SECRET=super-secret-jwt-key-change-me-in-prod
|
||||
ENCRYPTION_KEY=01234567890123456789012345678901
|
||||
PGP_ENCRYPTION_KEY=super-secret-pgp-key
|
||||
SESSION_PASSWORD=super-secret-session-password-32-chars
|
||||
|
||||
# Mail
|
||||
MAIL_HOST=mail
|
||||
MAIL_PORT=1025
|
||||
MAIL_SECURE=false
|
||||
MAIL_USER=
|
||||
MAIL_PASS=
|
||||
MAIL_FROM=noreply@memegoat.local
|
||||
DOMAIN_NAME=localhost
|
||||
|
||||
ENABLE_CORS=false
|
||||
CORS_DOMAIN_NAME=localhost
|
||||
|
||||
# Media Limits (in KB)
|
||||
MAX_IMAGE_SIZE_KB=512
|
||||
MAX_GIF_SIZE_KB=1024
|
||||
36
.gitea/workflows/backend-tests.yml
Normal file
36
.gitea/workflows/backend-tests.yml
Normal file
@@ -0,0 +1,36 @@
|
||||
name: Backend Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'backend/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'backend/**'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile --prefer-offline
|
||||
- name: Run Backend Tests
|
||||
run: pnpm -F @memegoat/backend test
|
||||
89
.gitea/workflows/deploy.yml
Normal file
89
.gitea/workflows/deploy.yml
Normal file
@@ -0,0 +1,89 @@
|
||||
name: Deploy to Production
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
name: Validate Build & Lint
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
component: [backend, frontend, documentation]
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
|
||||
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile --prefer-offline
|
||||
|
||||
- name: Lint ${{ matrix.component }}
|
||||
run: pnpm -F @memegoat/${{ matrix.component }} lint
|
||||
|
||||
- name: Build ${{ matrix.component }}
|
||||
run: pnpm -F @memegoat/${{ matrix.component }} build
|
||||
env:
|
||||
NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}
|
||||
|
||||
deploy:
|
||||
name: Deploy to Production
|
||||
needs: validate
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Deploy with Docker Compose
|
||||
run: |
|
||||
docker compose -f docker-compose.prod.yml up -d --build
|
||||
env:
|
||||
BACKEND_PORT: ${{ secrets.BACKEND_PORT }}
|
||||
FRONTEND_PORT: ${{ secrets.FRONTEND_PORT }}
|
||||
POSTGRES_HOST: ${{ secrets.POSTGRES_HOST }}
|
||||
POSTGRES_PORT: ${{ secrets.POSTGRES_PORT }}
|
||||
POSTGRES_USER: ${{ secrets.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
|
||||
POSTGRES_DB: ${{ secrets.POSTGRES_DB }}
|
||||
REDIS_HOST: ${{ secrets.REDIS_HOST }}
|
||||
REDIS_PORT: ${{ secrets.REDIS_PORT }}
|
||||
S3_ENDPOINT: ${{ secrets.S3_ENDPOINT }}
|
||||
S3_PORT: ${{ secrets.S3_PORT }}
|
||||
S3_ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
|
||||
S3_SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
|
||||
S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }}
|
||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||
ENCRYPTION_KEY: ${{ secrets.ENCRYPTION_KEY }}
|
||||
PGP_ENCRYPTION_KEY: ${{ secrets.PGP_ENCRYPTION_KEY }}
|
||||
SESSION_PASSWORD: ${{ secrets.SESSION_PASSWORD }}
|
||||
MAIL_HOST: ${{ secrets.MAIL_HOST }}
|
||||
MAIL_PASS: ${{ secrets.MAIL_PASS }}
|
||||
MAIL_USER: ${{ secrets.MAIL_USER }}
|
||||
MAIL_FROM: ${{ secrets.MAIL_FROM }}
|
||||
DOMAIN_NAME: ${{ secrets.DOMAIN_NAME }}
|
||||
NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}
|
||||
@@ -1,25 +0,0 @@
|
||||
name: Backend Lint
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'backend/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'backend/**'
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: 'pnpm'
|
||||
- name: Install dependencies
|
||||
run: pnpm install
|
||||
- name: Run lint
|
||||
run: pnpm -F @memegoat/backend lint
|
||||
@@ -1,25 +0,0 @@
|
||||
name: Documentation Lint
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'documentation/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'documentation/**'
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: 'pnpm'
|
||||
- name: Install dependencies
|
||||
run: pnpm install
|
||||
- name: Run lint
|
||||
run: pnpm -F @memegoat/documentation lint
|
||||
@@ -1,25 +0,0 @@
|
||||
name: Frontend Lint
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: 'pnpm'
|
||||
- name: Install dependencies
|
||||
run: pnpm install
|
||||
- name: Run lint
|
||||
run: pnpm -F @memegoat/frontend lint
|
||||
43
.gitea/workflows/lint.yml
Normal file
43
.gitea/workflows/lint.yml
Normal file
@@ -0,0 +1,43 @@
|
||||
name: Lint
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- 'backend/**'
|
||||
- 'documentation/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- 'backend/**'
|
||||
- 'documentation/**'
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
component: [backend, frontend, documentation]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Get pnpm store directory
|
||||
id: pnpm-cache
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"
|
||||
- uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile --prefer-offline
|
||||
- name: Lint ${{ matrix.component }}
|
||||
run: pnpm -F @memegoat/${{ matrix.component }} lint
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,6 +1,7 @@
|
||||
# Dependencies
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
.pnpm-store
|
||||
|
||||
# Environment variables
|
||||
.env
|
||||
|
||||
225
.output.txt
Normal file
225
.output.txt
Normal file
@@ -0,0 +1,225 @@
|
||||
{
|
||||
"name": "@memegoat/source",
|
||||
"version": "0.0.1",
|
||||
"description": "",
|
||||
"scripts": {
|
||||
"build": "pnpm run build:back && pnpm run build:front && pnpm run build:docs",
|
||||
"build:front": "pnpm run -F @memegoat/frontend build",
|
||||
"build:back": "pnpm run -F @memegoat/backend build",
|
||||
"build:docs": "pnpm run -F @memegoat/documentation build",
|
||||
"lint": "pnpm run lint:back && pnpm run lint:front && pnpm run lint:docs",
|
||||
"lint:back": "pnpm run -F @memegoat/backend lint",
|
||||
"lint:front": "pnpm run -F @memegoat/frontend lint",
|
||||
"lint:docs": "pnpm run -F @memegoat/documentation lint",
|
||||
"test": "pnpm run test:back && pnpm run test:front",
|
||||
"test:back": "pnpm run -F @memegoat/backend test",
|
||||
"test:front": "pnpm run -F @memegoat/frontend test",
|
||||
"format": "pnpm run format:back && pnpm run format:front && pnpm run format:docs",
|
||||
"format:back": "pnpm run -F @memegoat/backend format",
|
||||
"format:front": "pnpm run -F @memegoat/frontend format",
|
||||
"format:docs": "pnpm run -F @memegoat/documentation format",
|
||||
"upgrade": "pnpm dlx taze minor"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": {
|
||||
"name": "Mathis HERRIOT",
|
||||
"email": "mherriot.pro@proton.me",
|
||||
"role": "Author"
|
||||
},
|
||||
"license": "AGPL-3.0-only",
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.3.11"
|
||||
}
|
||||
}
|
||||
{
|
||||
"name": "@memegoat/backend",
|
||||
"version": "0.0.1",
|
||||
"description": "",
|
||||
"author": "",
|
||||
"private": true,
|
||||
"license": "UNLICENSED",
|
||||
"files": [
|
||||
"dist",
|
||||
".migrations",
|
||||
"drizzle.config.ts"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "nest build",
|
||||
"lint": "biome check",
|
||||
"lint:write": "biome check --write",
|
||||
"format": "biome format --write",
|
||||
"start": "nest start",
|
||||
"start:dev": "nest start --watch",
|
||||
"start:debug": "nest start --debug --watch",
|
||||
"start:prod": "node dist/main",
|
||||
"test": "jest",
|
||||
"test:watch": "jest --watch",
|
||||
"test:cov": "jest --coverage",
|
||||
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
|
||||
"test:e2e": "jest --config ./test/jest-e2e.json",
|
||||
"db:generate": "drizzle-kit generate",
|
||||
"db:migrate": "drizzle-kit migrate",
|
||||
"db:studio": "drizzle-kit studio"
|
||||
},
|
||||
"dependencies": {
|
||||
"@nestjs-modules/mailer": "^2.0.2",
|
||||
"@nestjs/cache-manager": "^3.1.0",
|
||||
"@nestjs/common": "^11.0.1",
|
||||
"@nestjs/config": "^4.0.2",
|
||||
"@nestjs/core": "^11.0.1",
|
||||
"@nestjs/mapped-types": "^2.1.0",
|
||||
"@nestjs/platform-express": "^11.0.1",
|
||||
"@nestjs/schedule": "^6.1.0",
|
||||
"@nestjs/throttler": "^6.5.0",
|
||||
"@noble/post-quantum": "^0.5.4",
|
||||
"@node-rs/argon2": "^2.0.2",
|
||||
"@sentry/nestjs": "^10.32.1",
|
||||
"@sentry/profiling-node": "^10.32.1",
|
||||
"cache-manager": "^7.2.7",
|
||||
"cache-manager-redis-yet": "^5.1.5",
|
||||
"clamscan": "^2.4.0",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.3",
|
||||
"dotenv": "^17.2.3",
|
||||
"drizzle-orm": "^0.45.1",
|
||||
"fluent-ffmpeg": "^2.1.3",
|
||||
"helmet": "^8.1.0",
|
||||
"iron-session": "^8.0.4",
|
||||
"jose": "^6.1.3",
|
||||
"minio": "^8.0.6",
|
||||
"nodemailer": "^7.0.12",
|
||||
"otplib": "^12.0.1",
|
||||
"pg": "^8.16.3",
|
||||
"qrcode": "^1.5.4",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"rxjs": "^7.8.1",
|
||||
"sharp": "^0.34.5",
|
||||
"uuid": "^13.0.0",
|
||||
"zod": "^4.3.5",
|
||||
"drizzle-kit": "^0.31.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nestjs/cli": "^11.0.0",
|
||||
"globals": "^16.0.0",
|
||||
"jest": "^30.0.0",
|
||||
"source-map-support": "^0.5.21",
|
||||
"supertest": "^7.0.0",
|
||||
"ts-jest": "^29.2.5",
|
||||
"ts-loader": "^9.5.2",
|
||||
"ts-node": "^10.9.2",
|
||||
"tsconfig-paths": "^4.2.0",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.7.3",
|
||||
"typescript-eslint": "^8.20.0",
|
||||
"@nestjs/schematics": "^11.0.0",
|
||||
"@nestjs/testing": "^11.0.1",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/fluent-ffmpeg": "^2.1.28",
|
||||
"@types/jest": "^30.0.0",
|
||||
"@types/multer": "^2.0.0",
|
||||
"@types/node": "^22.10.7",
|
||||
"@types/nodemailer": "^7.0.4",
|
||||
"@types/pg": "^8.16.0",
|
||||
"@types/qrcode": "^1.5.6",
|
||||
"@types/sharp": "^0.32.0",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@types/uuid": "^11.0.0",
|
||||
"drizzle-kit": "^0.31.8"
|
||||
},
|
||||
"jest": {
|
||||
"moduleFileExtensions": [
|
||||
"js",
|
||||
"json",
|
||||
"ts"
|
||||
],
|
||||
"rootDir": "src",
|
||||
"testRegex": ".*\\.spec\\.ts$",
|
||||
"collectCoverageFrom": [
|
||||
"**/*.(t|j)s"
|
||||
],
|
||||
"coverageDirectory": "../coverage",
|
||||
"testEnvironment": "node",
|
||||
"transformIgnorePatterns": [
|
||||
"node_modules/(?!(.pnpm/)?(jose|@noble|uuid)/)"
|
||||
],
|
||||
"transform": {
|
||||
"^.+\\.(t|j)sx?$": "ts-jest"
|
||||
},
|
||||
"moduleNameMapper": {
|
||||
"^@noble/post-quantum/(.*)$": "<rootDir>/../node_modules/@noble/post-quantum/$1",
|
||||
"^@noble/hashes/(.*)$": "<rootDir>/../node_modules/@noble/hashes/$1"
|
||||
}
|
||||
}
|
||||
}
|
||||
{
|
||||
"name": "@memegoat/frontend",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "next dev",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "biome check",
|
||||
"format": "biome format --write"
|
||||
},
|
||||
"dependencies": {
|
||||
"@hookform/resolvers": "^5.2.2",
|
||||
"@radix-ui/react-accordion": "^1.2.12",
|
||||
"@radix-ui/react-alert-dialog": "^1.1.15",
|
||||
"@radix-ui/react-aspect-ratio": "^1.1.8",
|
||||
"@radix-ui/react-avatar": "^1.1.11",
|
||||
"@radix-ui/react-checkbox": "^1.3.3",
|
||||
"@radix-ui/react-collapsible": "^1.1.12",
|
||||
"@radix-ui/react-context-menu": "^2.2.16",
|
||||
"@radix-ui/react-dialog": "^1.1.15",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.16",
|
||||
"@radix-ui/react-hover-card": "^1.1.15",
|
||||
"@radix-ui/react-label": "^2.1.8",
|
||||
"@radix-ui/react-menubar": "^1.1.16",
|
||||
"@radix-ui/react-navigation-menu": "^1.2.14",
|
||||
"@radix-ui/react-popover": "^1.1.15",
|
||||
"@radix-ui/react-progress": "^1.1.8",
|
||||
"@radix-ui/react-radio-group": "^1.3.8",
|
||||
"@radix-ui/react-scroll-area": "^1.2.10",
|
||||
"@radix-ui/react-select": "^2.2.6",
|
||||
"@radix-ui/react-separator": "^1.1.8",
|
||||
"@radix-ui/react-slider": "^1.3.6",
|
||||
"@radix-ui/react-slot": "^1.2.4",
|
||||
"@radix-ui/react-switch": "^1.2.6",
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-toggle": "^1.1.10",
|
||||
"@radix-ui/react-toggle-group": "^1.1.11",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"axios": "^1.13.2",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"date-fns": "^4.1.0",
|
||||
"embla-carousel-react": "^8.6.0",
|
||||
"input-otp": "^1.4.2",
|
||||
"lucide-react": "^0.562.0",
|
||||
"next": "16.1.1",
|
||||
"next-themes": "^0.4.6",
|
||||
"react": "19.2.3",
|
||||
"react-day-picker": "^9.13.0",
|
||||
"react-dom": "19.2.3",
|
||||
"react-hook-form": "^7.71.1",
|
||||
"react-resizable-panels": "^4.4.1",
|
||||
"recharts": "2.15.4",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.4.0",
|
||||
"vaul": "^1.1.2",
|
||||
"zod": "^4.3.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.3.11",
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
"babel-plugin-react-compiler": "1.0.0",
|
||||
"tailwindcss": "^4",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "^5"
|
||||
}
|
||||
}
|
||||
75
README.md
75
README.md
@@ -8,13 +8,15 @@
|
||||
<div align="center">
|
||||
<a href="https://git.yidhra.fr/Mathis/memegoat/src/branch/dev/LICENSE">
|
||||
<img src="https://img.shields.io/badge/License-AGPL3.0-green" alt="License">
|
||||
</a>
|
||||
<a href="https://git.yidhra.fr/Mathis/memegoat/commits">
|
||||
<img src="https://img.shields.io/badge/Status-Ongoing-blue" alt="Commits">
|
||||
</a>
|
||||
<a href="https://memegoat.fr?ref=git">
|
||||
<a href="https://memegoat.fr">
|
||||
<img src="https://img.shields.io/badge/Visit-memegoat.fr-orange" alt="Visit memegoat.fr">
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<p align="center">
|
||||
<a href="#">
|
||||
@@ -28,63 +30,64 @@
|
||||
|
||||
# 🐐 Memegoat
|
||||
|
||||
Lorem ipsum dolor sit amet
|
||||
Memegoat est une plateforme moderne de partage et de création de mèmes, conçue avec une architecture robuste et sécurisée.
|
||||
|
||||
_This repository is in development, and we’re still integrating core feature into the mono repo. It's not fully ready for self-hosted deployment yet, but you can run it locally._
|
||||
_Ce dépôt est en cours de développement. Nous intégrons actuellement les fonctionnalités clés dans le monorepo. Il n'est pas encore totalement prêt pour un déploiement auto-hébergé simplifié, mais vous pouvez le lancer localement._
|
||||
|
||||
## What is Memegoat ?
|
||||
## Qu'est-ce que Memegoat ?
|
||||
|
||||
[Firecrawl](https://memegoat.fr?ref=git) Lorem ipsum dolor sit amet. Check out our [documentation](https://docs.memegoat.fr).
|
||||
[Memegoat](https://memegoat.fr) est votre destination ultime pour découvrir, créer et partager les meilleurs mèmes du web. Notre plateforme se concentre sur la performance, la sécurité des données et une expérience utilisateur fluide.
|
||||
|
||||
Lorem ipsum dolor sit amet
|
||||
Retrouvez notre documentation complète sur : [docs.memegoat.fr](https://docs.memegoat.fr)
|
||||
|
||||
_Pst. hey, you, join our stargazers :)_
|
||||
## Architecture & Stack Technique
|
||||
|
||||
## How to use it?
|
||||
Le projet est structuré en monorepo :
|
||||
|
||||
Lorem ipsum dolor sit amet. You can also self host if you'd like.
|
||||
- **Frontend** : Next.js avec Tailwind CSS et Shadcn/ui.
|
||||
- **Backend** : NestJS (TypeScript) avec PostgreSQL.
|
||||
- **Base de données** : Drizzle ORM avec chiffrement natif PGP pour les données sensibles.
|
||||
- **Infrastructure** : Docker, Caddy (Reverse Proxy & TLS), stockage compatible S3.
|
||||
|
||||
Check out the following resources to get started:
|
||||
- **API**: [Documentation](#)
|
||||
- **Data Model**: [MLD/LDM](#), [MCD/CDM](#), [MPD/PDM](#)
|
||||
- **Technical choices**: [The stack](#), [Security choices](#), [Docker](#)
|
||||
## Documentation Rapide
|
||||
|
||||
To run locally, refer to guide [here](#).
|
||||
Pour approfondir vos connaissances techniques sur le projet :
|
||||
- **[Modèle de Données](https://docs.memegoat.fr/docs/database)** : MCD, MLD et MPD.
|
||||
- **[Sécurité](https://docs.memegoat.fr/docs/security)** : Chiffrement PGP, Argon2id, RBAC.
|
||||
- **[Conformité RGPD](https://docs.memegoat.fr/docs/compliance)** : Mesures techniques et droits des utilisateurs.
|
||||
- **[API & Intégrations](https://docs.memegoat.fr/docs/api)** : Authentification par sessions, clés API et 2FA.
|
||||
|
||||
### API Key
|
||||
## Comment l'utiliser ?
|
||||
|
||||
To use the API, you need to sign up on [Memegoat](https://memegoat.fr) and get an API key.
|
||||
### Installation locale
|
||||
|
||||
### Features
|
||||
1. Clonez le dépôt.
|
||||
2. Installez les dépendances avec `pnpm install`.
|
||||
3. Configurez les variables d'environnement (voir `.env.example`).
|
||||
4. Lancez les services via Docker ou manuellement.
|
||||
|
||||
- [**Blank**](#anchor): lorem ipsum
|
||||
### Clés API
|
||||
|
||||
### Powerful Capabilities
|
||||
- **The hard stuff**: proxies, anti-bot mechanisms, dynamic content (js-rendered), output parsing, orchestration
|
||||
-
|
||||
### anchor
|
||||
Pour utiliser l'API, vous pouvez générer des clés API sécurisées directement depuis votre profil sur [memegoat.fr](https://memegoat.fr).
|
||||
|
||||
lorem ipsum
|
||||
## Fonctionnalités Clés
|
||||
|
||||
## Contributing
|
||||
- **Sécurité Avancée** : Chiffrement des données personnelles au repos et hachage aveugle pour la recherche.
|
||||
- **RGPD by Design** : Mécanismes de Soft Delete, purge automatique et hachage des IPs.
|
||||
- **Multi-Authentification** : Support des sessions JWT, des clés API et de la double authentification (2FA).
|
||||
- **Gestion de Contenu** : Support des mèmes et GIFs avec système de tags et signalements.
|
||||
- **Traitement Médias Sécurisé** : Scan antivirus (ClamAV) systématique et transcodage haute performance (WebP, WebM, AVIF, AV1).
|
||||
|
||||
We love contributions! Please read our [contributing guide](CONTRIBUTING.md) before submitting a pull request. If you'd like to self-host, refer to the [self-hosting guide](SELF_HOST.md).
|
||||
## Contribution
|
||||
|
||||
## License Disclaimer
|
||||
Les contributions sont les bienvenues ! Veuillez consulter notre guide de contribution avant de soumettre une pull request.
|
||||
|
||||
This project is primarily licensed under the GNU Affero General Public License v3.0 (AGPL-3.0), as specified in the LICENSE file in the root directory of this repository. However, certain components of this project are licensed under the MIT License. Refer to the LICENSE files in these specific directories for details.
|
||||
|
||||
Please note:
|
||||
|
||||
- The AGPL-3.0 license applies to all parts of the project unless otherwise specified.
|
||||
- The SDKs and some UI components are licensed under the MIT License. Refer to the LICENSE files in these specific directories for details.
|
||||
- When using or contributing to this project, ensure you comply with the appropriate license terms for the specific component you are working with.
|
||||
|
||||
For more details on the licensing of specific components, please refer to the LICENSE files in the respective directories or contact the project maintainers.
|
||||
## Licence
|
||||
|
||||
Ce projet est principalement sous licence **GNU Affero General Public License v3.0 (AGPL-3.0)**. Certains composants, comme les SDKs, peuvent être sous licence MIT. Veuillez vous référer aux fichiers `LICENSE` dans les répertoires respectifs pour plus de détails.
|
||||
|
||||
<p align="right" style="font-size: 14px; color: #555; margin-top: 20px;">
|
||||
<a href="#readme-top" style="text-decoration: none; color: #007bff; font-weight: bold;">
|
||||
↑ Back to Top ↑
|
||||
↑ Retour en haut ↑
|
||||
</a>
|
||||
</p>
|
||||
50
ROADMAP.md
Normal file
50
ROADMAP.md
Normal file
@@ -0,0 +1,50 @@
|
||||
# 🐐 Memegoat - Roadmap & Critères de Production
|
||||
|
||||
Ce document définit les objectifs, les critères techniques et les fonctionnalités à atteindre pour que le projet Memegoat soit considéré comme prêt pour la production et conforme aux normes européennes (RGPD) et françaises.
|
||||
|
||||
## 1. 🏗️ Architecture & Infrastructure
|
||||
- [x] Backend NestJS (TypeScript)
|
||||
- [x] Base de données PostgreSQL avec Drizzle ORM
|
||||
- [x] Stockage d'objets compatible S3 (MinIO)
|
||||
- [x] Service d'Emailing (Nodemailer / SMTPS)
|
||||
- [x] Documentation Technique & Référence API (`docs.memegoat.fr`)
|
||||
- [x] Health Checks (`/health`)
|
||||
- [x] Gestion des variables d'environnement (Validation avec Zod)
|
||||
- [ ] CI/CD (Build, Lint, Test, Deploy)
|
||||
|
||||
## 2. 🔐 Sécurité & Authentification
|
||||
- [x] Hachage des mots de passe (Argon2id)
|
||||
- [x] Gestion des sessions robuste (JWT avec Refresh Token et Rotation)
|
||||
- [x] RBAC (Role Based Access Control) fonctionnel
|
||||
- [x] Système de Clés API (Hachées en base)
|
||||
- [x] Double Authentification (2FA / TOTP)
|
||||
- [x] Limitation de débit (Rate Limiting / Throttler)
|
||||
- [x] Validation stricte des entrées (DTOs + ValidationPipe)
|
||||
- [x] Protection contre les vulnérabilités OWASP (Helmet, CORS)
|
||||
|
||||
## 3. ⚖️ Conformité RGPD (EU & France)
|
||||
- [x] Chiffrement natif des données personnelles (PII) via PGP (pgcrypto)
|
||||
- [x] Hachage aveugle (Blind Indexing) pour l'email (recherche/unicité)
|
||||
- [x] Journalisation d'audit complète (Audit Logs) pour les actions sensibles
|
||||
- [x] Gestion du consentement (Versionnage CGU/Politique de Confidentialité)
|
||||
- [x] Droit à l'effacement : Flux de suppression (Soft Delete -> Purge définitive)
|
||||
- [x] Droit à la portabilité : Export des données utilisateur (JSON)
|
||||
- [x] Purge automatique des données obsolètes (Signalements, Sessions expirées)
|
||||
- [x] Anonymisation des adresses IP (Hachage) dans les logs
|
||||
|
||||
## 4. 🖼️ Fonctionnalités Coeur (Media & Galerie)
|
||||
- [x] Exploration (Trends, Recent, Favoris)
|
||||
- [x] Recherche par Tags, Catégories, Auteur, Texte
|
||||
- [x] Gestion des Favoris
|
||||
- [x] Upload sécurisé via S3 (URLs présignées)
|
||||
- [x] Scan Antivirus (ClamAV) et traitement des médias (WebP, WebM, AVIF, AV1)
|
||||
- [x] Limitation de la taille et des formats de fichiers entrants (Configurable)
|
||||
- [x] Système de Signalement (Reports) et workflow de modération
|
||||
- [ ] SEO : Metatags dynamiques et slugs sémantiques
|
||||
|
||||
## 5. ✅ Qualité & Robustesse
|
||||
- [ ] Couverture de tests unitaires (Jest) > 80%
|
||||
- [ ] Tests d'intégration et E2E
|
||||
- [x] Gestion centralisée des erreurs (Filters NestJS)
|
||||
- [ ] Monitoring et centralisation des logs (ex: Sentry, ELK/Loki)
|
||||
- [ ] Performance : Cache (Redis) pour les tendances et recherches fréquentes
|
||||
177
backend/.migrations/0000_right_sally_floyd.sql
Normal file
177
backend/.migrations/0000_right_sally_floyd.sql
Normal file
@@ -0,0 +1,177 @@
|
||||
CREATE EXTENSION IF NOT EXISTS "pgcrypto";
|
||||
CREATE TYPE "public"."user_status" AS ENUM('active', 'verification', 'suspended', 'pending', 'deleted');--> statement-breakpoint
|
||||
CREATE TYPE "public"."content_type" AS ENUM('meme', 'gif');--> statement-breakpoint
|
||||
CREATE TYPE "public"."report_reason" AS ENUM('inappropriate', 'spam', 'copyright', 'other');--> statement-breakpoint
|
||||
CREATE TYPE "public"."report_status" AS ENUM('pending', 'reviewed', 'resolved', 'dismissed');--> statement-breakpoint
|
||||
CREATE TABLE "users" (
|
||||
"uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"status" "user_status" DEFAULT 'pending' NOT NULL,
|
||||
"email" "bytea" NOT NULL,
|
||||
"email_hash" varchar(64) NOT NULL,
|
||||
"display_name" varchar(32),
|
||||
"username" varchar(32) NOT NULL,
|
||||
"password_hash" varchar(72) NOT NULL,
|
||||
"two_factor_secret" "bytea",
|
||||
"is_two_factor_enabled" boolean DEFAULT false NOT NULL,
|
||||
"terms_version" varchar(16),
|
||||
"privacy_version" varchar(16),
|
||||
"gdpr_accepted_at" timestamp with time zone,
|
||||
"last_login_at" timestamp with time zone,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"deleted_at" timestamp with time zone,
|
||||
CONSTRAINT "users_email_hash_unique" UNIQUE("email_hash"),
|
||||
CONSTRAINT "users_username_unique" UNIQUE("username")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "permissions" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"name" varchar(64) NOT NULL,
|
||||
"slug" varchar(64) NOT NULL,
|
||||
"description" varchar(128),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "permissions_name_unique" UNIQUE("name"),
|
||||
CONSTRAINT "permissions_slug_unique" UNIQUE("slug")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "roles" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"name" varchar(64) NOT NULL,
|
||||
"slug" varchar(64) NOT NULL,
|
||||
"description" varchar(128),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "roles_name_unique" UNIQUE("name"),
|
||||
CONSTRAINT "roles_slug_unique" UNIQUE("slug")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "roles_to_permissions" (
|
||||
"role_id" uuid NOT NULL,
|
||||
"permission_id" uuid NOT NULL,
|
||||
CONSTRAINT "roles_to_permissions_role_id_permission_id_pk" PRIMARY KEY("role_id","permission_id")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "users_to_roles" (
|
||||
"user_id" uuid NOT NULL,
|
||||
"role_id" uuid NOT NULL,
|
||||
CONSTRAINT "users_to_roles_user_id_role_id_pk" PRIMARY KEY("user_id","role_id")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "sessions" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"user_id" uuid NOT NULL,
|
||||
"refresh_token" varchar(512) NOT NULL,
|
||||
"user_agent" varchar(255),
|
||||
"ip_hash" varchar(64),
|
||||
"is_valid" boolean DEFAULT true NOT NULL,
|
||||
"expires_at" timestamp with time zone NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "sessions_refresh_token_unique" UNIQUE("refresh_token")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "api_keys" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"user_id" uuid NOT NULL,
|
||||
"key_hash" varchar(128) NOT NULL,
|
||||
"name" varchar(128) NOT NULL,
|
||||
"prefix" varchar(8) NOT NULL,
|
||||
"is_active" boolean DEFAULT true NOT NULL,
|
||||
"last_used_at" timestamp with time zone,
|
||||
"expires_at" timestamp with time zone,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "api_keys_key_hash_unique" UNIQUE("key_hash")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "tags" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"name" varchar(64) NOT NULL,
|
||||
"slug" varchar(64) NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "tags_name_unique" UNIQUE("name"),
|
||||
CONSTRAINT "tags_slug_unique" UNIQUE("slug")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "contents" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"user_id" uuid NOT NULL,
|
||||
"type" "content_type" NOT NULL,
|
||||
"title" varchar(255) NOT NULL,
|
||||
"storage_key" varchar(512) NOT NULL,
|
||||
"mime_type" varchar(128) NOT NULL,
|
||||
"file_size" integer NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"deleted_at" timestamp with time zone,
|
||||
CONSTRAINT "contents_storage_key_unique" UNIQUE("storage_key")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "contents_to_tags" (
|
||||
"content_id" uuid NOT NULL,
|
||||
"tag_id" uuid NOT NULL,
|
||||
CONSTRAINT "contents_to_tags_content_id_tag_id_pk" PRIMARY KEY("content_id","tag_id")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "reports" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"reporter_id" uuid NOT NULL,
|
||||
"content_id" uuid,
|
||||
"tag_id" uuid,
|
||||
"reason" "report_reason" NOT NULL,
|
||||
"description" text,
|
||||
"status" "report_status" DEFAULT 'pending' NOT NULL,
|
||||
"expires_at" timestamp with time zone,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "audit_logs" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"user_id" uuid,
|
||||
"action" varchar(64) NOT NULL,
|
||||
"entity_type" varchar(64) NOT NULL,
|
||||
"entity_id" uuid,
|
||||
"details" jsonb,
|
||||
"ip_hash" varchar(64),
|
||||
"user_agent" varchar(255),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "roles_to_permissions" ADD CONSTRAINT "roles_to_permissions_role_id_roles_id_fk" FOREIGN KEY ("role_id") REFERENCES "public"."roles"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "roles_to_permissions" ADD CONSTRAINT "roles_to_permissions_permission_id_permissions_id_fk" FOREIGN KEY ("permission_id") REFERENCES "public"."permissions"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "users_to_roles" ADD CONSTRAINT "users_to_roles_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "users_to_roles" ADD CONSTRAINT "users_to_roles_role_id_roles_id_fk" FOREIGN KEY ("role_id") REFERENCES "public"."roles"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "sessions" ADD CONSTRAINT "sessions_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "api_keys" ADD CONSTRAINT "api_keys_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contents" ADD CONSTRAINT "contents_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contents_to_tags" ADD CONSTRAINT "contents_to_tags_content_id_contents_id_fk" FOREIGN KEY ("content_id") REFERENCES "public"."contents"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contents_to_tags" ADD CONSTRAINT "contents_to_tags_tag_id_tags_id_fk" FOREIGN KEY ("tag_id") REFERENCES "public"."tags"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "reports" ADD CONSTRAINT "reports_reporter_id_users_uuid_fk" FOREIGN KEY ("reporter_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "reports" ADD CONSTRAINT "reports_content_id_contents_id_fk" FOREIGN KEY ("content_id") REFERENCES "public"."contents"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "reports" ADD CONSTRAINT "reports_tag_id_tags_id_fk" FOREIGN KEY ("tag_id") REFERENCES "public"."tags"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "audit_logs" ADD CONSTRAINT "audit_logs_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "users_uuid_idx" ON "users" USING btree ("uuid");--> statement-breakpoint
|
||||
CREATE INDEX "users_email_hash_idx" ON "users" USING btree ("email_hash");--> statement-breakpoint
|
||||
CREATE INDEX "users_username_idx" ON "users" USING btree ("username");--> statement-breakpoint
|
||||
CREATE INDEX "users_status_idx" ON "users" USING btree ("status");--> statement-breakpoint
|
||||
CREATE INDEX "permissions_slug_idx" ON "permissions" USING btree ("slug");--> statement-breakpoint
|
||||
CREATE INDEX "roles_slug_idx" ON "roles" USING btree ("slug");--> statement-breakpoint
|
||||
CREATE INDEX "sessions_user_id_idx" ON "sessions" USING btree ("user_id");--> statement-breakpoint
|
||||
CREATE INDEX "sessions_refresh_token_idx" ON "sessions" USING btree ("refresh_token");--> statement-breakpoint
|
||||
CREATE INDEX "sessions_expires_at_idx" ON "sessions" USING btree ("expires_at");--> statement-breakpoint
|
||||
CREATE INDEX "api_keys_user_id_idx" ON "api_keys" USING btree ("user_id");--> statement-breakpoint
|
||||
CREATE INDEX "api_keys_key_hash_idx" ON "api_keys" USING btree ("key_hash");--> statement-breakpoint
|
||||
CREATE INDEX "tags_slug_idx" ON "tags" USING btree ("slug");--> statement-breakpoint
|
||||
CREATE INDEX "contents_user_id_idx" ON "contents" USING btree ("user_id");--> statement-breakpoint
|
||||
CREATE INDEX "contents_storage_key_idx" ON "contents" USING btree ("storage_key");--> statement-breakpoint
|
||||
CREATE INDEX "contents_deleted_at_idx" ON "contents" USING btree ("deleted_at");--> statement-breakpoint
|
||||
CREATE INDEX "reports_reporter_id_idx" ON "reports" USING btree ("reporter_id");--> statement-breakpoint
|
||||
CREATE INDEX "reports_content_id_idx" ON "reports" USING btree ("content_id");--> statement-breakpoint
|
||||
CREATE INDEX "reports_tag_id_idx" ON "reports" USING btree ("tag_id");--> statement-breakpoint
|
||||
CREATE INDEX "reports_status_idx" ON "reports" USING btree ("status");--> statement-breakpoint
|
||||
CREATE INDEX "reports_expires_at_idx" ON "reports" USING btree ("expires_at");--> statement-breakpoint
|
||||
CREATE INDEX "audit_logs_user_id_idx" ON "audit_logs" USING btree ("user_id");--> statement-breakpoint
|
||||
CREATE INDEX "audit_logs_action_idx" ON "audit_logs" USING btree ("action");--> statement-breakpoint
|
||||
CREATE INDEX "audit_logs_entity_idx" ON "audit_logs" USING btree ("entity_type","entity_id");--> statement-breakpoint
|
||||
CREATE INDEX "audit_logs_created_at_idx" ON "audit_logs" USING btree ("created_at");
|
||||
30
backend/.migrations/0001_purple_goliath.sql
Normal file
30
backend/.migrations/0001_purple_goliath.sql
Normal file
@@ -0,0 +1,30 @@
|
||||
CREATE TABLE "categories" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"name" varchar(64) NOT NULL,
|
||||
"slug" varchar(64) NOT NULL,
|
||||
"description" varchar(255),
|
||||
"icon_url" varchar(512),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "categories_name_unique" UNIQUE("name"),
|
||||
CONSTRAINT "categories_slug_unique" UNIQUE("slug")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "favorites" (
|
||||
"user_id" uuid NOT NULL,
|
||||
"content_id" uuid NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "favorites_user_id_content_id_pk" PRIMARY KEY("user_id","content_id")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "tags" ADD COLUMN "user_id" uuid;--> statement-breakpoint
|
||||
ALTER TABLE "contents" ADD COLUMN "category_id" uuid;--> statement-breakpoint
|
||||
ALTER TABLE "contents" ADD COLUMN "slug" varchar(255) NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "contents" ADD COLUMN "views" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "contents" ADD COLUMN "usage_count" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "favorites" ADD CONSTRAINT "favorites_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "favorites" ADD CONSTRAINT "favorites_content_id_contents_id_fk" FOREIGN KEY ("content_id") REFERENCES "public"."contents"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "categories_slug_idx" ON "categories" USING btree ("slug");--> statement-breakpoint
|
||||
ALTER TABLE "tags" ADD CONSTRAINT "tags_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contents" ADD CONSTRAINT "contents_category_id_categories_id_fk" FOREIGN KEY ("category_id") REFERENCES "public"."categories"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contents" ADD CONSTRAINT "contents_slug_unique" UNIQUE("slug");
|
||||
1
backend/.migrations/0002_redundant_skin.sql
Normal file
1
backend/.migrations/0002_redundant_skin.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "users" ADD COLUMN "avatar_url" varchar(255);
|
||||
2
backend/.migrations/0003_colossal_fantastic_four.sql
Normal file
2
backend/.migrations/0003_colossal_fantastic_four.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
ALTER TABLE "users" ALTER COLUMN "password_hash" SET DATA TYPE varchar(255);--> statement-breakpoint
|
||||
ALTER TABLE "users" DROP COLUMN "avatar_url";
|
||||
1
backend/.migrations/0004_cheerful_dakota_north.sql
Normal file
1
backend/.migrations/0004_cheerful_dakota_north.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "users" ALTER COLUMN "password_hash" SET DATA TYPE varchar(95);
|
||||
1
backend/.migrations/0005_perpetual_silverclaw.sql
Normal file
1
backend/.migrations/0005_perpetual_silverclaw.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "users" ALTER COLUMN "password_hash" SET DATA TYPE varchar(100);
|
||||
2
backend/.migrations/0006_friendly_adam_warlock.sql
Normal file
2
backend/.migrations/0006_friendly_adam_warlock.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
ALTER TABLE "users" ADD COLUMN "avatar_url" varchar(512);--> statement-breakpoint
|
||||
ALTER TABLE "users" ADD COLUMN "bio" varchar(255);
|
||||
1316
backend/.migrations/meta/0000_snapshot.json
Normal file
1316
backend/.migrations/meta/0000_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1513
backend/.migrations/meta/0001_snapshot.json
Normal file
1513
backend/.migrations/meta/0001_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1519
backend/.migrations/meta/0002_snapshot.json
Normal file
1519
backend/.migrations/meta/0002_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1640
backend/.migrations/meta/0003_snapshot.json
Normal file
1640
backend/.migrations/meta/0003_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1640
backend/.migrations/meta/0004_snapshot.json
Normal file
1640
backend/.migrations/meta/0004_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1640
backend/.migrations/meta/0005_snapshot.json
Normal file
1640
backend/.migrations/meta/0005_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
1652
backend/.migrations/meta/0006_snapshot.json
Normal file
1652
backend/.migrations/meta/0006_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
55
backend/.migrations/meta/_journal.json
Normal file
55
backend/.migrations/meta/_journal.json
Normal file
@@ -0,0 +1,55 @@
|
||||
{
|
||||
"version": "7",
|
||||
"dialect": "postgresql",
|
||||
"entries": [
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "7",
|
||||
"when": 1767618753676,
|
||||
"tag": "0000_right_sally_floyd",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"version": "7",
|
||||
"when": 1768392191169,
|
||||
"tag": "0001_purple_goliath",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 2,
|
||||
"version": "7",
|
||||
"when": 1768393637823,
|
||||
"tag": "0002_redundant_skin",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 3,
|
||||
"version": "7",
|
||||
"when": 1768415667895,
|
||||
"tag": "0003_colossal_fantastic_four",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 4,
|
||||
"version": "7",
|
||||
"when": 1768417827439,
|
||||
"tag": "0004_cheerful_dakota_north",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 5,
|
||||
"version": "7",
|
||||
"when": 1768420201679,
|
||||
"tag": "0005_perpetual_silverclaw",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 6,
|
||||
"version": "7",
|
||||
"when": 1768423315172,
|
||||
"tag": "0006_friendly_adam_warlock",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
34
backend/Dockerfile
Normal file
34
backend/Dockerfile
Normal file
@@ -0,0 +1,34 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
FROM node:22-alpine AS base
|
||||
ENV PNPM_HOME="/pnpm"
|
||||
ENV PATH="$PNPM_HOME:$PATH"
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
|
||||
FROM base AS build
|
||||
WORKDIR /usr/src/app
|
||||
COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
|
||||
COPY backend/package.json ./backend/
|
||||
COPY frontend/package.json ./frontend/
|
||||
COPY documentation/package.json ./documentation/
|
||||
|
||||
# Utilisation du cache pour pnpm et installation figée
|
||||
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
COPY . .
|
||||
|
||||
# Deuxième passe avec cache pour les scripts/liens
|
||||
RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
RUN pnpm run --filter @memegoat/backend build
|
||||
RUN pnpm deploy --filter=@memegoat/backend --prod --legacy /app
|
||||
RUN cp -r backend/dist /app/dist
|
||||
RUN cp -r backend/.migrations /app/.migrations
|
||||
|
||||
FROM base AS runtime
|
||||
WORKDIR /app
|
||||
COPY --from=build /app .
|
||||
EXPOSE 3000
|
||||
ENV NODE_ENV=production
|
||||
CMD [ "node", "dist/src/main" ]
|
||||
@@ -7,13 +7,18 @@
|
||||
},
|
||||
"files": {
|
||||
"ignoreUnknown": true,
|
||||
"includes": ["**", "!node_modules", "!dist", "!build"]
|
||||
"includes": ["**", "!node_modules", "!dist", "!build", "!.migrations"]
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"indentStyle": "tab",
|
||||
"indentWidth": 1
|
||||
},
|
||||
"javascript": {
|
||||
"parser": {
|
||||
"unsafeParameterDecoratorsEnabled": true
|
||||
}
|
||||
},
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
@@ -23,11 +28,10 @@
|
||||
},
|
||||
"style": {
|
||||
"useImportType": "off"
|
||||
},
|
||||
"correctness": {
|
||||
"useHookAtTopLevel": "off"
|
||||
}
|
||||
},
|
||||
"domains": {
|
||||
"next": "recommended",
|
||||
"react": "recommended"
|
||||
}
|
||||
},
|
||||
"assist": {
|
||||
|
||||
19
backend/drizzle.config.ts
Normal file
19
backend/drizzle.config.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import * as process from "node:process";
|
||||
import { defineConfig } from "drizzle-kit";
|
||||
|
||||
export default defineConfig({
|
||||
schema: "./src/database/schemas/index.ts",
|
||||
out: ".migrations",
|
||||
dialect: "postgresql",
|
||||
casing: "snake_case",
|
||||
dbCredentials: {
|
||||
host: String(process.env.POSTGRES_HOST || "localhost"),
|
||||
port: Number(process.env.POSTGRES_PORT || 5432),
|
||||
database: String(process.env.POSTGRES_DB || "app"),
|
||||
user: String(process.env.POSTGRES_USER || "app"),
|
||||
password: String(process.env.POSTGRES_PASSWORD || "app"),
|
||||
ssl: false,
|
||||
},
|
||||
verbose: true,
|
||||
strict: true,
|
||||
});
|
||||
@@ -1,13 +1,19 @@
|
||||
{
|
||||
"name": "@memegoat/backend",
|
||||
"version": "0.0.1",
|
||||
"version": "0.0.0",
|
||||
"description": "",
|
||||
"author": "",
|
||||
"private": true,
|
||||
"license": "UNLICENSED",
|
||||
"files": [
|
||||
"dist",
|
||||
".migrations",
|
||||
"drizzle.config.ts"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "nest build",
|
||||
"lint": "biome check",
|
||||
"lint:write": "biome check --write",
|
||||
"format": "biome format --write",
|
||||
"start": "nest start",
|
||||
"start:dev": "nest start --watch",
|
||||
@@ -17,23 +23,50 @@
|
||||
"test:watch": "jest --watch",
|
||||
"test:cov": "jest --coverage",
|
||||
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
|
||||
"test:e2e": "jest --config ./test/jest-e2e.json"
|
||||
"test:e2e": "jest --config ./test/jest-e2e.json",
|
||||
"db:generate": "drizzle-kit generate",
|
||||
"db:migrate": "drizzle-kit migrate",
|
||||
"db:studio": "drizzle-kit studio"
|
||||
},
|
||||
"dependencies": {
|
||||
"@nestjs-modules/mailer": "^2.0.2",
|
||||
"@nestjs/cache-manager": "^3.1.0",
|
||||
"@nestjs/common": "^11.0.1",
|
||||
"@nestjs/config": "^4.0.2",
|
||||
"@nestjs/core": "^11.0.1",
|
||||
"@nestjs/mapped-types": "^2.1.0",
|
||||
"@nestjs/platform-express": "^11.0.1",
|
||||
"@nestjs/schedule": "^6.1.0",
|
||||
"@nestjs/throttler": "^6.5.0",
|
||||
"@noble/post-quantum": "^0.5.4",
|
||||
"@node-rs/argon2": "^2.0.2",
|
||||
"@sentry/nestjs": "^10.32.1",
|
||||
"@sentry/profiling-node": "^10.32.1",
|
||||
"cache-manager": "^7.2.7",
|
||||
"cache-manager-redis-yet": "^5.1.5",
|
||||
"clamscan": "^2.4.0",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.3",
|
||||
"dotenv": "^17.2.3",
|
||||
"drizzle-orm": "^0.45.1",
|
||||
"fluent-ffmpeg": "^2.1.3",
|
||||
"helmet": "^8.1.0",
|
||||
"iron-session": "^8.0.4",
|
||||
"jose": "^6.1.3",
|
||||
"minio": "^8.0.6",
|
||||
"nodemailer": "^7.0.12",
|
||||
"otplib": "^12.0.1",
|
||||
"pg": "^8.16.3",
|
||||
"qrcode": "^1.5.4",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"rxjs": "^7.8.1"
|
||||
"rxjs": "^7.8.1",
|
||||
"sharp": "^0.34.5",
|
||||
"uuid": "^13.0.0",
|
||||
"zod": "^4.3.5",
|
||||
"drizzle-kit": "^0.31.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nestjs/cli": "^11.0.0",
|
||||
"@nestjs/schematics": "^11.0.0",
|
||||
"@nestjs/testing": "^11.0.1",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/jest": "^30.0.0",
|
||||
"@types/node": "^22.10.7",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"globals": "^16.0.0",
|
||||
"jest": "^30.0.0",
|
||||
"source-map-support": "^0.5.21",
|
||||
@@ -42,8 +75,23 @@
|
||||
"ts-loader": "^9.5.2",
|
||||
"ts-node": "^10.9.2",
|
||||
"tsconfig-paths": "^4.2.0",
|
||||
"tsx": "^4.21.0",
|
||||
"typescript": "^5.7.3",
|
||||
"typescript-eslint": "^8.20.0"
|
||||
"typescript-eslint": "^8.20.0",
|
||||
"@nestjs/schematics": "^11.0.0",
|
||||
"@nestjs/testing": "^11.0.1",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/fluent-ffmpeg": "^2.1.28",
|
||||
"@types/jest": "^30.0.0",
|
||||
"@types/multer": "^2.0.0",
|
||||
"@types/node": "^22.10.7",
|
||||
"@types/nodemailer": "^7.0.4",
|
||||
"@types/pg": "^8.16.0",
|
||||
"@types/qrcode": "^1.5.6",
|
||||
"@types/sharp": "^0.32.0",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@types/uuid": "^11.0.0",
|
||||
"drizzle-kit": "^0.31.8"
|
||||
},
|
||||
"jest": {
|
||||
"moduleFileExtensions": [
|
||||
@@ -53,13 +101,20 @@
|
||||
],
|
||||
"rootDir": "src",
|
||||
"testRegex": ".*\\.spec\\.ts$",
|
||||
"transform": {
|
||||
"^.+\\.(t|j)s$": "ts-jest"
|
||||
},
|
||||
"collectCoverageFrom": [
|
||||
"**/*.(t|j)s"
|
||||
],
|
||||
"coverageDirectory": "../coverage",
|
||||
"testEnvironment": "node"
|
||||
"testEnvironment": "node",
|
||||
"transformIgnorePatterns": [
|
||||
"node_modules/(?!(.pnpm/)?(jose|@noble|uuid)/)"
|
||||
],
|
||||
"transform": {
|
||||
"^.+\\.(t|j)sx?$": "ts-jest"
|
||||
},
|
||||
"moduleNameMapper": {
|
||||
"^@noble/post-quantum/(.*)$": "<rootDir>/../node_modules/@noble/post-quantum/$1",
|
||||
"^@noble/hashes/(.*)$": "<rootDir>/../node_modules/@noble/hashes/$1"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
17
backend/src/admin/admin.controller.ts
Normal file
17
backend/src/admin/admin.controller.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { Controller, Get, UseGuards } from "@nestjs/common";
|
||||
import { Roles } from "../auth/decorators/roles.decorator";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { RolesGuard } from "../auth/guards/roles.guard";
|
||||
import { AdminService } from "./admin.service";
|
||||
|
||||
@Controller("admin")
|
||||
@UseGuards(AuthGuard, RolesGuard)
|
||||
@Roles("admin")
|
||||
export class AdminController {
|
||||
constructor(private readonly adminService: AdminService) {}
|
||||
|
||||
@Get("stats")
|
||||
getStats() {
|
||||
return this.adminService.getStats();
|
||||
}
|
||||
}
|
||||
14
backend/src/admin/admin.module.ts
Normal file
14
backend/src/admin/admin.module.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { CategoriesModule } from "../categories/categories.module";
|
||||
import { ContentsModule } from "../contents/contents.module";
|
||||
import { UsersModule } from "../users/users.module";
|
||||
import { AdminController } from "./admin.controller";
|
||||
import { AdminService } from "./admin.service";
|
||||
|
||||
@Module({
|
||||
imports: [AuthModule, UsersModule, ContentsModule, CategoriesModule],
|
||||
controllers: [AdminController],
|
||||
providers: [AdminService],
|
||||
})
|
||||
export class AdminModule {}
|
||||
27
backend/src/admin/admin.service.ts
Normal file
27
backend/src/admin/admin.service.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { CategoriesRepository } from "../categories/repositories/categories.repository";
|
||||
import { ContentsRepository } from "../contents/repositories/contents.repository";
|
||||
import { UsersRepository } from "../users/repositories/users.repository";
|
||||
|
||||
@Injectable()
|
||||
export class AdminService {
|
||||
constructor(
|
||||
private readonly usersRepository: UsersRepository,
|
||||
private readonly contentsRepository: ContentsRepository,
|
||||
private readonly categoriesRepository: CategoriesRepository,
|
||||
) {}
|
||||
|
||||
async getStats() {
|
||||
const [userCount, contentCount, categoryCount] = await Promise.all([
|
||||
this.usersRepository.countAll(),
|
||||
this.contentsRepository.count({}),
|
||||
this.categoriesRepository.countAll(),
|
||||
]);
|
||||
|
||||
return {
|
||||
users: userCount,
|
||||
contents: contentCount,
|
||||
categories: categoryCount,
|
||||
};
|
||||
}
|
||||
}
|
||||
42
backend/src/api-keys/api-keys.controller.ts
Normal file
42
backend/src/api-keys/api-keys.controller.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Delete,
|
||||
Get,
|
||||
Param,
|
||||
Post,
|
||||
Req,
|
||||
UseGuards,
|
||||
} from "@nestjs/common";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import type { AuthenticatedRequest } from "../common/interfaces/request.interface";
|
||||
import { ApiKeysService } from "./api-keys.service";
|
||||
import { CreateApiKeyDto } from "./dto/create-api-key.dto";
|
||||
|
||||
@Controller("api-keys")
|
||||
@UseGuards(AuthGuard)
|
||||
export class ApiKeysController {
|
||||
constructor(private readonly apiKeysService: ApiKeysService) {}
|
||||
|
||||
@Post()
|
||||
create(
|
||||
@Req() req: AuthenticatedRequest,
|
||||
@Body() createApiKeyDto: CreateApiKeyDto,
|
||||
) {
|
||||
return this.apiKeysService.create(
|
||||
req.user.sub,
|
||||
createApiKeyDto.name,
|
||||
createApiKeyDto.expiresAt ? new Date(createApiKeyDto.expiresAt) : undefined,
|
||||
);
|
||||
}
|
||||
|
||||
@Get()
|
||||
findAll(@Req() req: AuthenticatedRequest) {
|
||||
return this.apiKeysService.findAll(req.user.sub);
|
||||
}
|
||||
|
||||
@Delete(":id")
|
||||
revoke(@Req() req: AuthenticatedRequest, @Param("id") id: string) {
|
||||
return this.apiKeysService.revoke(req.user.sub, id);
|
||||
}
|
||||
}
|
||||
13
backend/src/api-keys/api-keys.module.ts
Normal file
13
backend/src/api-keys/api-keys.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { forwardRef, Module } from "@nestjs/common";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { ApiKeysController } from "./api-keys.controller";
|
||||
import { ApiKeysService } from "./api-keys.service";
|
||||
import { ApiKeysRepository } from "./repositories/api-keys.repository";
|
||||
|
||||
@Module({
|
||||
imports: [forwardRef(() => AuthModule)],
|
||||
controllers: [ApiKeysController],
|
||||
providers: [ApiKeysService, ApiKeysRepository],
|
||||
exports: [ApiKeysService, ApiKeysRepository],
|
||||
})
|
||||
export class ApiKeysModule {}
|
||||
128
backend/src/api-keys/api-keys.service.spec.ts
Normal file
128
backend/src/api-keys/api-keys.service.spec.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { HashingService } from "../crypto/services/hashing.service";
|
||||
import { ApiKeysService } from "./api-keys.service";
|
||||
import { ApiKeysRepository } from "./repositories/api-keys.repository";
|
||||
|
||||
// Unit tests for ApiKeysService. The repository and hashing service are
// replaced with jest mocks so only the service's own logic is exercised.
describe("ApiKeysService", () => {
  let service: ApiKeysService;
  let repository: ApiKeysRepository;

  // In-memory stand-in for the drizzle-backed repository.
  const mockApiKeysRepository = {
    create: jest.fn(),
    findAll: jest.fn(),
    revoke: jest.fn(),
    findActiveByKeyHash: jest.fn(),
    updateLastUsed: jest.fn(),
  };

  // Hashing is stubbed to a constant so hashes are deterministic.
  const mockHashingService = {
    hashSha256: jest.fn().mockResolvedValue("hashed-key"),
  };

  beforeEach(async () => {
    // Reset call counts and queued values so tests stay independent.
    jest.clearAllMocks();

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ApiKeysService,
        {
          provide: ApiKeysRepository,
          useValue: mockApiKeysRepository,
        },
        {
          provide: HashingService,
          useValue: mockHashingService,
        },
      ],
    }).compile();

    service = module.get<ApiKeysService>(ApiKeysService);
    repository = module.get<ApiKeysRepository>(ApiKeysRepository);
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("create", () => {
    it("should create an API key", async () => {
      const userId = "user-id";
      const name = "Test Key";
      const expiresAt = new Date();

      const result = await service.create(userId, name, expiresAt);

      // The persisted record must carry the caller's metadata and the
      // fixed "mg_live_" prefix; the key hash itself is not asserted here.
      expect(repository.create).toHaveBeenCalledWith(
        expect.objectContaining({
          userId,
          name,
          prefix: "mg_live_",
          expiresAt,
        }),
      );
      // The plaintext key is only ever returned at creation time.
      expect(result).toHaveProperty("key");
      expect(result.name).toBe(name);
      expect(result.expiresAt).toBe(expiresAt);
      expect(result.key).toMatch(/^mg_live_/);
    });
  });

  describe("findAll", () => {
    it("should find all API keys for a user", async () => {
      const userId = "user-id";
      const expectedKeys = [{ id: "1", name: "Key 1" }];
      mockApiKeysRepository.findAll.mockResolvedValue(expectedKeys);

      const result = await service.findAll(userId);

      expect(repository.findAll).toHaveBeenCalledWith(userId);
      expect(result).toEqual(expectedKeys);
    });
  });

  describe("revoke", () => {
    it("should revoke an API key", async () => {
      const userId = "user-id";
      const keyId = "key-id";
      const expectedResult = [{ id: keyId, isActive: false }];
      mockApiKeysRepository.revoke.mockResolvedValue(expectedResult);

      const result = await service.revoke(userId, keyId);

      expect(repository.revoke).toHaveBeenCalledWith(userId, keyId);
      expect(result).toEqual(expectedResult);
    });
  });

  describe("validateKey", () => {
    it("should validate a valid API key", async () => {
      const key = "mg_live_testkey";
      // expiresAt: null models a non-expiring key.
      const apiKey = { id: "1", isActive: true, expiresAt: null };
      mockApiKeysRepository.findActiveByKeyHash.mockResolvedValue(apiKey);

      const result = await service.validateKey(key);

      expect(result).toEqual(apiKey);
      expect(repository.findActiveByKeyHash).toHaveBeenCalled();
      // A successful validation must also refresh the last-used timestamp.
      expect(repository.updateLastUsed).toHaveBeenCalledWith(apiKey.id);
    });

    it("should return null for invalid API key", async () => {
      mockApiKeysRepository.findActiveByKeyHash.mockResolvedValue(null);
      const result = await service.validateKey("invalid-key");
      expect(result).toBeNull();
    });

    it("should return null for expired API key", async () => {
      const key = "mg_live_testkey";
      // A key that expired one year ago must be rejected even though the
      // repository still reports it as active.
      const expiredDate = new Date();
      expiredDate.setFullYear(expiredDate.getFullYear() - 1);
      const apiKey = { id: "1", isActive: true, expiresAt: expiredDate };
      mockApiKeysRepository.findActiveByKeyHash.mockResolvedValue(apiKey);

      const result = await service.validateKey(key);

      expect(result).toBeNull();
    });
  });
});
|
||||
63
backend/src/api-keys/api-keys.service.ts
Normal file
63
backend/src/api-keys/api-keys.service.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { randomBytes } from "node:crypto";
|
||||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import { HashingService } from "../crypto/services/hashing.service";
|
||||
import { ApiKeysRepository } from "./repositories/api-keys.repository";
|
||||
|
||||
@Injectable()
|
||||
export class ApiKeysService {
|
||||
private readonly logger = new Logger(ApiKeysService.name);
|
||||
|
||||
constructor(
|
||||
private readonly apiKeysRepository: ApiKeysRepository,
|
||||
private readonly hashingService: HashingService,
|
||||
) {}
|
||||
|
||||
async create(userId: string, name: string, expiresAt?: Date) {
|
||||
this.logger.log(`Creating API key for user ${userId}: ${name}`);
|
||||
const prefix = "mg_live_";
|
||||
const randomPart = randomBytes(24).toString("hex");
|
||||
const key = `${prefix}${randomPart}`;
|
||||
|
||||
const keyHash = await this.hashingService.hashSha256(key);
|
||||
|
||||
await this.apiKeysRepository.create({
|
||||
userId,
|
||||
name,
|
||||
prefix: prefix.substring(0, 8),
|
||||
keyHash,
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
return {
|
||||
name,
|
||||
key, // Retourné une seule fois à la création
|
||||
expiresAt,
|
||||
};
|
||||
}
|
||||
|
||||
async findAll(userId: string) {
|
||||
return await this.apiKeysRepository.findAll(userId);
|
||||
}
|
||||
|
||||
async revoke(userId: string, keyId: string) {
|
||||
this.logger.log(`Revoking API key ${keyId} for user ${userId}`);
|
||||
return await this.apiKeysRepository.revoke(userId, keyId);
|
||||
}
|
||||
|
||||
async validateKey(key: string) {
|
||||
const keyHash = await this.hashingService.hashSha256(key);
|
||||
|
||||
const apiKey = await this.apiKeysRepository.findActiveByKeyHash(keyHash);
|
||||
|
||||
if (!apiKey) return null;
|
||||
|
||||
if (apiKey.expiresAt && apiKey.expiresAt < new Date()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Update last used at
|
||||
await this.apiKeysRepository.updateLastUsed(apiKey.id);
|
||||
|
||||
return apiKey;
|
||||
}
|
||||
}
|
||||
18
backend/src/api-keys/dto/create-api-key.dto.ts
Normal file
18
backend/src/api-keys/dto/create-api-key.dto.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import {
|
||||
IsDateString,
|
||||
IsNotEmpty,
|
||||
IsOptional,
|
||||
IsString,
|
||||
MaxLength,
|
||||
} from "class-validator";
|
||||
|
||||
/** Payload for creating a new API key. */
export class CreateApiKeyDto {
  // Human-readable label for the key (e.g. "CI deploy key").
  @IsString()
  @IsNotEmpty()
  @MaxLength(128)
  name!: string;

  // Optional ISO-8601 expiry date string; the key never expires when omitted.
  @IsOptional()
  @IsDateString()
  expiresAt?: string;
}
|
||||
58
backend/src/api-keys/repositories/api-keys.repository.ts
Normal file
58
backend/src/api-keys/repositories/api-keys.repository.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import { DatabaseService } from "../../database/database.service";
|
||||
import { apiKeys } from "../../database/schemas";
|
||||
|
||||
@Injectable()
|
||||
export class ApiKeysRepository {
|
||||
constructor(private readonly databaseService: DatabaseService) {}
|
||||
|
||||
async create(data: {
|
||||
userId: string;
|
||||
name: string;
|
||||
prefix: string;
|
||||
keyHash: string;
|
||||
expiresAt?: Date;
|
||||
}) {
|
||||
return await this.databaseService.db.insert(apiKeys).values(data);
|
||||
}
|
||||
|
||||
async findAll(userId: string) {
|
||||
return await this.databaseService.db
|
||||
.select({
|
||||
id: apiKeys.id,
|
||||
name: apiKeys.name,
|
||||
prefix: apiKeys.prefix,
|
||||
isActive: apiKeys.isActive,
|
||||
lastUsedAt: apiKeys.lastUsedAt,
|
||||
expiresAt: apiKeys.expiresAt,
|
||||
createdAt: apiKeys.createdAt,
|
||||
})
|
||||
.from(apiKeys)
|
||||
.where(eq(apiKeys.userId, userId));
|
||||
}
|
||||
|
||||
async revoke(userId: string, keyId: string) {
|
||||
return await this.databaseService.db
|
||||
.update(apiKeys)
|
||||
.set({ isActive: false, updatedAt: new Date() })
|
||||
.where(and(eq(apiKeys.id, keyId), eq(apiKeys.userId, userId)))
|
||||
.returning();
|
||||
}
|
||||
|
||||
async findActiveByKeyHash(keyHash: string) {
|
||||
const result = await this.databaseService.db
|
||||
.select()
|
||||
.from(apiKeys)
|
||||
.where(and(eq(apiKeys.keyHash, keyHash), eq(apiKeys.isActive, true)))
|
||||
.limit(1);
|
||||
return result[0] || null;
|
||||
}
|
||||
|
||||
async updateLastUsed(id: string) {
|
||||
return await this.databaseService.db
|
||||
.update(apiKeys)
|
||||
.set({ lastUsedAt: new Date() })
|
||||
.where(eq(apiKeys.id, id));
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,84 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { CacheModule } from "@nestjs/cache-manager";
|
||||
import { MiddlewareConsumer, Module, NestModule } from "@nestjs/common";
|
||||
import { ConfigModule, ConfigService } from "@nestjs/config";
|
||||
import { ScheduleModule } from "@nestjs/schedule";
|
||||
import { ThrottlerModule } from "@nestjs/throttler";
|
||||
import { redisStore } from "cache-manager-redis-yet";
|
||||
import { AdminModule } from "./admin/admin.module";
|
||||
import { ApiKeysModule } from "./api-keys/api-keys.module";
|
||||
import { AppController } from "./app.controller";
|
||||
import { AppService } from "./app.service";
|
||||
import { AuthModule } from "./auth/auth.module";
|
||||
import { CategoriesModule } from "./categories/categories.module";
|
||||
import { CommonModule } from "./common/common.module";
|
||||
import { CrawlerDetectionMiddleware } from "./common/middlewares/crawler-detection.middleware";
|
||||
import { HTTPLoggerMiddleware } from "./common/middlewares/http-logger.middleware";
|
||||
import { validateEnv } from "./config/env.schema";
|
||||
import { ContentsModule } from "./contents/contents.module";
|
||||
import { CryptoModule } from "./crypto/crypto.module";
|
||||
import { DatabaseModule } from "./database/database.module";
|
||||
import { FavoritesModule } from "./favorites/favorites.module";
|
||||
import { HealthController } from "./health.controller";
|
||||
import { MailModule } from "./mail/mail.module";
|
||||
import { MediaModule } from "./media/media.module";
|
||||
import { ReportsModule } from "./reports/reports.module";
|
||||
import { S3Module } from "./s3/s3.module";
|
||||
import { SessionsModule } from "./sessions/sessions.module";
|
||||
import { TagsModule } from "./tags/tags.module";
|
||||
import { UsersModule } from "./users/users.module";
|
||||
|
||||
@Module({
|
||||
imports: [],
|
||||
controllers: [AppController],
|
||||
imports: [
|
||||
DatabaseModule,
|
||||
CryptoModule,
|
||||
CommonModule,
|
||||
S3Module,
|
||||
MailModule,
|
||||
UsersModule,
|
||||
AuthModule,
|
||||
CategoriesModule,
|
||||
ContentsModule,
|
||||
FavoritesModule,
|
||||
TagsModule,
|
||||
MediaModule,
|
||||
SessionsModule,
|
||||
ReportsModule,
|
||||
ApiKeysModule,
|
||||
AdminModule,
|
||||
ScheduleModule.forRoot(),
|
||||
ThrottlerModule.forRootAsync({
|
||||
imports: [ConfigModule],
|
||||
inject: [ConfigService],
|
||||
useFactory: (config: ConfigService) => [
|
||||
{
|
||||
ttl: 60000,
|
||||
limit: config.get("NODE_ENV") === "production" ? 100 : 1000,
|
||||
},
|
||||
],
|
||||
}),
|
||||
ConfigModule.forRoot({
|
||||
isGlobal: true,
|
||||
validate: validateEnv,
|
||||
}),
|
||||
CacheModule.registerAsync({
|
||||
isGlobal: true,
|
||||
imports: [ConfigModule],
|
||||
inject: [ConfigService],
|
||||
useFactory: async (config: ConfigService) => ({
|
||||
store: await redisStore({
|
||||
url: `redis://${config.get("REDIS_HOST")}:${config.get("REDIS_PORT")}`,
|
||||
}),
|
||||
ttl: 600, // 10 minutes
|
||||
}),
|
||||
}),
|
||||
],
|
||||
controllers: [AppController, HealthController],
|
||||
providers: [AppService],
|
||||
})
|
||||
export class AppModule {}
|
||||
export class AppModule implements NestModule {
|
||||
configure(consumer: MiddlewareConsumer) {
|
||||
consumer
|
||||
.apply(HTTPLoggerMiddleware, CrawlerDetectionMiddleware)
|
||||
.forRoutes("*");
|
||||
}
|
||||
}
|
||||
|
||||
123
backend/src/auth/auth.controller.ts
Normal file
123
backend/src/auth/auth.controller.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { Body, Controller, Headers, Post, Req, Res } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { Throttle } from "@nestjs/throttler";
|
||||
import type { Request, Response } from "express";
|
||||
import { getIronSession } from "iron-session";
|
||||
import { AuthService } from "./auth.service";
|
||||
import { LoginDto } from "./dto/login.dto";
|
||||
import { RegisterDto } from "./dto/register.dto";
|
||||
import { Verify2faDto } from "./dto/verify-2fa.dto";
|
||||
import { getSessionOptions, SessionData } from "./session.config";
|
||||
|
||||
@Controller("auth")
|
||||
export class AuthController {
|
||||
constructor(
|
||||
private readonly authService: AuthService,
|
||||
private readonly configService: ConfigService,
|
||||
) {}
|
||||
|
||||
@Post("register")
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
register(@Body() registerDto: RegisterDto) {
|
||||
return this.authService.register(registerDto);
|
||||
}
|
||||
|
||||
@Post("login")
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
async login(
|
||||
@Body() loginDto: LoginDto,
|
||||
@Headers("user-agent") userAgent: string,
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
) {
|
||||
const ip = req.ip;
|
||||
const result = await this.authService.login(loginDto, userAgent, ip);
|
||||
|
||||
if (result.access_token) {
|
||||
const session = await getIronSession<SessionData>(
|
||||
req,
|
||||
res,
|
||||
getSessionOptions(this.configService.get("SESSION_PASSWORD") as string),
|
||||
);
|
||||
session.accessToken = result.access_token;
|
||||
session.refreshToken = result.refresh_token;
|
||||
session.userId = result.userId;
|
||||
await session.save();
|
||||
|
||||
// On ne renvoie pas les tokens dans le body pour plus de sécurité
|
||||
return res.json({
|
||||
message: result.message,
|
||||
userId: result.userId,
|
||||
});
|
||||
}
|
||||
|
||||
return res.json(result);
|
||||
}
|
||||
|
||||
@Post("verify-2fa")
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
async verifyTwoFactor(
|
||||
@Body() verify2faDto: Verify2faDto,
|
||||
@Headers("user-agent") userAgent: string,
|
||||
@Req() req: Request,
|
||||
@Res() res: Response,
|
||||
) {
|
||||
const ip = req.ip;
|
||||
const result = await this.authService.verifyTwoFactorLogin(
|
||||
verify2faDto.userId,
|
||||
verify2faDto.token,
|
||||
userAgent,
|
||||
ip,
|
||||
);
|
||||
|
||||
if (result.access_token) {
|
||||
const session = await getIronSession<SessionData>(
|
||||
req,
|
||||
res,
|
||||
getSessionOptions(this.configService.get("SESSION_PASSWORD") as string),
|
||||
);
|
||||
session.accessToken = result.access_token;
|
||||
session.refreshToken = result.refresh_token;
|
||||
session.userId = verify2faDto.userId;
|
||||
await session.save();
|
||||
|
||||
return res.json({
|
||||
message: result.message,
|
||||
});
|
||||
}
|
||||
|
||||
return res.json(result);
|
||||
}
|
||||
|
||||
@Post("refresh")
|
||||
async refresh(@Req() req: Request, @Res() res: Response) {
|
||||
const session = await getIronSession<SessionData>(
|
||||
req,
|
||||
res,
|
||||
getSessionOptions(this.configService.get("SESSION_PASSWORD") as string),
|
||||
);
|
||||
|
||||
if (!session.refreshToken) {
|
||||
return res.status(401).json({ message: "No refresh token" });
|
||||
}
|
||||
|
||||
const result = await this.authService.refresh(session.refreshToken);
|
||||
|
||||
session.accessToken = result.access_token;
|
||||
session.refreshToken = result.refresh_token;
|
||||
await session.save();
|
||||
|
||||
return res.json({ message: "Token refreshed" });
|
||||
}
|
||||
|
||||
@Post("logout")
|
||||
async logout(@Req() req: Request, @Res() res: Response) {
|
||||
const session = await getIronSession<SessionData>(
|
||||
req,
|
||||
res,
|
||||
getSessionOptions(this.configService.get("SESSION_PASSWORD") as string),
|
||||
);
|
||||
session.destroy();
|
||||
return res.json({ message: "User logged out" });
|
||||
}
|
||||
}
|
||||
32
backend/src/auth/auth.module.ts
Normal file
32
backend/src/auth/auth.module.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { forwardRef, Module } from "@nestjs/common";
|
||||
import { SessionsModule } from "../sessions/sessions.module";
|
||||
import { UsersModule } from "../users/users.module";
|
||||
import { AuthController } from "./auth.controller";
|
||||
import { AuthService } from "./auth.service";
|
||||
import { AuthGuard } from "./guards/auth.guard";
|
||||
import { OptionalAuthGuard } from "./guards/optional-auth.guard";
|
||||
import { RolesGuard } from "./guards/roles.guard";
|
||||
import { RbacService } from "./rbac.service";
|
||||
import { RbacRepository } from "./repositories/rbac.repository";
|
||||
|
||||
// Authentication module: the auth controller plus the RBAC layer and the
// route guards. Providers and exports mirror each other so downstream
// modules can inject the same guards/services.
// NOTE(review): forwardRef on UsersModule suggests a circular dependency
// between UsersModule and AuthModule — confirm against users.module.ts.
@Module({
  imports: [forwardRef(() => UsersModule), SessionsModule],
  controllers: [AuthController],
  providers: [
    AuthService,
    RbacService,
    RbacRepository,
    AuthGuard,
    OptionalAuthGuard,
    RolesGuard,
  ],
  exports: [
    AuthService,
    RbacService,
    RbacRepository,
    AuthGuard,
    OptionalAuthGuard,
    RolesGuard,
  ],
})
export class AuthModule {}
|
||||
261
backend/src/auth/auth.service.spec.ts
Normal file
261
backend/src/auth/auth.service.spec.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
jest.mock("uuid", () => ({
|
||||
v4: jest.fn(() => "mocked-uuid"),
|
||||
}));
|
||||
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
|
||||
jest.mock("@noble/post-quantum/ml-kem.js", () => ({
|
||||
ml_kem768: {
|
||||
keygen: jest.fn(),
|
||||
encapsulate: jest.fn(),
|
||||
decapsulate: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
jest.mock("jose", () => ({
|
||||
SignJWT: jest.fn(),
|
||||
jwtVerify: jest.fn(),
|
||||
}));
|
||||
|
||||
import { BadRequestException, UnauthorizedException } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { authenticator } from "otplib";
|
||||
import * as qrcode from "qrcode";
|
||||
import { HashingService } from "../crypto/services/hashing.service";
|
||||
import { JwtService } from "../crypto/services/jwt.service";
|
||||
import { SessionsService } from "../sessions/sessions.service";
|
||||
import { UsersService } from "../users/users.service";
|
||||
import { AuthService } from "./auth.service";
|
||||
|
||||
jest.mock("otplib");
|
||||
jest.mock("qrcode");
|
||||
jest.mock("../users/users.service");
|
||||
jest.mock("../sessions/sessions.service");
|
||||
|
||||
describe("AuthService", () => {
|
||||
let service: AuthService;
|
||||
|
||||
const mockUsersService = {
|
||||
findOne: jest.fn(),
|
||||
setTwoFactorSecret: jest.fn(),
|
||||
getTwoFactorSecret: jest.fn(),
|
||||
toggleTwoFactor: jest.fn(),
|
||||
create: jest.fn(),
|
||||
findByEmailHash: jest.fn(),
|
||||
findOneWithPrivateData: jest.fn(),
|
||||
};
|
||||
|
||||
const mockHashingService = {
|
||||
hashPassword: jest.fn(),
|
||||
hashEmail: jest.fn(),
|
||||
verifyPassword: jest.fn(),
|
||||
};
|
||||
|
||||
const mockJwtService = {
|
||||
generateJwt: jest.fn(),
|
||||
};
|
||||
|
||||
const mockSessionsService = {
|
||||
createSession: jest.fn(),
|
||||
refreshSession: jest.fn(),
|
||||
};
|
||||
|
||||
const mockConfigService = {
|
||||
get: jest.fn(),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
AuthService,
|
||||
{ provide: UsersService, useValue: mockUsersService },
|
||||
{ provide: HashingService, useValue: mockHashingService },
|
||||
{ provide: JwtService, useValue: mockJwtService },
|
||||
{ provide: SessionsService, useValue: mockSessionsService },
|
||||
{ provide: ConfigService, useValue: mockConfigService },
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<AuthService>(AuthService);
|
||||
});
|
||||
|
||||
it("should be defined", () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
describe("generateTwoFactorSecret", () => {
|
||||
it("should generate a 2FA secret", async () => {
|
||||
const userId = "user-id";
|
||||
const user = { username: "testuser" };
|
||||
mockUsersService.findOne.mockResolvedValue(user);
|
||||
(authenticator.generateSecret as jest.Mock).mockReturnValue("secret");
|
||||
(authenticator.keyuri as jest.Mock).mockReturnValue("otpauth://...");
|
||||
(qrcode.toDataURL as jest.Mock).mockResolvedValue(
|
||||
"data:image/png;base64,...",
|
||||
);
|
||||
|
||||
const result = await service.generateTwoFactorSecret(userId);
|
||||
|
||||
expect(result).toEqual({
|
||||
secret: "secret",
|
||||
qrCodeDataUrl: "data:image/png;base64,...",
|
||||
});
|
||||
expect(mockUsersService.setTwoFactorSecret).toHaveBeenCalledWith(
|
||||
userId,
|
||||
"secret",
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException if user not found", async () => {
|
||||
mockUsersService.findOne.mockResolvedValue(null);
|
||||
await expect(service.generateTwoFactorSecret("invalid")).rejects.toThrow(
|
||||
UnauthorizedException,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("enableTwoFactor", () => {
|
||||
it("should enable 2FA", async () => {
|
||||
const userId = "user-id";
|
||||
const token = "123456";
|
||||
mockUsersService.getTwoFactorSecret.mockResolvedValue("secret");
|
||||
(authenticator.verify as jest.Mock).mockReturnValue(true);
|
||||
|
||||
const result = await service.enableTwoFactor(userId, token);
|
||||
|
||||
expect(result).toEqual({ message: "2FA enabled successfully" });
|
||||
expect(mockUsersService.toggleTwoFactor).toHaveBeenCalledWith(userId, true);
|
||||
});
|
||||
|
||||
it("should throw BadRequestException if 2FA not initiated", async () => {
|
||||
mockUsersService.getTwoFactorSecret.mockResolvedValue(null);
|
||||
await expect(service.enableTwoFactor("user-id", "token")).rejects.toThrow(
|
||||
BadRequestException,
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw BadRequestException if token is invalid", async () => {
|
||||
mockUsersService.getTwoFactorSecret.mockResolvedValue("secret");
|
||||
(authenticator.verify as jest.Mock).mockReturnValue(false);
|
||||
await expect(service.enableTwoFactor("user-id", "invalid")).rejects.toThrow(
|
||||
BadRequestException,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("register", () => {
|
||||
it("should register a user", async () => {
|
||||
const dto = {
|
||||
username: "test",
|
||||
email: "test@example.com",
|
||||
password: "password",
|
||||
};
|
||||
mockHashingService.hashPassword.mockResolvedValue("hashed-password");
|
||||
mockHashingService.hashEmail.mockResolvedValue("hashed-email");
|
||||
mockUsersService.create.mockResolvedValue({ uuid: "new-user-id" });
|
||||
|
||||
const result = await service.register(dto);
|
||||
|
||||
expect(result).toEqual({
|
||||
message: "User registered successfully",
|
||||
userId: "new-user-id",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("login", () => {
|
||||
it("should login a user", async () => {
|
||||
const dto = { email: "test@example.com", password: "password" };
|
||||
const user = {
|
||||
uuid: "user-id",
|
||||
username: "test",
|
||||
passwordHash: "hash",
|
||||
isTwoFactorEnabled: false,
|
||||
};
|
||||
mockHashingService.hashEmail.mockResolvedValue("hashed-email");
|
||||
mockUsersService.findByEmailHash.mockResolvedValue(user);
|
||||
mockHashingService.verifyPassword.mockResolvedValue(true);
|
||||
mockJwtService.generateJwt.mockResolvedValue("access-token");
|
||||
mockSessionsService.createSession.mockResolvedValue({
|
||||
refreshToken: "refresh-token",
|
||||
});
|
||||
|
||||
const result = await service.login(dto);
|
||||
|
||||
expect(result).toEqual({
|
||||
message: "User logged in successfully",
|
||||
access_token: "access-token",
|
||||
refresh_token: "refresh-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("should return requires2FA if 2FA is enabled", async () => {
|
||||
const dto = { email: "test@example.com", password: "password" };
|
||||
const user = {
|
||||
uuid: "user-id",
|
||||
username: "test",
|
||||
passwordHash: "hash",
|
||||
isTwoFactorEnabled: true,
|
||||
};
|
||||
mockHashingService.hashEmail.mockResolvedValue("hashed-email");
|
||||
mockUsersService.findByEmailHash.mockResolvedValue(user);
|
||||
mockHashingService.verifyPassword.mockResolvedValue(true);
|
||||
|
||||
const result = await service.login(dto);
|
||||
|
||||
expect(result).toEqual({
|
||||
message: "2FA required",
|
||||
requires2FA: true,
|
||||
userId: "user-id",
|
||||
});
|
||||
});
|
||||
|
||||
it("should throw UnauthorizedException for invalid credentials", async () => {
|
||||
mockUsersService.findByEmailHash.mockResolvedValue(null);
|
||||
await expect(service.login({ email: "x", password: "y" })).rejects.toThrow(
|
||||
UnauthorizedException,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("verifyTwoFactorLogin", () => {
|
||||
it("should verify 2FA login", async () => {
|
||||
const userId = "user-id";
|
||||
const token = "123456";
|
||||
const user = { uuid: userId, username: "test", isTwoFactorEnabled: true };
|
||||
mockUsersService.findOneWithPrivateData.mockResolvedValue(user);
|
||||
mockUsersService.getTwoFactorSecret.mockResolvedValue("secret");
|
||||
(authenticator.verify as jest.Mock).mockReturnValue(true);
|
||||
mockJwtService.generateJwt.mockResolvedValue("access-token");
|
||||
mockSessionsService.createSession.mockResolvedValue({
|
||||
refreshToken: "refresh-token",
|
||||
});
|
||||
|
||||
const result = await service.verifyTwoFactorLogin(userId, token);
|
||||
|
||||
expect(result).toEqual({
|
||||
message: "User logged in successfully (2FA)",
|
||||
access_token: "access-token",
|
||||
refresh_token: "refresh-token",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("refresh", () => {
|
||||
it("should refresh tokens", async () => {
|
||||
const refreshToken = "old-refresh";
|
||||
const session = { userId: "user-id", refreshToken: "new-refresh" };
|
||||
const user = { uuid: "user-id", username: "test" };
|
||||
mockSessionsService.refreshSession.mockResolvedValue(session);
|
||||
mockUsersService.findOne.mockResolvedValue(user);
|
||||
mockJwtService.generateJwt.mockResolvedValue("new-access");
|
||||
|
||||
const result = await service.refresh(refreshToken);
|
||||
|
||||
expect(result).toEqual({
|
||||
access_token: "new-access",
|
||||
refresh_token: "new-refresh",
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
219
backend/src/auth/auth.service.ts
Normal file
219
backend/src/auth/auth.service.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
forwardRef,
|
||||
Inject,
|
||||
Injectable,
|
||||
Logger,
|
||||
UnauthorizedException,
|
||||
} from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { authenticator } from "otplib";
|
||||
import { toDataURL } from "qrcode";
|
||||
import { HashingService } from "../crypto/services/hashing.service";
|
||||
import { JwtService } from "../crypto/services/jwt.service";
|
||||
import { SessionsService } from "../sessions/sessions.service";
|
||||
import { UsersService } from "../users/users.service";
|
||||
import { LoginDto } from "./dto/login.dto";
|
||||
import { RegisterDto } from "./dto/register.dto";
|
||||
|
||||
@Injectable()
|
||||
export class AuthService {
|
||||
private readonly logger = new Logger(AuthService.name);
|
||||
|
||||
constructor(
|
||||
@Inject(forwardRef(() => UsersService))
|
||||
private readonly usersService: UsersService,
|
||||
private readonly hashingService: HashingService,
|
||||
private readonly jwtService: JwtService,
|
||||
private readonly sessionsService: SessionsService,
|
||||
private readonly configService: ConfigService,
|
||||
) {}
|
||||
|
||||
async generateTwoFactorSecret(userId: string) {
|
||||
this.logger.log(`Generating 2FA secret for user ${userId}`);
|
||||
const user = await this.usersService.findOne(userId);
|
||||
if (!user) throw new UnauthorizedException();
|
||||
|
||||
const secret = authenticator.generateSecret();
|
||||
const otpauthUrl = authenticator.keyuri(
|
||||
user.username,
|
||||
this.configService.get("DOMAIN_NAME") || "Memegoat",
|
||||
secret,
|
||||
);
|
||||
|
||||
await this.usersService.setTwoFactorSecret(userId, secret);
|
||||
|
||||
const qrCodeDataUrl = await toDataURL(otpauthUrl);
|
||||
return {
|
||||
secret,
|
||||
qrCodeDataUrl,
|
||||
};
|
||||
}
|
||||
|
||||
async enableTwoFactor(userId: string, token: string) {
|
||||
this.logger.log(`Enabling 2FA for user ${userId}`);
|
||||
const secret = await this.usersService.getTwoFactorSecret(userId);
|
||||
if (!secret) {
|
||||
throw new BadRequestException("2FA not initiated");
|
||||
}
|
||||
|
||||
const isValid = authenticator.verify({ token, secret });
|
||||
if (!isValid) {
|
||||
throw new BadRequestException("Invalid 2FA token");
|
||||
}
|
||||
|
||||
await this.usersService.toggleTwoFactor(userId, true);
|
||||
return { message: "2FA enabled successfully" };
|
||||
}
|
||||
|
||||
async disableTwoFactor(userId: string, token: string) {
|
||||
this.logger.log(`Disabling 2FA for user ${userId}`);
|
||||
const secret = await this.usersService.getTwoFactorSecret(userId);
|
||||
if (!secret) {
|
||||
throw new BadRequestException("2FA not enabled");
|
||||
}
|
||||
|
||||
const isValid = authenticator.verify({ token, secret });
|
||||
if (!isValid) {
|
||||
throw new BadRequestException("Invalid 2FA token");
|
||||
}
|
||||
|
||||
await this.usersService.toggleTwoFactor(userId, false);
|
||||
return { message: "2FA disabled successfully" };
|
||||
}
|
||||
|
||||
async register(dto: RegisterDto) {
|
||||
this.logger.log(`Registering new user: ${dto.username}`);
|
||||
const { username, email, password } = dto;
|
||||
|
||||
const passwordHash = await this.hashingService.hashPassword(password);
|
||||
const emailHash = await this.hashingService.hashEmail(email);
|
||||
|
||||
const user = await this.usersService.create({
|
||||
username,
|
||||
email,
|
||||
passwordHash,
|
||||
emailHash,
|
||||
});
|
||||
|
||||
return {
|
||||
message: "User registered successfully",
|
||||
userId: user.uuid,
|
||||
};
|
||||
}
|
||||
|
||||
async login(dto: LoginDto, userAgent?: string, ip?: string) {
|
||||
this.logger.log(`Login attempt for email: ${dto.email}`);
|
||||
const { email, password } = dto;
|
||||
|
||||
const emailHash = await this.hashingService.hashEmail(email);
|
||||
const user = await this.usersService.findByEmailHash(emailHash);
|
||||
|
||||
if (!user) {
|
||||
this.logger.warn(`Login failed: user not found for email hash`);
|
||||
throw new UnauthorizedException("Invalid credentials");
|
||||
}
|
||||
|
||||
const isPasswordValid = await this.hashingService.verifyPassword(
|
||||
password,
|
||||
user.passwordHash,
|
||||
);
|
||||
|
||||
if (!isPasswordValid) {
|
||||
this.logger.warn(`Login failed: invalid password for user ${user.uuid}`);
|
||||
throw new UnauthorizedException("Invalid credentials");
|
||||
}
|
||||
|
||||
if (user.isTwoFactorEnabled) {
|
||||
this.logger.log(`2FA required for user ${user.uuid}`);
|
||||
return {
|
||||
message: "2FA required",
|
||||
requires2FA: true,
|
||||
userId: user.uuid,
|
||||
};
|
||||
}
|
||||
|
||||
const accessToken = await this.jwtService.generateJwt({
|
||||
sub: user.uuid,
|
||||
username: user.username,
|
||||
});
|
||||
|
||||
const session = await this.sessionsService.createSession(
|
||||
user.uuid,
|
||||
userAgent,
|
||||
ip,
|
||||
);
|
||||
|
||||
this.logger.log(`User ${user.uuid} logged in successfully`);
|
||||
return {
|
||||
message: "User logged in successfully",
|
||||
access_token: accessToken,
|
||||
refresh_token: session.refreshToken,
|
||||
};
|
||||
}
|
||||
|
||||
async verifyTwoFactorLogin(
|
||||
userId: string,
|
||||
token: string,
|
||||
userAgent?: string,
|
||||
ip?: string,
|
||||
) {
|
||||
this.logger.log(`2FA verification attempt for user ${userId}`);
|
||||
const user = await this.usersService.findOneWithPrivateData(userId);
|
||||
if (!user || !user.isTwoFactorEnabled) {
|
||||
throw new UnauthorizedException();
|
||||
}
|
||||
|
||||
const secret = await this.usersService.getTwoFactorSecret(userId);
|
||||
if (!secret) throw new UnauthorizedException();
|
||||
|
||||
const isValid = authenticator.verify({ token, secret });
|
||||
if (!isValid) {
|
||||
this.logger.warn(
|
||||
`2FA verification failed for user ${userId}: invalid token`,
|
||||
);
|
||||
throw new UnauthorizedException("Invalid 2FA token");
|
||||
}
|
||||
|
||||
const accessToken = await this.jwtService.generateJwt({
|
||||
sub: user.uuid,
|
||||
username: user.username,
|
||||
});
|
||||
|
||||
const session = await this.sessionsService.createSession(
|
||||
user.uuid,
|
||||
userAgent,
|
||||
ip,
|
||||
);
|
||||
|
||||
this.logger.log(`User ${userId} logged in successfully via 2FA`);
|
||||
return {
|
||||
message: "User logged in successfully (2FA)",
|
||||
access_token: accessToken,
|
||||
refresh_token: session.refreshToken,
|
||||
};
|
||||
}
|
||||
|
||||
async refresh(refreshToken: string) {
|
||||
const session = await this.sessionsService.refreshSession(refreshToken);
|
||||
const user = await this.usersService.findOne(session.userId);
|
||||
|
||||
if (!user) {
|
||||
throw new UnauthorizedException("User not found");
|
||||
}
|
||||
|
||||
const accessToken = await this.jwtService.generateJwt({
|
||||
sub: user.uuid,
|
||||
username: user.username,
|
||||
});
|
||||
|
||||
return {
|
||||
access_token: accessToken,
|
||||
refresh_token: session.refreshToken,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Logs the user out.
 *
 * NOTE(review): this only returns a confirmation message — no session or
 * refresh-token revocation happens here. Confirm the controller clears the
 * iron-session cookie and that the session is invalidated server-side.
 */
async logout() {
  return { message: "User logged out" };
}
|
||||
}
|
||||
3
backend/src/auth/decorators/roles.decorator.ts
Normal file
3
backend/src/auth/decorators/roles.decorator.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
import { SetMetadata } from "@nestjs/common";

/**
 * Route decorator attaching the role slugs required to access a handler;
 * read back by RolesGuard via the "roles" metadata key.
 */
export const Roles = (...roles: string[]) => SetMetadata("roles", roles);
|
||||
10
backend/src/auth/dto/login.dto.ts
Normal file
10
backend/src/auth/dto/login.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsEmail, IsNotEmpty, IsString } from "class-validator";

/** Payload for the password-based login endpoint. */
export class LoginDto {
  // Must be a syntactically valid e-mail address.
  @IsEmail()
  email!: string;

  // Any non-empty string; strength rules are enforced at registration.
  @IsString()
  @IsNotEmpty()
  password!: string;
}
|
||||
7
backend/src/auth/dto/refresh.dto.ts
Normal file
7
backend/src/auth/dto/refresh.dto.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { IsNotEmpty, IsString } from "class-validator";

/** Payload for the token-refresh endpoint. */
export class RefreshDto {
  // Opaque refresh token previously issued at login.
  @IsString()
  @IsNotEmpty()
  refresh_token!: string;
}
|
||||
25
backend/src/auth/dto/register.dto.ts
Normal file
25
backend/src/auth/dto/register.dto.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import {
|
||||
IsEmail,
|
||||
IsNotEmpty,
|
||||
IsString,
|
||||
MaxLength,
|
||||
MinLength,
|
||||
} from "class-validator";
|
||||
|
||||
export class RegisterDto {
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
@MaxLength(32)
|
||||
username!: string;
|
||||
|
||||
@IsString()
|
||||
@MaxLength(32)
|
||||
displayName?: string;
|
||||
|
||||
@IsEmail()
|
||||
email!: string;
|
||||
|
||||
@IsString()
|
||||
@MinLength(8)
|
||||
password!: string;
|
||||
}
|
||||
10
backend/src/auth/dto/verify-2fa.dto.ts
Normal file
10
backend/src/auth/dto/verify-2fa.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsNotEmpty, IsString, IsUUID } from "class-validator";

/** Payload for the second step of a 2FA login. */
export class Verify2faDto {
  // UUID of the user who passed the password step.
  @IsUUID()
  userId!: string;

  // TOTP code from the user's authenticator app.
  @IsString()
  @IsNotEmpty()
  token!: string;
}
|
||||
44
backend/src/auth/guards/auth.guard.ts
Normal file
44
backend/src/auth/guards/auth.guard.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import {
  CanActivate,
  ExecutionContext,
  Injectable,
  UnauthorizedException,
} from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { getIronSession } from "iron-session";
import { JwtService } from "../../crypto/services/jwt.service";
import { getSessionOptions, SessionData } from "../session.config";

/**
 * Guard requiring a valid access token stored in the iron-session cookie.
 * On success the decoded JWT payload is attached to `request.user`.
 */
@Injectable()
export class AuthGuard implements CanActivate {
  constructor(
    private readonly jwtService: JwtService,
    private readonly configService: ConfigService,
  ) {}

  async canActivate(context: ExecutionContext): Promise<boolean> {
    const request = context.switchToHttp().getRequest();
    const response = context.switchToHttp().getResponse();

    // Unseal the session cookie. NOTE(review): the `as string` cast hides a
    // missing SESSION_PASSWORD — confirm startup validates this env var.
    const session = await getIronSession<SessionData>(
      request,
      response,
      getSessionOptions(this.configService.get("SESSION_PASSWORD") as string),
    );

    const token = session.accessToken;

    if (!token) {
      throw new UnauthorizedException();
    }

    try {
      const payload = await this.jwtService.verifyJwt(token);
      request.user = payload;
    } catch {
      // Any verification failure (expired, malformed, bad signature) → 401.
      throw new UnauthorizedException();
    }

    return true;
  }
}
|
||||
39
backend/src/auth/guards/optional-auth.guard.ts
Normal file
39
backend/src/auth/guards/optional-auth.guard.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { CanActivate, ExecutionContext, Injectable } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { getIronSession } from "iron-session";
import { JwtService } from "../../crypto/services/jwt.service";
import { getSessionOptions, SessionData } from "../session.config";

/**
 * Like AuthGuard but never blocks: the request always passes, and
 * `request.user` is populated only when a valid access token is present.
 */
@Injectable()
export class OptionalAuthGuard implements CanActivate {
  constructor(
    private readonly jwtService: JwtService,
    private readonly configService: ConfigService,
  ) {}

  async canActivate(context: ExecutionContext): Promise<boolean> {
    const request = context.switchToHttp().getRequest();
    const response = context.switchToHttp().getResponse();

    // Same cookie-unsealing as AuthGuard.
    const session = await getIronSession<SessionData>(
      request,
      response,
      getSessionOptions(this.configService.get("SESSION_PASSWORD") as string),
    );

    const token = session.accessToken;

    // Anonymous requests are allowed through with no `request.user`.
    if (!token) {
      return true;
    }

    try {
      const payload = await this.jwtService.verifyJwt(token);
      request.user = payload;
    } catch {
      // Ignore invalid tokens for optional auth
    }

    return true;
  }
}
|
||||
28
backend/src/auth/guards/roles.guard.ts
Normal file
28
backend/src/auth/guards/roles.guard.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { CanActivate, ExecutionContext, Injectable } from "@nestjs/common";
import { Reflector } from "@nestjs/core";
import { RbacService } from "../rbac.service";

/**
 * Enforces the @Roles() decorator: the request passes when the
 * authenticated user holds at least one required role slug. Routes without
 * @Roles() metadata pass unconditionally.
 */
@Injectable()
export class RolesGuard implements CanActivate {
  constructor(
    private reflector: Reflector,
    private rbacService: RbacService,
  ) {}

  async canActivate(context: ExecutionContext): Promise<boolean> {
    // Handler-level @Roles() takes precedence over a class-level one.
    const requiredRoles = this.reflector.getAllAndOverride<string[]>("roles", [
      context.getHandler(),
      context.getClass(),
    ]);
    if (!requiredRoles) {
      return true;
    }
    // Requires AuthGuard to have populated request.user beforehand.
    const { user } = context.switchToHttp().getRequest();
    if (!user) {
      return false;
    }

    // OR semantics: any one of the required roles grants access.
    const userRoles = await this.rbacService.getUserRoles(user.sub);
    return requiredRoles.some((role) => userRoles.includes(role));
  }
}
|
||||
61
backend/src/auth/rbac.service.spec.ts
Normal file
61
backend/src/auth/rbac.service.spec.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
import { RbacService } from "./rbac.service";
import { RbacRepository } from "./repositories/rbac.repository";

/** Unit tests for RbacService with a fully mocked RbacRepository. */
describe("RbacService", () => {
  let service: RbacService;
  let repository: RbacRepository;

  const mockRbacRepository = {
    findRolesByUserId: jest.fn(),
    findPermissionsByUserId: jest.fn(),
  };

  beforeEach(async () => {
    jest.clearAllMocks();
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        RbacService,
        {
          provide: RbacRepository,
          useValue: mockRbacRepository,
        },
      ],
    }).compile();

    service = module.get<RbacService>(RbacService);
    repository = module.get<RbacRepository>(RbacRepository);
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("getUserRoles", () => {
    // Service is a thin pass-through; verify delegation and return value.
    it("should return user roles", async () => {
      const userId = "user-id";
      const mockRoles = ["admin", "user"];
      mockRbacRepository.findRolesByUserId.mockResolvedValue(mockRoles);

      const result = await service.getUserRoles(userId);

      expect(result).toEqual(mockRoles);
      expect(repository.findRolesByUserId).toHaveBeenCalledWith(userId);
    });
  });

  describe("getUserPermissions", () => {
    it("should return user permissions", async () => {
      const userId = "user-id";
      const mockPermissions = ["read", "write"];
      mockRbacRepository.findPermissionsByUserId.mockResolvedValue(
        mockPermissions,
      );

      const result = await service.getUserPermissions(userId);

      expect(result).toEqual(mockPermissions);
      expect(repository.findPermissionsByUserId).toHaveBeenCalledWith(userId);
    });
  });
});
|
||||
15
backend/src/auth/rbac.service.ts
Normal file
15
backend/src/auth/rbac.service.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Injectable } from "@nestjs/common";
import { RbacRepository } from "./repositories/rbac.repository";

/** Thin service layer over the RBAC repository (roles and permissions). */
@Injectable()
export class RbacService {
  constructor(private readonly rbacRepository: RbacRepository) {}

  /** Returns the role slugs assigned to a user. */
  async getUserRoles(userId: string) {
    return this.rbacRepository.findRolesByUserId(userId);
  }

  /** Returns the permission slugs a user holds via their roles. */
  async getUserPermissions(userId: string) {
    return this.rbacRepository.findPermissionsByUserId(userId);
  }
}
|
||||
42
backend/src/auth/repositories/rbac.repository.ts
Normal file
42
backend/src/auth/repositories/rbac.repository.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { Injectable } from "@nestjs/common";
import { eq } from "drizzle-orm";
import { DatabaseService } from "../../database/database.service";
import {
  permissions,
  roles,
  rolesToPermissions,
  usersToRoles,
} from "../../database/schemas";

/** Drizzle queries for the user → role → permission join tables. */
@Injectable()
export class RbacRepository {
  constructor(private readonly databaseService: DatabaseService) {}

  /** Role slugs assigned to a user (users_to_roles ⋈ roles). */
  async findRolesByUserId(userId: string) {
    const result = await this.databaseService.db
      .select({
        slug: roles.slug,
      })
      .from(usersToRoles)
      .innerJoin(roles, eq(usersToRoles.roleId, roles.id))
      .where(eq(usersToRoles.userId, userId));

    return result.map((r) => r.slug);
  }

  /**
   * Permission slugs a user holds through any of their roles
   * (users_to_roles ⋈ roles_to_permissions ⋈ permissions), deduplicated
   * because several roles may grant the same permission.
   */
  async findPermissionsByUserId(userId: string) {
    const result = await this.databaseService.db
      .select({
        slug: permissions.slug,
      })
      .from(usersToRoles)
      .innerJoin(
        rolesToPermissions,
        eq(usersToRoles.roleId, rolesToPermissions.roleId),
      )
      .innerJoin(permissions, eq(rolesToPermissions.permissionId, permissions.id))
      .where(eq(usersToRoles.userId, userId));

    return Array.from(new Set(result.map((p) => p.slug)));
  }
}
|
||||
18
backend/src/auth/session.config.ts
Normal file
18
backend/src/auth/session.config.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { SessionOptions } from "iron-session";

/** Data sealed inside the encrypted iron-session cookie. */
export interface SessionData {
  accessToken?: string;
  refreshToken?: string;
  userId?: string;
}

/**
 * iron-session options factory.
 *
 * @param password - Secret used to seal the cookie. NOTE(review):
 *   iron-session requires at least 32 characters — confirm this is
 *   validated at startup.
 */
export const getSessionOptions = (password: string): SessionOptions => ({
  password,
  cookieName: "memegoat_session",
  cookieOptions: {
    // Only sent over HTTPS outside development.
    secure: process.env.NODE_ENV === "production",
    httpOnly: true,
    sameSite: "strict",
    maxAge: 60 * 60 * 24 * 7, // 7 days
  },
});
|
||||
57
backend/src/categories/categories.controller.ts
Normal file
57
backend/src/categories/categories.controller.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { CacheInterceptor, CacheKey, CacheTTL } from "@nestjs/cache-manager";
import {
  Body,
  Controller,
  Delete,
  Get,
  Param,
  Patch,
  Post,
  UseGuards,
  UseInterceptors,
} from "@nestjs/common";
import { Roles } from "../auth/decorators/roles.decorator";
import { AuthGuard } from "../auth/guards/auth.guard";
import { RolesGuard } from "../auth/guards/roles.guard";
import { CategoriesService } from "./categories.service";
import { CreateCategoryDto } from "./dto/create-category.dto";
import { UpdateCategoryDto } from "./dto/update-category.dto";

/**
 * CRUD endpoints for categories. Reads are public (the listing is cached);
 * writes require an authenticated user with the "admin" role.
 */
@Controller("categories")
export class CategoriesController {
  constructor(private readonly categoriesService: CategoriesService) {}

  // Cached listing; the service invalidates "categories/all" on mutation.
  @Get()
  @UseInterceptors(CacheInterceptor)
  @CacheKey("categories/all")
  @CacheTTL(3600000) // 1 hour
  findAll() {
    return this.categoriesService.findAll();
  }

  @Get(":id")
  findOne(@Param("id") id: string) {
    return this.categoriesService.findOne(id);
  }

  @Post()
  @UseGuards(AuthGuard, RolesGuard)
  @Roles("admin")
  create(@Body() createCategoryDto: CreateCategoryDto) {
    return this.categoriesService.create(createCategoryDto);
  }

  @Patch(":id")
  @UseGuards(AuthGuard, RolesGuard)
  @Roles("admin")
  update(@Param("id") id: string, @Body() updateCategoryDto: UpdateCategoryDto) {
    return this.categoriesService.update(id, updateCategoryDto);
  }

  @Delete(":id")
  @UseGuards(AuthGuard, RolesGuard)
  @Roles("admin")
  remove(@Param("id") id: string) {
    return this.categoriesService.remove(id);
  }
}
|
||||
13
backend/src/categories/categories.module.ts
Normal file
13
backend/src/categories/categories.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from "@nestjs/common";
import { AuthModule } from "../auth/auth.module";
import { CategoriesController } from "./categories.controller";
import { CategoriesService } from "./categories.service";
import { CategoriesRepository } from "./repositories/categories.repository";

/**
 * Categories feature module. Imports AuthModule so the controller's
 * AuthGuard/RolesGuard dependencies resolve; exports the service and
 * repository for use by other feature modules.
 */
@Module({
  imports: [AuthModule],
  controllers: [CategoriesController],
  providers: [CategoriesService, CategoriesRepository],
  exports: [CategoriesService, CategoriesRepository],
})
export class CategoriesModule {}
|
||||
124
backend/src/categories/categories.service.spec.ts
Normal file
124
backend/src/categories/categories.service.spec.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
import { CACHE_MANAGER } from "@nestjs/cache-manager";
import { Test, TestingModule } from "@nestjs/testing";
import { CategoriesService } from "./categories.service";
import { CreateCategoryDto } from "./dto/create-category.dto";
import { UpdateCategoryDto } from "./dto/update-category.dto";
import { CategoriesRepository } from "./repositories/categories.repository";

/**
 * Unit tests for CategoriesService with a mocked repository and cache
 * manager. Covers CRUD delegation and slug generation/regeneration.
 */
describe("CategoriesService", () => {
  let service: CategoriesService;
  let repository: CategoriesRepository;

  const mockCategoriesRepository = {
    findAll: jest.fn(),
    findOne: jest.fn(),
    create: jest.fn(),
    update: jest.fn(),
    remove: jest.fn(),
  };

  // Mutations call cacheManager.del("categories/all"); only `del` is needed.
  const mockCacheManager = {
    del: jest.fn(),
  };

  beforeEach(async () => {
    jest.clearAllMocks();
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        CategoriesService,
        {
          provide: CategoriesRepository,
          useValue: mockCategoriesRepository,
        },
        { provide: CACHE_MANAGER, useValue: mockCacheManager },
      ],
    }).compile();

    service = module.get<CategoriesService>(CategoriesService);
    repository = module.get<CategoriesRepository>(CategoriesRepository);
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("findAll", () => {
    it("should return all categories ordered by name", async () => {
      const mockCategories = [{ name: "A" }, { name: "B" }];
      mockCategoriesRepository.findAll.mockResolvedValue(mockCategories);

      const result = await service.findAll();

      expect(result).toEqual(mockCategories);
      expect(repository.findAll).toHaveBeenCalled();
    });
  });

  describe("findOne", () => {
    it("should return a category by id", async () => {
      const mockCategory = { id: "1", name: "Cat" };
      mockCategoriesRepository.findOne.mockResolvedValue(mockCategory);

      const result = await service.findOne("1");

      expect(result).toEqual(mockCategory);
      expect(repository.findOne).toHaveBeenCalledWith("1");
    });

    it("should return null if category not found", async () => {
      mockCategoriesRepository.findOne.mockResolvedValue(null);
      const result = await service.findOne("999");
      expect(result).toBeNull();
    });
  });

  describe("create", () => {
    // The service derives the slug from the name before persisting.
    it("should create a category and generate slug", async () => {
      const dto: CreateCategoryDto = { name: "Test Category" };
      mockCategoriesRepository.create.mockResolvedValue([
        { ...dto, slug: "test-category" },
      ]);

      const result = await service.create(dto);

      expect(repository.create).toHaveBeenCalledWith({
        name: "Test Category",
        slug: "test-category",
      });
      expect(result[0].slug).toBe("test-category");
    });
  });

  describe("update", () => {
    it("should update a category and regenerate slug", async () => {
      const id = "1";
      const dto: UpdateCategoryDto = { name: "New Name" };
      mockCategoriesRepository.update.mockResolvedValue([
        { id, ...dto, slug: "new-name" },
      ]);

      const result = await service.update(id, dto);

      expect(repository.update).toHaveBeenCalledWith(
        id,
        expect.objectContaining({
          name: "New Name",
          slug: "new-name",
        }),
      );
      expect(result[0].slug).toBe("new-name");
    });
  });

  describe("remove", () => {
    it("should remove a category", async () => {
      const id = "1";
      mockCategoriesRepository.remove.mockResolvedValue([{ id }]);

      const result = await service.remove(id);

      expect(repository.remove).toHaveBeenCalledWith(id);
      expect(result).toEqual([{ id }]);
    });
  });
});
|
||||
67
backend/src/categories/categories.service.ts
Normal file
67
backend/src/categories/categories.service.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { CACHE_MANAGER } from "@nestjs/cache-manager";
|
||||
import { Inject, Injectable, Logger } from "@nestjs/common";
|
||||
import type { Cache } from "cache-manager";
|
||||
import { CreateCategoryDto } from "./dto/create-category.dto";
|
||||
import { UpdateCategoryDto } from "./dto/update-category.dto";
|
||||
import { CategoriesRepository } from "./repositories/categories.repository";
|
||||
|
||||
@Injectable()
|
||||
export class CategoriesService {
|
||||
private readonly logger = new Logger(CategoriesService.name);
|
||||
|
||||
constructor(
|
||||
private readonly categoriesRepository: CategoriesRepository,
|
||||
@Inject(CACHE_MANAGER) private cacheManager: Cache,
|
||||
) {}
|
||||
|
||||
private async clearCategoriesCache() {
|
||||
this.logger.log("Clearing categories cache");
|
||||
await this.cacheManager.del("categories/all");
|
||||
}
|
||||
|
||||
async findAll() {
|
||||
return await this.categoriesRepository.findAll();
|
||||
}
|
||||
|
||||
async findOne(id: string) {
|
||||
return await this.categoriesRepository.findOne(id);
|
||||
}
|
||||
|
||||
async create(data: CreateCategoryDto) {
|
||||
this.logger.log(`Creating category: ${data.name}`);
|
||||
const slug = data.name
|
||||
.toLowerCase()
|
||||
.replace(/ /g, "-")
|
||||
.replace(/[^\w-]/g, "");
|
||||
const result = await this.categoriesRepository.create({ ...data, slug });
|
||||
|
||||
await this.clearCategoriesCache();
|
||||
return result;
|
||||
}
|
||||
|
||||
async update(id: string, data: UpdateCategoryDto) {
|
||||
this.logger.log(`Updating category: ${id}`);
|
||||
const updateData = {
|
||||
...data,
|
||||
updatedAt: new Date(),
|
||||
slug: data.name
|
||||
? data.name
|
||||
.toLowerCase()
|
||||
.replace(/ /g, "-")
|
||||
.replace(/[^\w-]/g, "")
|
||||
: undefined,
|
||||
};
|
||||
const result = await this.categoriesRepository.update(id, updateData);
|
||||
|
||||
await this.clearCategoriesCache();
|
||||
return result;
|
||||
}
|
||||
|
||||
async remove(id: string) {
|
||||
this.logger.log(`Removing category: ${id}`);
|
||||
const result = await this.categoriesRepository.remove(id);
|
||||
|
||||
await this.clearCategoriesCache();
|
||||
return result;
|
||||
}
|
||||
}
|
||||
18
backend/src/categories/dto/create-category.dto.ts
Normal file
18
backend/src/categories/dto/create-category.dto.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { IsNotEmpty, IsOptional, IsString, MaxLength } from "class-validator";

/** Payload for creating a category; the slug is derived server-side. */
export class CreateCategoryDto {
  @IsString()
  @IsNotEmpty()
  @MaxLength(64)
  name!: string;

  @IsOptional()
  @IsString()
  @MaxLength(255)
  description?: string;

  // NOTE(review): only length is validated — not URL syntax; confirm
  // whether an @IsUrl() constraint is wanted here.
  @IsOptional()
  @IsString()
  @MaxLength(512)
  iconUrl?: string;
}
|
||||
4
backend/src/categories/dto/update-category.dto.ts
Normal file
4
backend/src/categories/dto/update-category.dto.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
import { PartialType } from "@nestjs/mapped-types";
import { CreateCategoryDto } from "./create-category.dto";

/** All CreateCategoryDto fields, each optional, for PATCH updates. */
export class UpdateCategoryDto extends PartialType(CreateCategoryDto) {}
|
||||
60
backend/src/categories/repositories/categories.repository.ts
Normal file
60
backend/src/categories/repositories/categories.repository.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { Injectable } from "@nestjs/common";
import { eq, sql } from "drizzle-orm";
import { DatabaseService } from "../../database/database.service";
import { categories } from "../../database/schemas";
import type { CreateCategoryDto } from "../dto/create-category.dto";
import type { UpdateCategoryDto } from "../dto/update-category.dto";

/** Drizzle-based persistence for the categories table. */
@Injectable()
export class CategoriesRepository {
  constructor(private readonly databaseService: DatabaseService) {}

  /** All categories, ordered by name. */
  async findAll() {
    return await this.databaseService.db
      .select()
      .from(categories)
      .orderBy(categories.name);
  }

  /** Total number of categories. */
  async countAll() {
    const result = await this.databaseService.db
      .select({ count: sql<number>`count(*)` })
      .from(categories);
    // count(*) may come back as a string depending on the driver.
    return Number(result[0].count);
  }

  /** One category by primary key, or null when absent. */
  async findOne(id: string) {
    const result = await this.databaseService.db
      .select()
      .from(categories)
      .where(eq(categories.id, id))
      .limit(1);

    return result[0] || null;
  }

  /** Inserts a category and returns the inserted row(s). */
  async create(data: CreateCategoryDto & { slug: string }) {
    return await this.databaseService.db
      .insert(categories)
      .values(data)
      .returning();
  }

  /** Updates a category by id and returns the updated row(s). */
  async update(
    id: string,
    data: UpdateCategoryDto & { slug?: string; updatedAt: Date },
  ) {
    return await this.databaseService.db
      .update(categories)
      .set(data)
      .where(eq(categories.id, id))
      .returning();
  }

  /** Deletes a category by id and returns the deleted row(s). */
  async remove(id: string) {
    return await this.databaseService.db
      .delete(categories)
      .where(eq(categories.id, id))
      .returning();
  }
}
|
||||
21
backend/src/common/common.module.ts
Normal file
21
backend/src/common/common.module.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { forwardRef, Global, Module } from "@nestjs/common";
import { ContentsModule } from "../contents/contents.module";
import { DatabaseModule } from "../database/database.module";
import { ReportsModule } from "../reports/reports.module";
import { SessionsModule } from "../sessions/sessions.module";
import { UsersModule } from "../users/users.module";
import { PurgeService } from "./services/purge.service";

/**
 * Global module exposing PurgeService app-wide. forwardRef() breaks the
 * circular dependencies between this module and the feature modules whose
 * data PurgeService cleans up.
 */
@Global()
@Module({
  imports: [
    DatabaseModule,
    forwardRef(() => SessionsModule),
    forwardRef(() => ReportsModule),
    forwardRef(() => UsersModule),
    forwardRef(() => ContentsModule),
  ],
  providers: [PurgeService],
  exports: [PurgeService],
})
export class CommonModule {}
|
||||
67
backend/src/common/filters/http-exception.filter.ts
Normal file
67
backend/src/common/filters/http-exception.filter.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import {
  ArgumentsHost,
  Catch,
  ExceptionFilter,
  HttpException,
  HttpStatus,
  Logger,
} from "@nestjs/common";
import * as Sentry from "@sentry/nestjs";
import { Request, Response } from "express";

// Request shape after the auth guards have (optionally) attached a user.
interface RequestWithUser extends Request {
  user?: {
    sub?: string;
    username?: string;
    id?: string;
  };
}

/**
 * Catch-all exception filter: normalises every error into a JSON envelope
 * (statusCode, timestamp, path, method, message). 5xx errors are reported
 * to Sentry and logged with a stack trace; everything else is a warning.
 */
@Catch()
export class AllExceptionsFilter implements ExceptionFilter {
  private readonly logger = new Logger("ExceptionFilter");

  catch(exception: unknown, host: ArgumentsHost) {
    const ctx = host.switchToHttp();
    const response = ctx.getResponse<Response>();
    const request = ctx.getRequest<RequestWithUser>();

    // Non-HttpException errors become opaque 500s.
    const status =
      exception instanceof HttpException
        ? exception.getStatus()
        : HttpStatus.INTERNAL_SERVER_ERROR;

    const message =
      exception instanceof HttpException
        ? exception.getResponse()
        : "Internal server error";

    const userId = request.user?.sub || request.user?.id;
    const userPart = userId ? `[User: ${userId}] ` : "";

    const errorResponse = {
      statusCode: status,
      timestamp: new Date().toISOString(),
      path: request.url,
      method: request.method,
      // getResponse() may be a string or an object carrying a `message`
      // field; prefer the inner message when one exists.
      message:
        typeof message === "object" && message !== null
          ? (message as Record<string, unknown>).message || message
          : message,
    };

    if (status === HttpStatus.INTERNAL_SERVER_ERROR) {
      Sentry.captureException(exception);
      this.logger.error(
        `${userPart}${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
        exception instanceof Error ? exception.stack : "",
      );
    } else {
      this.logger.warn(
        `${userPart}${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
      );
    }

    response.status(status).json(errorResponse);
  }
}
|
||||
4
backend/src/common/interfaces/mail.interface.ts
Normal file
4
backend/src/common/interfaces/mail.interface.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
/** Outbound-mail abstraction: e-mail validation and password-reset flows. */
export interface IMailService {
  // Sends the address-verification message containing `token`.
  sendEmailValidation(email: string, token: string): Promise<void>;
  // Sends the password-reset message containing `token`.
  sendPasswordReset(email: string, token: string): Promise<void>;
}
|
||||
26
backend/src/common/interfaces/media.interface.ts
Normal file
26
backend/src/common/interfaces/media.interface.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
/** Result of an image/video processing pass. */
export interface MediaProcessingResult {
  buffer: Buffer;
  mimeType: string;
  extension: string;
  width?: number; // pixels, when known
  height?: number; // pixels, when known
  size: number; // bytes
}

/** Outcome of an antivirus scan. */
export interface ScanResult {
  isInfected: boolean;
  virusName?: string; // set only when isInfected is true
}

/** Media pipeline: virus scanning plus image/video transcoding. */
export interface IMediaService {
  scanFile(buffer: Buffer, filename: string): Promise<ScanResult>;
  // Transcodes to webp (default behavior not visible here) or avif, with
  // optional resizing.
  processImage(
    buffer: Buffer,
    format?: "webp" | "avif",
    resize?: { width?: number; height?: number },
  ): Promise<MediaProcessingResult>;
  processVideo(
    buffer: Buffer,
    format?: "webm" | "av1",
  ): Promise<MediaProcessingResult>;
}
|
||||
8
backend/src/common/interfaces/request.interface.ts
Normal file
8
backend/src/common/interfaces/request.interface.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { Request } from "express";

/** Express request after AuthGuard attached the verified JWT payload. */
export interface AuthenticatedRequest extends Request {
  user: {
    sub: string; // user UUID (JWT subject)
    username: string;
  };
}
|
||||
38
backend/src/common/interfaces/storage.interface.ts
Normal file
38
backend/src/common/interfaces/storage.interface.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import type { Readable } from "node:stream";

/**
 * Object-storage abstraction. `bucketName` is optional everywhere —
 * presumably a default bucket is used when omitted (confirm in the
 * implementation). `expiry` is the presigned-URL lifetime.
 */
export interface IStorageService {
  // Uploads a buffer and resolves to the stored object's key/identifier.
  uploadFile(
    fileName: string,
    file: Buffer,
    mimeType: string,
    metaData?: Record<string, string>,
    bucketName?: string,
  ): Promise<string>;

  // Streams an object's contents.
  getFile(fileName: string, bucketName?: string): Promise<Readable>;

  // Presigned download URL.
  getFileUrl(
    fileName: string,
    expiry?: number,
    bucketName?: string,
  ): Promise<string>;

  // Presigned upload URL for direct client uploads.
  getUploadUrl(
    fileName: string,
    expiry?: number,
    bucketName?: string,
  ): Promise<string>;

  deleteFile(fileName: string, bucketName?: string): Promise<void>;

  getFileInfo(fileName: string, bucketName?: string): Promise<unknown>;

  // Copies/moves an object, possibly across buckets; resolves to the
  // destination key.
  moveFile(
    sourceFileName: string,
    destinationFileName: string,
    sourceBucketName?: string,
    destinationBucketName?: string,
  ): Promise<string>;

  // Non-presigned, publicly reachable URL for a stored object.
  getPublicUrl(storageKey: string): string;
}
|
||||
@@ -0,0 +1,67 @@
|
||||
import { Injectable, Logger, NestMiddleware } from "@nestjs/common";
import type { NextFunction, Request, Response } from "express";

/**
 * Detects likely vulnerability scanners: when a request finishes with 404
 * and either hits a known probe path or carries a bot-like user-agent, a
 * warning is logged. Detection only — nothing is blocked.
 */
@Injectable()
export class CrawlerDetectionMiddleware implements NestMiddleware {
  private readonly logger = new Logger("CrawlerDetection");

  // Paths commonly probed by automated scanners. NOTE(review): /config/,
  // /admin/, /sql/ and /_next/ are broad substring matches and may flag
  // legitimate app routes — confirm against the real route map.
  private readonly SUSPICIOUS_PATTERNS = [
    /\.env/,
    /wp-admin/,
    /wp-login/,
    /\.git/,
    /\.php$/,
    /xmlrpc/,
    /config/,
    /setup/,
    /wp-config/,
    /_next/,
    /install/,
    /admin/,
    /phpmyadmin/,
    /sql/,
    /backup/,
    /db\./,
    /backup\./,
    /cgi-bin/,
    /\.well-known\/security\.txt/, // Legitimate path, but frequently scanned
  ];

  // User-agents of common bots and scanning tools.
  private readonly BOT_USER_AGENTS = [
    /bot/i,
    /crawler/i,
    /spider/i,
    /python/i,
    /curl/i,
    /wget/i,
    /nmap/i,
    /nikto/i,
    /zgrab/i,
    /masscan/i,
  ];

  use(req: Request, res: Response, next: NextFunction) {
    const { method, url, ip } = req;
    const userAgent = req.get("user-agent") || "unknown";

    // Evaluate after the response finishes so only 404s are considered.
    res.on("finish", () => {
      if (res.statusCode === 404) {
        const isSuspiciousPath = this.SUSPICIOUS_PATTERNS.some((pattern) =>
          pattern.test(url),
        );
        const isBotUserAgent = this.BOT_USER_AGENTS.some((pattern) =>
          pattern.test(userAgent),
        );

        if (isSuspiciousPath || isBotUserAgent) {
          this.logger.warn(
            `Potential crawler detected: [${ip}] ${method} ${url} - User-Agent: ${userAgent}`,
          );
          // A temporary IP ban via Redis could be added here.
        }
      }
    });

    next();
  }
}
|
||||
37
backend/src/common/middlewares/http-logger.middleware.ts
Normal file
37
backend/src/common/middlewares/http-logger.middleware.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import { Injectable, Logger, NestMiddleware } from "@nestjs/common";
|
||||
import { NextFunction, Request, Response } from "express";
|
||||
|
||||
@Injectable()
|
||||
export class HTTPLoggerMiddleware implements NestMiddleware {
|
||||
private readonly logger = new Logger("HTTP");
|
||||
|
||||
use(request: Request, response: Response, next: NextFunction): void {
|
||||
const { method, originalUrl, ip } = request;
|
||||
const userAgent = request.get("user-agent") || "";
|
||||
const startTime = Date.now();
|
||||
|
||||
response.on("finish", () => {
|
||||
const { statusCode } = response;
|
||||
const contentLength = response.get("content-length");
|
||||
const duration = Date.now() - startTime;
|
||||
|
||||
const hashedIp = createHash("sha256")
|
||||
.update(ip as string)
|
||||
.digest("hex");
|
||||
const message = `${method} ${originalUrl} ${statusCode} ${contentLength || 0} - ${userAgent} ${hashedIp} +${duration}ms`;
|
||||
|
||||
if (statusCode >= 500) {
|
||||
return this.logger.error(message);
|
||||
}
|
||||
|
||||
if (statusCode >= 400) {
|
||||
return this.logger.warn(message);
|
||||
}
|
||||
|
||||
return this.logger.log(message);
|
||||
});
|
||||
|
||||
next();
|
||||
}
|
||||
}
|
||||
65
backend/src/common/services/purge.service.spec.ts
Normal file
65
backend/src/common/services/purge.service.spec.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { Logger } from "@nestjs/common";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { ContentsRepository } from "../../contents/repositories/contents.repository";
|
||||
import { ReportsRepository } from "../../reports/repositories/reports.repository";
|
||||
import { SessionsRepository } from "../../sessions/repositories/sessions.repository";
|
||||
import { UsersRepository } from "../../users/repositories/users.repository";
|
||||
import { PurgeService } from "./purge.service";
|
||||
|
||||
// Unit tests for PurgeService: all four repositories are replaced with jest
// mocks so no database is touched; Logger output is silenced.
describe("PurgeService", () => {
  let service: PurgeService;

  // Each mock resolves to an empty array by default (i.e. "nothing purged").
  const mockSessionsRepository = {
    purgeExpired: jest.fn().mockResolvedValue([]),
  };
  const mockReportsRepository = {
    purgeObsolete: jest.fn().mockResolvedValue([]),
  };
  const mockUsersRepository = { purgeDeleted: jest.fn().mockResolvedValue([]) };
  const mockContentsRepository = {
    purgeSoftDeleted: jest.fn().mockResolvedValue([]),
  };

  beforeEach(async () => {
    jest.clearAllMocks();
    // Mute the service's logger so expected errors don't pollute test output.
    jest.spyOn(Logger.prototype, "error").mockImplementation(() => {});
    jest.spyOn(Logger.prototype, "log").mockImplementation(() => {});

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        PurgeService,
        { provide: SessionsRepository, useValue: mockSessionsRepository },
        { provide: ReportsRepository, useValue: mockReportsRepository },
        { provide: UsersRepository, useValue: mockUsersRepository },
        { provide: ContentsRepository, useValue: mockContentsRepository },
      ],
    }).compile();

    service = module.get<PurgeService>(PurgeService);
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("purgeExpiredData", () => {
    it("should purge data using repositories", async () => {
      // One deleted row per repository so the happy path exercises all steps.
      mockSessionsRepository.purgeExpired.mockResolvedValue([{ id: "s1" }]);
      mockReportsRepository.purgeObsolete.mockResolvedValue([{ id: "r1" }]);
      mockUsersRepository.purgeDeleted.mockResolvedValue([{ id: "u1" }]);
      mockContentsRepository.purgeSoftDeleted.mockResolvedValue([{ id: "c1" }]);

      await service.purgeExpiredData();

      expect(mockSessionsRepository.purgeExpired).toHaveBeenCalled();
      expect(mockReportsRepository.purgeObsolete).toHaveBeenCalled();
      expect(mockUsersRepository.purgeDeleted).toHaveBeenCalled();
      expect(mockContentsRepository.purgeSoftDeleted).toHaveBeenCalled();
    });

    it("should handle errors", async () => {
      // A repository failure must be swallowed and logged, never rethrown
      // (the method runs from a cron job with no caller to catch it).
      mockSessionsRepository.purgeExpired.mockRejectedValue(new Error("Db error"));
      await expect(service.purgeExpiredData()).resolves.not.toThrow();
    });
  });
});
|
||||
54
backend/src/common/services/purge.service.ts
Normal file
54
backend/src/common/services/purge.service.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import { Cron, CronExpression } from "@nestjs/schedule";
|
||||
import { ContentsRepository } from "../../contents/repositories/contents.repository";
|
||||
import { ReportsRepository } from "../../reports/repositories/reports.repository";
|
||||
import { SessionsRepository } from "../../sessions/repositories/sessions.repository";
|
||||
import { UsersRepository } from "../../users/repositories/users.repository";
|
||||
|
||||
@Injectable()
|
||||
export class PurgeService {
|
||||
private readonly logger = new Logger(PurgeService.name);
|
||||
|
||||
constructor(
|
||||
private readonly sessionsRepository: SessionsRepository,
|
||||
private readonly reportsRepository: ReportsRepository,
|
||||
private readonly usersRepository: UsersRepository,
|
||||
private readonly contentsRepository: ContentsRepository,
|
||||
) {}
|
||||
|
||||
// Toutes les nuits à minuit
|
||||
@Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
|
||||
async purgeExpiredData() {
|
||||
this.logger.log("Starting automatic data purge...");
|
||||
|
||||
try {
|
||||
const now = new Date();
|
||||
|
||||
// 1. Purge des sessions expirées
|
||||
const deletedSessions = await this.sessionsRepository.purgeExpired(now);
|
||||
this.logger.log(`Purged ${deletedSessions.length} expired sessions.`);
|
||||
|
||||
// 2. Purge des signalements obsolètes
|
||||
const deletedReports = await this.reportsRepository.purgeObsolete(now);
|
||||
this.logger.log(`Purged ${deletedReports.length} obsolete reports.`);
|
||||
|
||||
// 3. Purge des utilisateurs supprimés (Soft Delete > 30 jours)
|
||||
const thirtyDaysAgo = new Date();
|
||||
thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
|
||||
|
||||
const deletedUsers = await this.usersRepository.purgeDeleted(thirtyDaysAgo);
|
||||
this.logger.log(
|
||||
`Purged ${deletedUsers.length} users marked for deletion more than 30 days ago.`,
|
||||
);
|
||||
|
||||
// 4. Purge des contenus supprimés (Soft Delete > 30 jours)
|
||||
const deletedContents =
|
||||
await this.contentsRepository.purgeSoftDeleted(thirtyDaysAgo);
|
||||
this.logger.log(
|
||||
`Purged ${deletedContents.length} contents marked for deletion more than 30 days ago.`,
|
||||
);
|
||||
} catch (error) {
|
||||
this.logger.error("Error during data purge", error);
|
||||
}
|
||||
}
|
||||
}
|
||||
64
backend/src/config/env.schema.ts
Normal file
64
backend/src/config/env.schema.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// Zod schema describing every environment variable the backend reads.
// `z.coerce.number()` accepts numeric strings from the environment; the
// `preprocess` wrappers turn the literal string "true" into a boolean
// (any other value, including "TRUE" or "1", becomes false — intentional?
// NOTE(review): confirm that only lowercase "true" is ever supplied).
export const envSchema = z.object({
  NODE_ENV: z.enum(["development", "production", "test"]).default("development"),
  PORT: z.coerce.number().default(3000),

  // Database
  POSTGRES_HOST: z.string(),
  POSTGRES_PORT: z.coerce.number().default(5432),
  POSTGRES_DB: z.string(),
  POSTGRES_USER: z.string(),
  POSTGRES_PASSWORD: z.string(),

  // S3 (defaults target a local MinIO instance)
  S3_ENDPOINT: z.string().default("localhost"),
  S3_PORT: z.coerce.number().default(9000),
  S3_USE_SSL: z.preprocess((val) => val === "true", z.boolean()).default(false),
  S3_ACCESS_KEY: z.string().default("minioadmin"),
  S3_SECRET_KEY: z.string().default("minioadmin"),
  S3_BUCKET_NAME: z.string().default("memegoat"),

  // Security — minimum/exact lengths enforce key strength at boot time.
  JWT_SECRET: z.string().min(32),
  ENCRYPTION_KEY: z.string().length(32),
  PGP_ENCRYPTION_KEY: z.string().min(16),

  // Mail
  MAIL_HOST: z.string(),
  MAIL_PORT: z.coerce.number(),
  MAIL_SECURE: z.preprocess((val) => val === "true", z.boolean()).default(false),
  MAIL_USER: z.string(),
  MAIL_PASS: z.string(),
  MAIL_FROM: z.string().email(),

  DOMAIN_NAME: z.string(),
  API_URL: z.string().url().optional(),

  // Sentry
  SENTRY_DSN: z.string().optional(),

  // Redis
  REDIS_HOST: z.string().default("localhost"),
  REDIS_PORT: z.coerce.number().default(6379),

  // Session
  SESSION_PASSWORD: z.string().min(32),

  // Media Limits (kilobytes)
  MAX_IMAGE_SIZE_KB: z.coerce.number().default(512),
  MAX_GIF_SIZE_KB: z.coerce.number().default(1024),
});

// Fully parsed/coerced configuration type derived from the schema.
export type Env = z.infer<typeof envSchema>;
|
||||
|
||||
export function validateEnv(config: Record<string, unknown>) {
|
||||
const result = envSchema.safeParse(config);
|
||||
|
||||
if (!result.success) {
|
||||
console.error("❌ Invalid environment variables:", result.error.format());
|
||||
throw new Error("Invalid environment variables");
|
||||
}
|
||||
|
||||
return result.data;
|
||||
}
|
||||
188
backend/src/contents/contents.controller.ts
Normal file
188
backend/src/contents/contents.controller.ts
Normal file
@@ -0,0 +1,188 @@
|
||||
import { CacheInterceptor, CacheTTL } from "@nestjs/cache-manager";
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
DefaultValuePipe,
|
||||
Delete,
|
||||
Get,
|
||||
Header,
|
||||
NotFoundException,
|
||||
Param,
|
||||
ParseBoolPipe,
|
||||
ParseIntPipe,
|
||||
Post,
|
||||
Query,
|
||||
Req,
|
||||
Res,
|
||||
UploadedFile,
|
||||
UseGuards,
|
||||
UseInterceptors,
|
||||
} from "@nestjs/common";
|
||||
import { FileInterceptor } from "@nestjs/platform-express";
|
||||
import type { Response } from "express";
|
||||
import { Roles } from "../auth/decorators/roles.decorator";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { OptionalAuthGuard } from "../auth/guards/optional-auth.guard";
|
||||
import { RolesGuard } from "../auth/guards/roles.guard";
|
||||
import type { AuthenticatedRequest } from "../common/interfaces/request.interface";
|
||||
import { ContentsService } from "./contents.service";
|
||||
import { CreateContentDto } from "./dto/create-content.dto";
|
||||
import { UploadContentDto } from "./dto/upload-content.dto";
|
||||
|
||||
// REST controller for meme/GIF contents. Read endpoints are cached (Nest
// CacheInterceptor) and also advertise a matching public Cache-Control header.
@Controller("contents")
export class ContentsController {
  constructor(private readonly contentsService: ContentsService) {}

  // Creates a content row from an already-uploaded file (storageKey supplied
  // by the client). Requires authentication.
  @Post()
  @UseGuards(AuthGuard)
  create(
    @Req() req: AuthenticatedRequest,
    @Body() createContentDto: CreateContentDto,
  ) {
    return this.contentsService.create(req.user.sub, createContentDto);
  }

  // Returns a pre-signed S3 upload URL plus the key the client must use.
  @Post("upload-url")
  @UseGuards(AuthGuard)
  getUploadUrl(
    @Req() req: AuthenticatedRequest,
    @Query("fileName") fileName: string,
  ) {
    return this.contentsService.getUploadUrl(req.user.sub, fileName);
  }

  // Direct multipart upload: the service scans, transcodes and stores the
  // file, then creates the content row.
  @Post("upload")
  @UseGuards(AuthGuard)
  @UseInterceptors(FileInterceptor("file"))
  upload(
    @Req() req: AuthenticatedRequest,
    @UploadedFile()
    file: Express.Multer.File,
    @Body() uploadContentDto: UploadContentDto,
  ) {
    return this.contentsService.uploadAndProcess(
      req.user.sub,
      file,
      uploadContentDto,
    );
  }

  // Paginated, filterable listing. Auth is optional; when `favoritesOnly`
  // is requested the user id comes from the query or the session.
  @Get("explore")
  @UseGuards(OptionalAuthGuard)
  @UseInterceptors(CacheInterceptor)
  @CacheTTL(60)
  @Header("Cache-Control", "public, max-age=60")
  explore(
    @Req() req: AuthenticatedRequest,
    @Query("limit", new DefaultValuePipe(10), ParseIntPipe) limit: number,
    @Query("offset", new DefaultValuePipe(0), ParseIntPipe) offset: number,
    @Query("sort") sort?: "trend" | "recent",
    @Query("tag") tag?: string,
    @Query("category") category?: string,
    @Query("author") author?: string,
    @Query("query") query?: string,
    @Query("favoritesOnly", new DefaultValuePipe(false), ParseBoolPipe)
    favoritesOnly?: boolean,
    @Query("userId") userIdQuery?: string,
  ) {
    return this.contentsService.findAll({
      limit,
      offset,
      sortBy: sort,
      tag,
      category,
      author,
      query,
      favoritesOnly,
      // Explicit ?userId= wins over the authenticated user's id.
      userId: userIdQuery || req.user?.sub,
    });
  }

  // Trending contents, cached for 5 minutes.
  @Get("trends")
  @UseGuards(OptionalAuthGuard)
  @UseInterceptors(CacheInterceptor)
  @CacheTTL(300)
  @Header("Cache-Control", "public, max-age=300")
  trends(
    @Req() req: AuthenticatedRequest,
    @Query("limit", new DefaultValuePipe(10), ParseIntPipe) limit: number,
    @Query("offset", new DefaultValuePipe(0), ParseIntPipe) offset: number,
  ) {
    return this.contentsService.findAll({
      limit,
      offset,
      sortBy: "trend",
      userId: req.user?.sub,
    });
  }

  // Most recent contents, cached for 1 minute.
  @Get("recent")
  @UseGuards(OptionalAuthGuard)
  @UseInterceptors(CacheInterceptor)
  @CacheTTL(60)
  @Header("Cache-Control", "public, max-age=60")
  recent(
    @Req() req: AuthenticatedRequest,
    @Query("limit", new DefaultValuePipe(10), ParseIntPipe) limit: number,
    @Query("offset", new DefaultValuePipe(0), ParseIntPipe) offset: number,
  ) {
    return this.contentsService.findAll({
      limit,
      offset,
      sortBy: "recent",
      userId: req.user?.sub,
    });
  }

  // Single content lookup by id or slug. Crawler User-Agents receive a
  // minimal HTML page with OpenGraph tags instead of JSON.
  // NOTE(review): injecting @Res() makes Nest skip interceptor response
  // handling, so CacheInterceptor/@CacheTTL likely have no effect on this
  // route — confirm and consider @Res({ passthrough: true }).
  @Get(":idOrSlug")
  @UseGuards(OptionalAuthGuard)
  @UseInterceptors(CacheInterceptor)
  @CacheTTL(3600)
  @Header("Cache-Control", "public, max-age=3600")
  async findOne(
    @Param("idOrSlug") idOrSlug: string,
    @Req() req: AuthenticatedRequest,
    @Res() res: Response,
  ) {
    const content = await this.contentsService.findOne(idOrSlug, req.user?.sub);
    if (!content) {
      throw new NotFoundException("Contenu non trouvé");
    }

    const userAgent = req.headers["user-agent"] || "";
    const isBot =
      /bot|googlebot|crawler|spider|robot|crawling|facebookexternalhit|twitterbot/i.test(
        userAgent,
      );

    if (isBot) {
      const html = this.contentsService.generateBotHtml(content);
      return res.send(html);
    }

    return res.json(content);
  }

  // View counter — unauthenticated by design (every visitor counts).
  @Post(":id/view")
  incrementViews(@Param("id") id: string) {
    return this.contentsService.incrementViews(id);
  }

  // Usage counter (e.g. content shared/used in a composition).
  @Post(":id/use")
  incrementUsage(@Param("id") id: string) {
    return this.contentsService.incrementUsage(id);
  }

  // Soft-deletes a content owned by the authenticated user.
  @Delete(":id")
  @UseGuards(AuthGuard)
  remove(@Param("id") id: string, @Req() req: AuthenticatedRequest) {
    return this.contentsService.remove(id, req.user.sub);
  }

  // Admin-only soft delete, regardless of owner.
  @Delete(":id/admin")
  @UseGuards(AuthGuard, RolesGuard)
  @Roles("admin")
  removeAdmin(@Param("id") id: string) {
    return this.contentsService.removeAdmin(id);
  }
}
|
||||
15
backend/src/contents/contents.module.ts
Normal file
15
backend/src/contents/contents.module.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { MediaModule } from "../media/media.module";
|
||||
import { S3Module } from "../s3/s3.module";
|
||||
import { ContentsController } from "./contents.controller";
|
||||
import { ContentsService } from "./contents.service";
|
||||
import { ContentsRepository } from "./repositories/contents.repository";
|
||||
|
||||
// Wires the contents feature: storage (S3), auth guards and media processing
// are imported; the repository is exported for cross-module use (e.g. purge).
@Module({
  imports: [S3Module, AuthModule, MediaModule],
  controllers: [ContentsController],
  providers: [ContentsService, ContentsRepository],
  exports: [ContentsRepository],
})
export class ContentsModule {}
|
||||
150
backend/src/contents/contents.service.spec.ts
Normal file
150
backend/src/contents/contents.service.spec.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
// uuid is mocked module-wide so generated S3 keys are deterministic.
jest.mock("uuid", () => ({
  v4: jest.fn(() => "mocked-uuid"),
}));

import { CACHE_MANAGER } from "@nestjs/cache-manager";
import { BadRequestException } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { Test, TestingModule } from "@nestjs/testing";
import { MediaService } from "../media/media.service";
import { S3Service } from "../s3/s3.service";
import { ContentsService } from "./contents.service";
import { ContentsRepository } from "./repositories/contents.repository";

// Unit tests for ContentsService: repository, S3, media, config and cache
// are all jest mocks — no I/O is performed.
describe("ContentsService", () => {
  let service: ContentsService;
  let s3Service: S3Service;
  let mediaService: MediaService;

  const mockContentsRepository = {
    findAll: jest.fn(),
    count: jest.fn(),
    create: jest.fn(),
    incrementViews: jest.fn(),
    incrementUsage: jest.fn(),
    softDelete: jest.fn(),
    findOne: jest.fn(),
    findBySlug: jest.fn(),
  };

  const mockS3Service = {
    getUploadUrl: jest.fn(),
    uploadFile: jest.fn(),
    getPublicUrl: jest.fn(),
  };

  const mockMediaService = {
    scanFile: jest.fn(),
    processImage: jest.fn(),
    processVideo: jest.fn(),
  };

  const mockConfigService = {
    get: jest.fn(),
  };

  const mockCacheManager = {
    clear: jest.fn(),
    del: jest.fn(),
  };

  beforeEach(async () => {
    jest.clearAllMocks();

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ContentsService,
        { provide: ContentsRepository, useValue: mockContentsRepository },
        { provide: S3Service, useValue: mockS3Service },
        { provide: MediaService, useValue: mockMediaService },
        { provide: ConfigService, useValue: mockConfigService },
        { provide: CACHE_MANAGER, useValue: mockCacheManager },
      ],
    }).compile();

    service = module.get<ContentsService>(ContentsService);
    s3Service = module.get<S3Service>(S3Service);
    mediaService = module.get<MediaService>(MediaService);
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("getUploadUrl", () => {
    it("should return an upload URL", async () => {
      mockS3Service.getUploadUrl.mockResolvedValue("http://s3/url");
      const result = await service.getUploadUrl("user1", "test.png");
      expect(result).toHaveProperty("url", "http://s3/url");
      expect(result).toHaveProperty("key");
      // The key must be namespaced under the uploading user.
      expect(result.key).toContain("uploads/user1/");
    });
  });

  describe("uploadAndProcess", () => {
    const file = {
      buffer: Buffer.from("test"),
      originalname: "test.png",
      mimetype: "image/png",
      size: 1000,
    } as Express.Multer.File;

    it("should upload and process an image", async () => {
      mockConfigService.get.mockReturnValue(1024); // max size
      mockMediaService.scanFile.mockResolvedValue({ isInfected: false });
      mockMediaService.processImage.mockResolvedValue({
        buffer: Buffer.from("processed"),
        extension: "webp",
        mimeType: "image/webp",
        size: 500,
      });
      mockContentsRepository.findBySlug.mockResolvedValue(null);
      mockContentsRepository.create.mockResolvedValue({ id: "content-id" });

      const result = await service.uploadAndProcess("user1", file, {
        title: "Meme",
        type: "meme",
      });

      // Full pipeline: antivirus scan -> transcode -> S3 upload -> DB create.
      expect(mediaService.scanFile).toHaveBeenCalled();
      expect(mediaService.processImage).toHaveBeenCalled();
      expect(s3Service.uploadFile).toHaveBeenCalled();
      expect(result).toEqual({ id: "content-id" });
    });

    it("should throw if file is infected", async () => {
      mockConfigService.get.mockReturnValue(1024);
      mockMediaService.scanFile.mockResolvedValue({
        isInfected: true,
        virusName: "Eicar",
      });

      await expect(
        service.uploadAndProcess("user1", file, { title: "X", type: "meme" }),
      ).rejects.toThrow(BadRequestException);
    });
  });

  describe("findAll", () => {
    it("should return contents and total count", async () => {
      mockContentsRepository.count.mockResolvedValue(10);
      mockContentsRepository.findAll.mockResolvedValue([{ id: "1" }]);

      const result = await service.findAll({ limit: 10, offset: 0 });

      expect(result.totalCount).toBe(10);
      expect(result.data).toHaveLength(1);
    });
  });

  describe("incrementViews", () => {
    it("should increment views", async () => {
      mockContentsRepository.incrementViews.mockResolvedValue([
        { id: "1", views: 1 },
      ]);
      const result = await service.incrementViews("1");
      expect(mockContentsRepository.incrementViews).toHaveBeenCalledWith("1");
      expect(result[0].views).toBe(1);
    });
  });
});
|
||||
248
backend/src/contents/contents.service.ts
Normal file
248
backend/src/contents/contents.service.ts
Normal file
@@ -0,0 +1,248 @@
|
||||
import { CACHE_MANAGER } from "@nestjs/cache-manager";
|
||||
import {
|
||||
BadRequestException,
|
||||
Inject,
|
||||
Injectable,
|
||||
Logger,
|
||||
} from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import type { Cache } from "cache-manager";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import type {
|
||||
IMediaService,
|
||||
MediaProcessingResult,
|
||||
} from "../common/interfaces/media.interface";
|
||||
import type { IStorageService } from "../common/interfaces/storage.interface";
|
||||
import { MediaService } from "../media/media.service";
|
||||
import { S3Service } from "../s3/s3.service";
|
||||
import { CreateContentDto } from "./dto/create-content.dto";
|
||||
import { UploadContentDto } from "./dto/upload-content.dto";
|
||||
import { ContentsRepository } from "./repositories/contents.repository";
|
||||
|
||||
@Injectable()
|
||||
export class ContentsService {
|
||||
private readonly logger = new Logger(ContentsService.name);
|
||||
|
||||
constructor(
|
||||
private readonly contentsRepository: ContentsRepository,
|
||||
@Inject(S3Service) private readonly s3Service: IStorageService,
|
||||
@Inject(MediaService) private readonly mediaService: IMediaService,
|
||||
private readonly configService: ConfigService,
|
||||
@Inject(CACHE_MANAGER) private cacheManager: Cache,
|
||||
) {}
|
||||
|
||||
private async clearContentsCache() {
|
||||
this.logger.log("Clearing contents cache");
|
||||
await this.cacheManager.clear();
|
||||
}
|
||||
|
||||
async getUploadUrl(userId: string, fileName: string) {
|
||||
const key = `uploads/${userId}/${Date.now()}-${fileName}`;
|
||||
const url = await this.s3Service.getUploadUrl(key);
|
||||
return { url, key };
|
||||
}
|
||||
|
||||
async uploadAndProcess(
|
||||
userId: string,
|
||||
file: Express.Multer.File,
|
||||
data: UploadContentDto,
|
||||
) {
|
||||
this.logger.log(`Uploading and processing file for user ${userId}`);
|
||||
// 0. Validation du format et de la taille
|
||||
const allowedMimeTypes = [
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/webp",
|
||||
"image/gif",
|
||||
"video/webm",
|
||||
];
|
||||
|
||||
if (!allowedMimeTypes.includes(file.mimetype)) {
|
||||
throw new BadRequestException(
|
||||
"Format de fichier non supporté. Formats acceptés: png, jpeg, jpg, webp, webm, gif.",
|
||||
);
|
||||
}
|
||||
|
||||
const isGif = file.mimetype === "image/gif";
|
||||
const maxSizeKb = isGif
|
||||
? this.configService.get<number>("MAX_GIF_SIZE_KB", 1024)
|
||||
: this.configService.get<number>("MAX_IMAGE_SIZE_KB", 512);
|
||||
|
||||
if (file.size > maxSizeKb * 1024) {
|
||||
throw new BadRequestException(
|
||||
`Fichier trop volumineux. Limite pour ${isGif ? "GIF" : "image"}: ${maxSizeKb} Ko.`,
|
||||
);
|
||||
}
|
||||
|
||||
// 1. Scan Antivirus
|
||||
const scanResult = await this.mediaService.scanFile(
|
||||
file.buffer,
|
||||
file.originalname,
|
||||
);
|
||||
if (scanResult.isInfected) {
|
||||
throw new BadRequestException(
|
||||
`Le fichier est infecté par ${scanResult.virusName}`,
|
||||
);
|
||||
}
|
||||
|
||||
// 2. Transcodage
|
||||
let processed: MediaProcessingResult;
|
||||
if (file.mimetype.startsWith("image/")) {
|
||||
// Image ou GIF -> WebP (format moderne, bien supporté)
|
||||
processed = await this.mediaService.processImage(file.buffer, "webp");
|
||||
} else if (file.mimetype.startsWith("video/")) {
|
||||
// Vidéo -> WebM
|
||||
processed = await this.mediaService.processVideo(file.buffer, "webm");
|
||||
} else {
|
||||
throw new BadRequestException("Format de fichier non supporté");
|
||||
}
|
||||
|
||||
// 3. Upload vers S3
|
||||
const key = `contents/${userId}/${Date.now()}-${uuidv4()}.${processed.extension}`;
|
||||
await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
|
||||
this.logger.log(`File uploaded successfully to S3: ${key}`);
|
||||
|
||||
// 4. Création en base de données
|
||||
return await this.create(userId, {
|
||||
...data,
|
||||
storageKey: key,
|
||||
mimeType: processed.mimeType,
|
||||
fileSize: processed.size,
|
||||
});
|
||||
}
|
||||
|
||||
async findAll(options: {
|
||||
limit: number;
|
||||
offset: number;
|
||||
sortBy?: "trend" | "recent";
|
||||
tag?: string;
|
||||
category?: string; // Slug ou ID
|
||||
author?: string;
|
||||
query?: string;
|
||||
favoritesOnly?: boolean;
|
||||
userId?: string; // Nécessaire si favoritesOnly est vrai
|
||||
}) {
|
||||
const [data, totalCount] = await Promise.all([
|
||||
this.contentsRepository.findAll(options),
|
||||
this.contentsRepository.count(options),
|
||||
]);
|
||||
|
||||
const processedData = data.map((content) => ({
|
||||
...content,
|
||||
url: this.s3Service.getPublicUrl(content.storageKey),
|
||||
author: {
|
||||
...content.author,
|
||||
avatarUrl: content.author?.avatarUrl
|
||||
? this.s3Service.getPublicUrl(content.author.avatarUrl)
|
||||
: null,
|
||||
},
|
||||
}));
|
||||
|
||||
return { data: processedData, totalCount };
|
||||
}
|
||||
|
||||
async create(userId: string, data: CreateContentDto) {
|
||||
this.logger.log(`Creating content for user ${userId}: ${data.title}`);
|
||||
const { tags: tagNames, ...contentData } = data;
|
||||
|
||||
const slug = await this.ensureUniqueSlug(contentData.title);
|
||||
|
||||
const newContent = await this.contentsRepository.create(
|
||||
{ ...contentData, userId, slug },
|
||||
tagNames,
|
||||
);
|
||||
|
||||
await this.clearContentsCache();
|
||||
return newContent;
|
||||
}
|
||||
|
||||
async incrementViews(id: string) {
|
||||
return await this.contentsRepository.incrementViews(id);
|
||||
}
|
||||
|
||||
async incrementUsage(id: string) {
|
||||
return await this.contentsRepository.incrementUsage(id);
|
||||
}
|
||||
|
||||
async remove(id: string, userId: string) {
|
||||
this.logger.log(`Removing content ${id} for user ${userId}`);
|
||||
const deleted = await this.contentsRepository.softDelete(id, userId);
|
||||
|
||||
if (deleted) {
|
||||
await this.clearContentsCache();
|
||||
}
|
||||
return deleted;
|
||||
}
|
||||
|
||||
async removeAdmin(id: string) {
|
||||
this.logger.log(`Removing content ${id} by admin`);
|
||||
const deleted = await this.contentsRepository.softDeleteAdmin(id);
|
||||
|
||||
if (deleted) {
|
||||
await this.clearContentsCache();
|
||||
}
|
||||
return deleted;
|
||||
}
|
||||
|
||||
async findOne(idOrSlug: string, userId?: string) {
|
||||
const content = await this.contentsRepository.findOne(idOrSlug, userId);
|
||||
if (!content) return null;
|
||||
|
||||
return {
|
||||
...content,
|
||||
url: this.s3Service.getPublicUrl(content.storageKey),
|
||||
author: {
|
||||
...content.author,
|
||||
avatarUrl: content.author?.avatarUrl
|
||||
? this.s3Service.getPublicUrl(content.author.avatarUrl)
|
||||
: null,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
generateBotHtml(content: { title: string; storageKey: string }): string {
|
||||
const imageUrl = this.s3Service.getPublicUrl(content.storageKey);
|
||||
return `<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>${content.title}</title>
|
||||
<meta property="og:title" content="${content.title}" />
|
||||
<meta property="og:type" content="website" />
|
||||
<meta property="og:image" content="${imageUrl}" />
|
||||
<meta property="og:description" content="Découvrez ce meme sur Memegoat" />
|
||||
<meta name="twitter:card" content="summary_large_image" />
|
||||
<meta name="twitter:title" content="${content.title}" />
|
||||
<meta name="twitter:image" content="${imageUrl}" />
|
||||
</head>
|
||||
<body>
|
||||
<h1>${content.title}</h1>
|
||||
<img src="${imageUrl}" alt="${content.title}" />
|
||||
</body>
|
||||
</html>`;
|
||||
}
|
||||
|
||||
private generateSlug(text: string): string {
|
||||
return text
|
||||
.toLowerCase()
|
||||
.normalize("NFD")
|
||||
.replace(/[\u0300-\u036f]/g, "")
|
||||
.replace(/[^\w\s-]/g, "")
|
||||
.replace(/[\s_-]+/g, "-")
|
||||
.replace(/^-+|-+$/g, "");
|
||||
}
|
||||
|
||||
private async ensureUniqueSlug(title: string): Promise<string> {
|
||||
const baseSlug = this.generateSlug(title) || "content";
|
||||
let slug = baseSlug;
|
||||
let counter = 1;
|
||||
|
||||
while (true) {
|
||||
const existing = await this.contentsRepository.findBySlug(slug);
|
||||
|
||||
if (!existing) break;
|
||||
slug = `${baseSlug}-${counter++}`;
|
||||
}
|
||||
return slug;
|
||||
}
|
||||
}
|
||||
48
backend/src/contents/dto/create-content.dto.ts
Normal file
48
backend/src/contents/dto/create-content.dto.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import {
|
||||
IsArray,
|
||||
IsEnum,
|
||||
IsInt,
|
||||
IsNotEmpty,
|
||||
IsOptional,
|
||||
IsString,
|
||||
IsUUID,
|
||||
MaxLength,
|
||||
} from "class-validator";
|
||||
|
||||
// Kinds of uploadable content; values are the wire strings used by DTOs.
export enum ContentType {
  MEME = "meme",
  GIF = "gif",
}
|
||||
|
||||
// Payload for creating a content row once the file already lives in S3
// (the client supplies the storage key returned by the upload-url flow).
export class CreateContentDto {
  // Content kind; validated against the ContentType enum values.
  @IsEnum(ContentType)
  type!: "meme" | "gif";

  @IsString()
  @IsNotEmpty()
  @MaxLength(255)
  title!: string;

  // S3 object key of the uploaded file.
  @IsString()
  @IsNotEmpty()
  @MaxLength(512)
  storageKey!: string;

  @IsString()
  @IsNotEmpty()
  @MaxLength(128)
  mimeType!: string;

  // File size in bytes.
  @IsInt()
  fileSize!: number;

  @IsOptional()
  @IsUUID()
  categoryId?: string;

  // Free-form tag names; each at most 64 characters.
  @IsOptional()
  @IsArray()
  @IsString({ each: true })
  @MaxLength(64, { each: true })
  tags?: string[];
}
|
||||
30
backend/src/contents/dto/upload-content.dto.ts
Normal file
30
backend/src/contents/dto/upload-content.dto.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import {
|
||||
IsArray,
|
||||
IsEnum,
|
||||
IsNotEmpty,
|
||||
IsOptional,
|
||||
IsString,
|
||||
IsUUID,
|
||||
MaxLength,
|
||||
} from "class-validator";
|
||||
import { ContentType } from "./create-content.dto";
|
||||
|
||||
// Payload accompanying a direct multipart upload; storage metadata
// (storageKey, mimeType, fileSize) is filled in server-side after processing.
export class UploadContentDto {
  @IsEnum(ContentType)
  type!: "meme" | "gif";

  @IsString()
  @IsNotEmpty()
  @MaxLength(255)
  title!: string;

  @IsOptional()
  @IsUUID()
  categoryId?: string;

  // Free-form tag names; each at most 64 characters.
  @IsOptional()
  @IsArray()
  @IsString({ each: true })
  @MaxLength(64, { each: true })
  tags?: string[];
}
|
||||
427
backend/src/contents/repositories/contents.repository.ts
Normal file
427
backend/src/contents/repositories/contents.repository.ts
Normal file
@@ -0,0 +1,427 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import {
|
||||
and,
|
||||
desc,
|
||||
eq,
|
||||
exists,
|
||||
ilike,
|
||||
isNull,
|
||||
lte,
|
||||
type SQL,
|
||||
sql,
|
||||
} from "drizzle-orm";
|
||||
import { DatabaseService } from "../../database/database.service";
|
||||
import {
|
||||
categories,
|
||||
contents,
|
||||
contentsToTags,
|
||||
favorites,
|
||||
tags,
|
||||
users,
|
||||
} from "../../database/schemas";
|
||||
import type { NewContentInDb } from "../../database/schemas/content";
|
||||
|
||||
// Filter/pagination options for ContentsRepository.findAll and count.
export interface FindAllOptions {
  limit: number;
  offset: number;
  // "trend" orders by views + 2×usage; default is newest-first.
  sortBy?: "trend" | "recent";
  // Exact tag name.
  tag?: string;
  // Category id or slug.
  category?: string;
  // Author uuid or username.
  author?: string;
  // Case-insensitive substring match against the title.
  query?: string;
  // When true, restrict to the given user's favorites (userId required).
  favoritesOnly?: boolean;
  userId?: string;
}
|
||||
|
||||
@Injectable()
|
||||
export class ContentsRepository {
|
||||
constructor(private readonly databaseService: DatabaseService) {}
|
||||
|
||||
async findAll(options: FindAllOptions) {
|
||||
const {
|
||||
limit,
|
||||
offset,
|
||||
sortBy,
|
||||
tag,
|
||||
category,
|
||||
author,
|
||||
query,
|
||||
favoritesOnly,
|
||||
userId,
|
||||
} = options;
|
||||
|
||||
let whereClause: SQL | undefined = isNull(contents.deletedAt);
|
||||
|
||||
if (tag) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(contentsToTags)
|
||||
.innerJoin(tags, eq(contentsToTags.tagId, tags.id))
|
||||
.where(
|
||||
and(eq(contentsToTags.contentId, contents.id), eq(tags.name, tag)),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (category) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(categories)
|
||||
.where(
|
||||
and(
|
||||
eq(contents.categoryId, categories.id),
|
||||
sql`(${categories.id}::text = ${category} OR ${categories.slug} = ${category})`,
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (author) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(users)
|
||||
.where(
|
||||
and(
|
||||
eq(contents.userId, users.uuid),
|
||||
sql`(${users.uuid}::text = ${author} OR ${users.username} = ${author})`,
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (query) {
|
||||
whereClause = and(whereClause, ilike(contents.title, `%${query}%`));
|
||||
}
|
||||
|
||||
if (favoritesOnly && userId) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(favorites)
|
||||
.where(
|
||||
and(eq(favorites.contentId, contents.id), eq(favorites.userId, userId)),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
let orderBy = desc(contents.createdAt);
|
||||
if (sortBy === "trend") {
|
||||
orderBy = desc(sql`${contents.views} + ${contents.usageCount} * 2`);
|
||||
}
|
||||
|
||||
const results = await this.databaseService.db
|
||||
.select({
|
||||
id: contents.id,
|
||||
title: contents.title,
|
||||
slug: contents.slug,
|
||||
type: contents.type,
|
||||
storageKey: contents.storageKey,
|
||||
mimeType: contents.mimeType,
|
||||
fileSize: contents.fileSize,
|
||||
views: contents.views,
|
||||
usageCount: contents.usageCount,
|
||||
favoritesCount:
|
||||
sql<number>`(SELECT count(*) FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id})`.mapWith(
|
||||
Number,
|
||||
),
|
||||
isLiked: userId
|
||||
? sql<boolean>`EXISTS(SELECT 1 FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id} AND ${favorites.userId} = ${userId})`
|
||||
: sql<boolean>`false`,
|
||||
createdAt: contents.createdAt,
|
||||
updatedAt: contents.updatedAt,
|
||||
author: {
|
||||
id: users.uuid,
|
||||
username: users.username,
|
||||
displayName: users.displayName,
|
||||
avatarUrl: users.avatarUrl,
|
||||
},
|
||||
category: {
|
||||
id: categories.id,
|
||||
name: categories.name,
|
||||
slug: categories.slug,
|
||||
},
|
||||
})
|
||||
.from(contents)
|
||||
.leftJoin(users, eq(contents.userId, users.uuid))
|
||||
.leftJoin(categories, eq(contents.categoryId, categories.id))
|
||||
.where(whereClause)
|
||||
.orderBy(orderBy)
|
||||
.limit(limit)
|
||||
.offset(offset);
|
||||
|
||||
const contentIds = results.map((r) => r.id);
|
||||
const tagsForContents = contentIds.length
|
||||
? await this.databaseService.db
|
||||
.select({
|
||||
contentId: contentsToTags.contentId,
|
||||
name: tags.name,
|
||||
})
|
||||
.from(contentsToTags)
|
||||
.innerJoin(tags, eq(contentsToTags.tagId, tags.id))
|
||||
.where(sql`${contentsToTags.contentId} IN ${contentIds}`)
|
||||
: [];
|
||||
|
||||
return results.map((r) => ({
|
||||
...r,
|
||||
tags: tagsForContents.filter((t) => t.contentId === r.id).map((t) => t.name),
|
||||
}));
|
||||
}
|
||||
|
||||
async create(data: NewContentInDb & { userId: string }, tagNames?: string[]) {
|
||||
return await this.databaseService.db.transaction(async (tx) => {
|
||||
const [newContent] = await tx.insert(contents).values(data).returning();
|
||||
|
||||
if (tagNames && tagNames.length > 0) {
|
||||
for (const tagName of tagNames) {
|
||||
const slug = tagName
|
||||
.toLowerCase()
|
||||
.replace(/ /g, "-")
|
||||
.replace(/[^\w-]/g, "");
|
||||
|
||||
let [tag] = await tx
|
||||
.select()
|
||||
.from(tags)
|
||||
.where(eq(tags.slug, slug))
|
||||
.limit(1);
|
||||
|
||||
if (!tag) {
|
||||
[tag] = await tx
|
||||
.insert(tags)
|
||||
.values({
|
||||
name: tagName,
|
||||
slug,
|
||||
userId: data.userId,
|
||||
})
|
||||
.returning();
|
||||
}
|
||||
|
||||
await tx
|
||||
.insert(contentsToTags)
|
||||
.values({
|
||||
contentId: newContent.id,
|
||||
tagId: tag.id,
|
||||
})
|
||||
.onConflictDoNothing();
|
||||
}
|
||||
}
|
||||
|
||||
return newContent;
|
||||
});
|
||||
}
|
||||
|
||||
async findOne(idOrSlug: string, userId?: string) {
|
||||
const [result] = await this.databaseService.db
|
||||
.select({
|
||||
id: contents.id,
|
||||
title: contents.title,
|
||||
slug: contents.slug,
|
||||
type: contents.type,
|
||||
storageKey: contents.storageKey,
|
||||
mimeType: contents.mimeType,
|
||||
fileSize: contents.fileSize,
|
||||
views: contents.views,
|
||||
usageCount: contents.usageCount,
|
||||
favoritesCount:
|
||||
sql<number>`(SELECT count(*) FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id})`.mapWith(
|
||||
Number,
|
||||
),
|
||||
isLiked: userId
|
||||
? sql<boolean>`EXISTS(SELECT 1 FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id} AND ${favorites.userId} = ${userId})`
|
||||
: sql<boolean>`false`,
|
||||
createdAt: contents.createdAt,
|
||||
updatedAt: contents.updatedAt,
|
||||
userId: contents.userId,
|
||||
author: {
|
||||
id: users.uuid,
|
||||
username: users.username,
|
||||
displayName: users.displayName,
|
||||
avatarUrl: users.avatarUrl,
|
||||
},
|
||||
category: {
|
||||
id: categories.id,
|
||||
name: categories.name,
|
||||
slug: categories.slug,
|
||||
},
|
||||
})
|
||||
.from(contents)
|
||||
.leftJoin(users, eq(contents.userId, users.uuid))
|
||||
.leftJoin(categories, eq(contents.categoryId, categories.id))
|
||||
.where(
|
||||
and(
|
||||
isNull(contents.deletedAt),
|
||||
sql`(${contents.id}::text = ${idOrSlug} OR ${contents.slug} = ${idOrSlug})`,
|
||||
),
|
||||
)
|
||||
.limit(1);
|
||||
|
||||
if (!result) return null;
|
||||
|
||||
const tagsForContent = await this.databaseService.db
|
||||
.select({
|
||||
name: tags.name,
|
||||
})
|
||||
.from(contentsToTags)
|
||||
.innerJoin(tags, eq(contentsToTags.tagId, tags.id))
|
||||
.where(eq(contentsToTags.contentId, result.id));
|
||||
|
||||
return {
|
||||
...result,
|
||||
tags: tagsForContent.map((t) => t.name),
|
||||
};
|
||||
}
|
||||
|
||||
async count(options: {
|
||||
tag?: string;
|
||||
category?: string;
|
||||
author?: string;
|
||||
query?: string;
|
||||
favoritesOnly?: boolean;
|
||||
userId?: string;
|
||||
}) {
|
||||
const { tag, category, author, query, favoritesOnly, userId } = options;
|
||||
|
||||
let whereClause: SQL | undefined = isNull(contents.deletedAt);
|
||||
|
||||
if (tag) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(contentsToTags)
|
||||
.innerJoin(tags, eq(contentsToTags.tagId, tags.id))
|
||||
.where(
|
||||
and(eq(contentsToTags.contentId, contents.id), eq(tags.name, tag)),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (category) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(categories)
|
||||
.where(
|
||||
and(
|
||||
eq(contents.categoryId, categories.id),
|
||||
sql`(${categories.id}::text = ${category} OR ${categories.slug} = ${category})`,
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (author) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(users)
|
||||
.where(
|
||||
and(
|
||||
eq(contents.userId, users.uuid),
|
||||
sql`(${users.uuid}::text = ${author} OR ${users.username} = ${author})`,
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
if (query) {
|
||||
whereClause = and(whereClause, ilike(contents.title, `%${query}%`));
|
||||
}
|
||||
|
||||
if (favoritesOnly && userId) {
|
||||
whereClause = and(
|
||||
whereClause,
|
||||
exists(
|
||||
this.databaseService.db
|
||||
.select()
|
||||
.from(favorites)
|
||||
.where(
|
||||
and(eq(favorites.contentId, contents.id), eq(favorites.userId, userId)),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
const [result] = await this.databaseService.db
|
||||
.select({ count: sql<number>`count(*)` })
|
||||
.from(contents)
|
||||
.where(whereClause);
|
||||
|
||||
return Number(result.count);
|
||||
}
|
||||
|
||||
async incrementViews(id: string) {
|
||||
await this.databaseService.db
|
||||
.update(contents)
|
||||
.set({ views: sql`${contents.views} + 1` })
|
||||
.where(eq(contents.id, id));
|
||||
}
|
||||
|
||||
async incrementUsage(id: string) {
|
||||
await this.databaseService.db
|
||||
.update(contents)
|
||||
.set({ usageCount: sql`${contents.usageCount} + 1` })
|
||||
.where(eq(contents.id, id));
|
||||
}
|
||||
|
||||
async softDelete(id: string, userId: string) {
|
||||
const [deleted] = await this.databaseService.db
|
||||
.update(contents)
|
||||
.set({ deletedAt: new Date() })
|
||||
.where(and(eq(contents.id, id), eq(contents.userId, userId)))
|
||||
.returning();
|
||||
return deleted;
|
||||
}
|
||||
|
||||
async softDeleteAdmin(id: string) {
|
||||
const [deleted] = await this.databaseService.db
|
||||
.update(contents)
|
||||
.set({ deletedAt: new Date() })
|
||||
.where(eq(contents.id, id))
|
||||
.returning();
|
||||
return deleted;
|
||||
}
|
||||
|
||||
async findBySlug(slug: string) {
|
||||
const [result] = await this.databaseService.db
|
||||
.select()
|
||||
.from(contents)
|
||||
.where(eq(contents.slug, slug))
|
||||
.limit(1);
|
||||
return result;
|
||||
}
|
||||
|
||||
async purgeSoftDeleted(before: Date) {
|
||||
return await this.databaseService.db
|
||||
.delete(contents)
|
||||
.where(
|
||||
and(
|
||||
sql`${contents.deletedAt} IS NOT NULL`,
|
||||
lte(contents.deletedAt, before),
|
||||
),
|
||||
)
|
||||
.returning();
|
||||
}
|
||||
}
|
||||
25
backend/src/crypto/crypto.module.ts
Normal file
25
backend/src/crypto/crypto.module.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { Global, Module } from "@nestjs/common";
|
||||
import { CryptoService } from "./crypto.service";
|
||||
import { EncryptionService } from "./services/encryption.service";
|
||||
import { HashingService } from "./services/hashing.service";
|
||||
import { JwtService } from "./services/jwt.service";
|
||||
import { PostQuantumService } from "./services/post-quantum.service";
|
||||
|
||||
@Global()
|
||||
@Module({
|
||||
providers: [
|
||||
CryptoService,
|
||||
HashingService,
|
||||
JwtService,
|
||||
EncryptionService,
|
||||
PostQuantumService,
|
||||
],
|
||||
exports: [
|
||||
CryptoService,
|
||||
HashingService,
|
||||
JwtService,
|
||||
EncryptionService,
|
||||
PostQuantumService,
|
||||
],
|
||||
})
|
||||
export class CryptoModule {}
|
||||
187
backend/src/crypto/crypto.service.spec.ts
Normal file
187
backend/src/crypto/crypto.service.spec.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
import { ConfigService } from "@nestjs/config";
import { Test, TestingModule } from "@nestjs/testing";

// jest.mock() calls are hoisted above all imports, so the crypto services
// below receive these fakes instead of the real libraries. Output sizes match
// ML-KEM-768: 1184-byte public key, 2400-byte secret key, 1088-byte
// ciphertext, 32-byte shared secret.
jest.mock("@noble/post-quantum/ml-kem.js", () => ({
  ml_kem768: {
    keygen: jest.fn(() => ({
      publicKey: new Uint8Array(1184),
      secretKey: new Uint8Array(2400),
    })),
    encapsulate: jest.fn((_pk: Uint8Array) => ({
      cipherText: new Uint8Array(1088),
      sharedSecret: new Uint8Array(32),
    })),
    // Returns the same 32 zero bytes as encapsulate's sharedSecret, so the
    // round-trip assertion below holds.
    decapsulate: jest.fn(
      (_ct: Uint8Array, _sk: Uint8Array) => new Uint8Array(32),
    ),
  },
}));

// Fake jose: the mocked return strings are coupled to the assertions in the
// describe blocks below (e.g. compactVerify maps "mocked.jws.token" back to
// "Important document content").
jest.mock("jose", () => ({
  generateSecret: jest.fn().mockResolvedValue(new Uint8Array(32)),
  CompactEncrypt: jest.fn().mockImplementation(() => ({
    setProtectedHeader: jest.fn().mockReturnThis(),
    // Five dot-separated parts, mimicking JWE compact serialization.
    encrypt: jest.fn().mockResolvedValue("mocked.jwe.token.parts.here"),
  })),
  compactDecrypt: jest.fn().mockImplementation((jwe) => {
    if (jwe === "invalid.jwe.content") {
      throw new Error("Invalid JWE");
    }
    return Promise.resolve({
      plaintext: new TextEncoder().encode("This is a secret message 🤫"),
    });
  }),
  SignJWT: jest.fn().mockImplementation(() => ({
    setProtectedHeader: jest.fn().mockReturnThis(),
    setIssuedAt: jest.fn().mockReturnThis(),
    setExpirationTime: jest.fn().mockReturnThis(),
    sign: jest.fn().mockResolvedValue("mocked.jwt.token"),
  })),
  jwtVerify: jest.fn().mockImplementation((token) => {
    if (token === "invalid.token.here") {
      throw new Error("Invalid token");
    }
    return Promise.resolve({
      payload: { sub: "1234567890", name: "John Doe", admin: true },
    });
  }),
  CompactSign: jest.fn().mockImplementation(() => ({
    setProtectedHeader: jest.fn().mockReturnThis(),
    // Three dot-separated parts, mimicking JWS compact serialization.
    sign: jest.fn().mockResolvedValue("mocked.jws.token"),
  })),
  compactVerify: jest.fn().mockImplementation((jws) => {
    // Reject anything that looks tampered or is not a 3-part compact JWS.
    if (jws.includes("tampered") || jws.split(".").length !== 3) {
      throw new Error("Tampered or invalid content");
    }
    const payload =
      jws === "mocked.jws.token"
        ? "Important document content"
        : "Original content";
    return Promise.resolve({
      payload: new TextEncoder().encode(payload),
    });
  }),
}));

// Imported after the jest.mock declarations (hoisting makes the order
// cosmetic, but this placement documents the dependency on the mocks).
import { CryptoService } from "./crypto.service";
import { EncryptionService } from "./services/encryption.service";
import { HashingService } from "./services/hashing.service";
import { JwtService } from "./services/jwt.service";
import { PostQuantumService } from "./services/post-quantum.service";

describe("CryptoService", () => {
  let service: CryptoService;

  beforeEach(async () => {
    // Build a testing module with all real service classes; only
    // ConfigService is replaced (every config key resolves to "test-secret").
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        CryptoService,
        HashingService,
        JwtService,
        EncryptionService,
        PostQuantumService,
        {
          provide: ConfigService,
          useValue: {
            get: jest.fn().mockReturnValue("test-secret"),
          },
        },
      ],
    }).compile();

    service = module.get<CryptoService>(CryptoService);
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  // Argon2 hashing is NOT mocked: this exercises the real @node-rs/argon2
  // implementation end-to-end.
  describe("Argon2 Password Hashing", () => {
    it("should hash and verify a password", async () => {
      const password = "mySecurePassword123!";
      const hash = await service.hashPassword(password);
      expect(hash).toBeDefined();
      expect(hash).not.toBe(password);

      const isValid = await service.verifyPassword(password, hash);
      expect(isValid).toBe(true);

      const isInvalid = await service.verifyPassword("wrongPassword", hash);
      expect(isInvalid).toBe(false);
    });
  });

  describe("JWT jose", () => {
    it("should generate and verify a JWT", async () => {
      // The mocked jwtVerify always returns this exact payload.
      const payload = { sub: "1234567890", name: "John Doe", admin: true };
      const token = await service.generateJwt(payload);
      expect(token).toBeDefined();

      const verifiedPayload = await service.verifyJwt(token);
      expect(verifiedPayload.sub).toBe(payload.sub);
      expect(verifiedPayload.name).toBe(payload.name);
      expect(verifiedPayload.admin).toBe(payload.admin);
    });

    it("should throw for invalid token", async () => {
      await expect(service.verifyJwt("invalid.token.here")).rejects.toThrow();
    });
  });

  describe("Encryption/Decryption (JWE)", () => {
    it("should encrypt and decrypt content", async () => {
      // Must match the plaintext hard-coded in the compactDecrypt mock.
      const content = "This is a secret message 🤫";
      const jwe = await service.encryptContent(content);
      expect(jwe).toBeDefined();
      expect(typeof jwe).toBe("string");
      expect(jwe.split(".").length).toBe(5); // JWE compact serialization has 5 parts

      const decrypted = await service.decryptContent(jwe);
      expect(decrypted).toBe(content);
    });

    it("should fail to decrypt invalid content", async () => {
      await expect(
        service.decryptContent("invalid.jwe.content"),
      ).rejects.toThrow();
    });
  });

  describe("Signature (JWS)", () => {
    it("should sign and verify content signature", async () => {
      // Must match the payload the compactVerify mock maps "mocked.jws.token" to.
      const content = "Important document content";
      const jws = await service.signContent(content);
      expect(jws).toBeDefined();
      expect(typeof jws).toBe("string");
      expect(jws.split(".").length).toBe(3); // JWS compact serialization has 3 parts

      const verifiedContent = await service.verifyContentSignature(jws);
      expect(verifiedContent).toBe(content);
    });

    it("should fail to verify tampered content", async () => {
      const content = "Original content";
      const jws = await service.signContent(content);
      const _parts = jws.split(".");
      // Tamper with the payload (middle part)
      const tamperedJws = "this.is.tampered";

      await expect(service.verifyContentSignature(tamperedJws)).rejects.toThrow();
    });
  });

  describe("Post-Quantum @noble/post-quantum", () => {
    it("should generate keypair, encapsulate and decapsulate", () => {
      const { publicKey, secretKey } = service.generatePostQuantumKeyPair();
      expect(publicKey).toBeDefined();
      expect(secretKey).toBeDefined();

      const { cipherText, sharedSecret } = service.encapsulate(publicKey);
      expect(cipherText).toBeDefined();
      expect(sharedSecret).toBeDefined();

      // Both mocks return 32 zero bytes, so the secrets compare equal.
      const decapsulatedSecret = service.decapsulate(cipherText, secretKey);
      expect(decapsulatedSecret).toEqual(sharedSecret);
    });
  });
});
|
||||
79
backend/src/crypto/crypto.service.ts
Normal file
79
backend/src/crypto/crypto.service.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import type * as jose from "jose";
|
||||
import { EncryptionService } from "./services/encryption.service";
|
||||
import { HashingService } from "./services/hashing.service";
|
||||
import { JwtService } from "./services/jwt.service";
|
||||
import { PostQuantumService } from "./services/post-quantum.service";
|
||||
|
||||
/**
 * @deprecated Use HashingService, JwtService, EncryptionService or PostQuantumService directly.
 * This service acts as a Facade for backward compatibility.
 *
 * Every method is a one-line delegation; no logic lives here.
 */
@Injectable()
export class CryptoService {
  constructor(
    private readonly hashingService: HashingService,
    private readonly jwtService: JwtService,
    private readonly encryptionService: EncryptionService,
    private readonly postQuantumService: PostQuantumService,
  ) {}

  /** Delegates to HashingService.hashEmail (SHA-256 of normalized e-mail). */
  async hashEmail(email: string): Promise<string> {
    return this.hashingService.hashEmail(email);
  }

  /** Delegates to HashingService.hashIp (SHA-256 digest). */
  async hashIp(ip: string): Promise<string> {
    return this.hashingService.hashIp(ip);
  }

  /** Delegates to EncryptionService.getPgpEncryptionKey. */
  getPgpEncryptionKey(): string {
    return this.encryptionService.getPgpEncryptionKey();
  }

  /** Delegates to HashingService.hashPassword (Argon2). */
  async hashPassword(password: string): Promise<string> {
    return this.hashingService.hashPassword(password);
  }

  /** Delegates to HashingService.verifyPassword (Argon2). */
  async verifyPassword(password: string, hash: string): Promise<boolean> {
    return this.hashingService.verifyPassword(password, hash);
  }

  /** Delegates to JwtService.generateJwt (HS256, default 2h expiry). */
  async generateJwt(
    payload: jose.JWTPayload,
    expiresIn = "2h",
  ): Promise<string> {
    return this.jwtService.generateJwt(payload, expiresIn);
  }

  /** Delegates to JwtService.verifyJwt; rejects on invalid/expired tokens. */
  async verifyJwt<T extends jose.JWTPayload>(token: string): Promise<T> {
    return this.jwtService.verifyJwt<T>(token);
  }

  /** Delegates to EncryptionService.encryptContent (compact JWE). */
  async encryptContent(content: string): Promise<string> {
    return this.encryptionService.encryptContent(content);
  }

  /** Delegates to EncryptionService.decryptContent (compact JWE). */
  async decryptContent(jwe: string): Promise<string> {
    return this.encryptionService.decryptContent(jwe);
  }

  /** Delegates to EncryptionService.signContent (compact JWS). */
  async signContent(content: string): Promise<string> {
    return this.encryptionService.signContent(content);
  }

  /** Delegates to EncryptionService.verifyContentSignature (compact JWS). */
  async verifyContentSignature(jws: string): Promise<string> {
    return this.encryptionService.verifyContentSignature(jws);
  }

  /** Delegates to PostQuantumService.generatePostQuantumKeyPair (ML-KEM-768). */
  generatePostQuantumKeyPair() {
    return this.postQuantumService.generatePostQuantumKeyPair();
  }

  /** Delegates to PostQuantumService.encapsulate. */
  encapsulate(publicKey: Uint8Array) {
    return this.postQuantumService.encapsulate(publicKey);
  }

  /** Delegates to PostQuantumService.decapsulate. */
  decapsulate(cipherText: Uint8Array, secretKey: Uint8Array) {
    return this.postQuantumService.decapsulate(cipherText, secretKey);
  }
}
|
||||
58
backend/src/crypto/services/encryption.service.ts
Normal file
58
backend/src/crypto/services/encryption.service.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import * as jose from "jose";
|
||||
|
||||
@Injectable()
|
||||
export class EncryptionService {
|
||||
private readonly logger = new Logger(EncryptionService.name);
|
||||
private readonly jwtSecret: Uint8Array;
|
||||
private readonly encryptionKey: Uint8Array;
|
||||
|
||||
constructor(private configService: ConfigService) {
|
||||
const secret = this.configService.get<string>("JWT_SECRET");
|
||||
this.jwtSecret = new TextEncoder().encode(
|
||||
secret || "default-secret-change-me-in-production",
|
||||
);
|
||||
|
||||
const encKey = this.configService.get<string>("ENCRYPTION_KEY");
|
||||
if (!encKey) {
|
||||
this.logger.warn(
|
||||
"ENCRYPTION_KEY is not defined, using a default insecure key for development",
|
||||
);
|
||||
}
|
||||
const rawKey = encKey || "default-encryption-key-32-chars-";
|
||||
this.encryptionKey = new TextEncoder().encode(
|
||||
rawKey.padEnd(32, "0").substring(0, 32),
|
||||
);
|
||||
}
|
||||
|
||||
async encryptContent(content: string): Promise<string> {
|
||||
const data = new TextEncoder().encode(content);
|
||||
return new jose.CompactEncrypt(data)
|
||||
.setProtectedHeader({ alg: "dir", enc: "A256GCM" })
|
||||
.encrypt(this.encryptionKey);
|
||||
}
|
||||
|
||||
async decryptContent(jwe: string): Promise<string> {
|
||||
const { plaintext } = await jose.compactDecrypt(jwe, this.encryptionKey);
|
||||
return new TextDecoder().decode(plaintext);
|
||||
}
|
||||
|
||||
async signContent(content: string): Promise<string> {
|
||||
const data = new TextEncoder().encode(content);
|
||||
return new jose.CompactSign(data)
|
||||
.setProtectedHeader({ alg: "HS256" })
|
||||
.sign(this.jwtSecret);
|
||||
}
|
||||
|
||||
async verifyContentSignature(jws: string): Promise<string> {
|
||||
const { payload } = await jose.compactVerify(jws, this.jwtSecret);
|
||||
return new TextDecoder().decode(payload);
|
||||
}
|
||||
|
||||
getPgpEncryptionKey(): string {
|
||||
return (
|
||||
this.configService.get<string>("PGP_ENCRYPTION_KEY") || "default-pgp-key"
|
||||
);
|
||||
}
|
||||
}
|
||||
32
backend/src/crypto/services/hashing.service.ts
Normal file
32
backend/src/crypto/services/hashing.service.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { hash, verify } from "@node-rs/argon2";
|
||||
|
||||
@Injectable()
|
||||
export class HashingService {
|
||||
async hashEmail(email: string): Promise<string> {
|
||||
const normalizedEmail = email.toLowerCase().trim();
|
||||
return this.hashSha256(normalizedEmail);
|
||||
}
|
||||
|
||||
async hashIp(ip: string): Promise<string> {
|
||||
return this.hashSha256(ip);
|
||||
}
|
||||
|
||||
async hashSha256(text: string): Promise<string> {
|
||||
const data = new TextEncoder().encode(text);
|
||||
const hashBuffer = await crypto.subtle.digest("SHA-256", data);
|
||||
return Array.from(new Uint8Array(hashBuffer))
|
||||
.map((b) => b.toString(16).padStart(2, "0"))
|
||||
.join("");
|
||||
}
|
||||
|
||||
async hashPassword(password: string): Promise<string> {
|
||||
return hash(password, {
|
||||
algorithm: 2,
|
||||
});
|
||||
}
|
||||
|
||||
async verifyPassword(password: string, hash: string): Promise<boolean> {
|
||||
return verify(hash, password);
|
||||
}
|
||||
}
|
||||
37
backend/src/crypto/services/jwt.service.ts
Normal file
37
backend/src/crypto/services/jwt.service.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import * as jose from "jose";
|
||||
|
||||
@Injectable()
|
||||
export class JwtService {
|
||||
private readonly logger = new Logger(JwtService.name);
|
||||
private readonly jwtSecret: Uint8Array;
|
||||
|
||||
constructor(private configService: ConfigService) {
|
||||
const secret = this.configService.get<string>("JWT_SECRET");
|
||||
if (!secret) {
|
||||
this.logger.warn(
|
||||
"JWT_SECRET is not defined, using a default insecure secret for development",
|
||||
);
|
||||
}
|
||||
this.jwtSecret = new TextEncoder().encode(
|
||||
secret || "default-secret-change-me-in-production",
|
||||
);
|
||||
}
|
||||
|
||||
async generateJwt(
|
||||
payload: jose.JWTPayload,
|
||||
expiresIn = "2h",
|
||||
): Promise<string> {
|
||||
return new jose.SignJWT(payload)
|
||||
.setProtectedHeader({ alg: "HS256" })
|
||||
.setIssuedAt()
|
||||
.setExpirationTime(expiresIn)
|
||||
.sign(this.jwtSecret);
|
||||
}
|
||||
|
||||
async verifyJwt<T extends jose.JWTPayload>(token: string): Promise<T> {
|
||||
const { payload } = await jose.jwtVerify(token, this.jwtSecret);
|
||||
return payload as T;
|
||||
}
|
||||
}
|
||||
20
backend/src/crypto/services/post-quantum.service.ts
Normal file
20
backend/src/crypto/services/post-quantum.service.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { ml_kem768 } from "@noble/post-quantum/ml-kem.js";
|
||||
|
||||
@Injectable()
|
||||
export class PostQuantumService {
|
||||
generatePostQuantumKeyPair() {
|
||||
const seed = new Uint8Array(64);
|
||||
crypto.getRandomValues(seed);
|
||||
const { publicKey, secretKey } = ml_kem768.keygen(seed);
|
||||
return { publicKey, secretKey };
|
||||
}
|
||||
|
||||
encapsulate(publicKey: Uint8Array) {
|
||||
return ml_kem768.encapsulate(publicKey);
|
||||
}
|
||||
|
||||
decapsulate(cipherText: Uint8Array, secretKey: Uint8Array) {
|
||||
return ml_kem768.decapsulate(cipherText, secretKey);
|
||||
}
|
||||
}
|
||||
11
backend/src/database/database.module.ts
Normal file
11
backend/src/database/database.module.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { Global, Module } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { DatabaseService } from "./database.service";
|
||||
|
||||
/**
 * Global module exposing DatabaseService (Drizzle over a pg Pool) to the
 * whole application without each feature module importing it explicitly.
 */
@Global()
@Module({
  imports: [ConfigModule],
  providers: [DatabaseService],
  exports: [DatabaseService],
})
export class DatabaseModule {}
|
||||
92
backend/src/database/database.service.ts
Normal file
92
backend/src/database/database.service.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
/*
|
||||
* Copyright (C) 2025 Yidhra Studio. - All Rights Reserved
|
||||
* Updated : 25/04/2025 10:52
|
||||
*
|
||||
* Unauthorized copying or redistribution of this file in source and binary forms via any medium
|
||||
* is strictly prohibited.
|
||||
*/
|
||||
|
||||
import {
|
||||
Injectable,
|
||||
Logger,
|
||||
OnModuleDestroy,
|
||||
OnModuleInit,
|
||||
} from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { drizzle } from "drizzle-orm/node-postgres";
|
||||
import { migrate } from "drizzle-orm/node-postgres/migrator";
|
||||
import { Pool } from "pg";
|
||||
import * as schema from "./schemas";
|
||||
|
||||
@Injectable()
|
||||
export class DatabaseService implements OnModuleInit, OnModuleDestroy {
|
||||
private readonly logger = new Logger(DatabaseService.name);
|
||||
private readonly pool!: Pool;
|
||||
public readonly db: ReturnType<typeof drizzle>;
|
||||
|
||||
constructor(private configService: ConfigService) {
|
||||
// Create the PostgreSQL client
|
||||
const connectionString = this.getDatabaseConnectionString();
|
||||
this.pool = new Pool({ connectionString });
|
||||
// Recreate drizzle with initialized pool
|
||||
this.db = drizzle(this.pool, { schema });
|
||||
}
|
||||
|
||||
async onModuleInit() {
|
||||
try {
|
||||
// Run migrations if in production mode
|
||||
if (this.configService.get("NODE_ENV") === "production") {
|
||||
this.logger.log("Running database migrations...");
|
||||
await migrate(this.db, { migrationsFolder: ".migrations" });
|
||||
this.logger.log("Database migrations completed successfully");
|
||||
} else {
|
||||
this.logger.debug("Skipping migrations in non-production environment");
|
||||
}
|
||||
this.logger.log("Database connection established successfully");
|
||||
} catch (error) {
|
||||
this.logger.error("Failed to initialize database connection", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async onModuleDestroy() {
|
||||
try {
|
||||
// Close the database connection
|
||||
await this.pool.end();
|
||||
this.logger.log("Database connection closed successfully");
|
||||
} catch (error) {
|
||||
this.logger.error("Error closing database connection", error);
|
||||
}
|
||||
}
|
||||
|
||||
// Get the database connection string from environment variables
|
||||
private getDatabaseConnectionString(): string {
|
||||
this.logger.debug(
|
||||
"Getting database connection string from environment variables",
|
||||
);
|
||||
|
||||
const password = this.configService.get<string>("POSTGRES_PASSWORD");
|
||||
const username = this.configService.get<string>("POSTGRES_USER");
|
||||
const host = this.configService.get<string>("POSTGRES_HOST");
|
||||
const port = this.configService.get<string>("POSTGRES_PORT");
|
||||
const database = this.configService.get<string>("POSTGRES_DB");
|
||||
|
||||
const missingVars: string[] = [];
|
||||
if (!password) missingVars.push("POSTGRES_PASSWORD");
|
||||
if (!username) missingVars.push("POSTGRES_USER");
|
||||
if (!host) missingVars.push("POSTGRES_HOST");
|
||||
if (!port) missingVars.push("POSTGRES_PORT");
|
||||
if (!database) missingVars.push("POSTGRES_DB");
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
const errorMessage = `Database configuration is missing. Missing variables: ${missingVars.join(", ")}. Please check your .env file.`;
|
||||
this.logger.error(errorMessage);
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
this.logger.debug(
|
||||
`Database connection configured for ${username}@${host}:${port}/${database}`,
|
||||
);
|
||||
return `postgres://${username}:${password}@${host}:${port}/${database}`;
|
||||
}
|
||||
}
|
||||
35
backend/src/database/schemas/api_keys.ts
Normal file
35
backend/src/database/schemas/api_keys.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import {
|
||||
boolean,
|
||||
index,
|
||||
pgTable,
|
||||
timestamp,
|
||||
uuid,
|
||||
varchar,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { users } from "./users";
|
||||
|
||||
export const apiKeys = pgTable(
|
||||
"api_keys",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
userId: uuid("user_id")
|
||||
.notNull()
|
||||
.references(() => users.uuid, { onDelete: "cascade" }),
|
||||
keyHash: varchar("key_hash", { length: 128 }).notNull().unique(), // Haché pour la sécurité (SHA-256)
|
||||
name: varchar("name", { length: 128 }).notNull(), // Nom donné par l'utilisateur (ex: "My App")
|
||||
prefix: varchar("prefix", { length: 8 }).notNull(), // Pour identification visuelle (ex: "mg_...")
|
||||
isActive: boolean("is_active").notNull().default(true),
|
||||
lastUsedAt: timestamp("last_used_at", { withTimezone: true }),
|
||||
expiresAt: timestamp("expires_at", { withTimezone: true }),
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
userIdIdx: index("api_keys_user_id_idx").on(table.userId),
|
||||
keyHashIdx: index("api_keys_key_hash_idx").on(table.keyHash),
|
||||
}),
|
||||
);
|
||||
43
backend/src/database/schemas/audit_logs.ts
Normal file
43
backend/src/database/schemas/audit_logs.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import {
|
||||
index,
|
||||
jsonb,
|
||||
pgTable,
|
||||
timestamp,
|
||||
uuid,
|
||||
varchar,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { users } from "./users";
|
||||
|
||||
export const auditLogs = pgTable(
|
||||
"audit_logs",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
userId: uuid("user_id").references(() => users.uuid, {
|
||||
onDelete: "set null",
|
||||
}), // L'utilisateur qui a fait l'action
|
||||
action: varchar("action", { length: 64 }).notNull(), // ex: 'PII_ACCESS', 'USER_DELETE', 'ROLE_CHANGE'
|
||||
entityType: varchar("entity_type", { length: 64 }).notNull(), // ex: 'users', 'contents'
|
||||
entityId: uuid("entity_id"), // ID de l'entité concernée
|
||||
|
||||
// Détails de l'action pour la conformité
|
||||
details: jsonb("details"), // Données supplémentaires (ex: quelles colonnes ont changé)
|
||||
ipHash: varchar("ip_hash", { length: 64 }), // IP de l'auteur (hachée pour RGPD)
|
||||
userAgent: varchar("user_agent", { length: 255 }),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
userIdIdx: index("audit_logs_user_id_idx").on(table.userId),
|
||||
actionIdx: index("audit_logs_action_idx").on(table.action),
|
||||
entityIdx: index("audit_logs_entity_idx").on(
|
||||
table.entityType,
|
||||
table.entityId,
|
||||
),
|
||||
createdAtIdx: index("audit_logs_created_at_idx").on(table.createdAt),
|
||||
}),
|
||||
);
|
||||
|
||||
export type AuditLogInDb = typeof auditLogs.$inferSelect;
|
||||
export type NewAuditLogInDb = typeof auditLogs.$inferInsert;
|
||||
24
backend/src/database/schemas/categories.ts
Normal file
24
backend/src/database/schemas/categories.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { index, pgTable, timestamp, uuid, varchar } from "drizzle-orm/pg-core";
|
||||
|
||||
export const categories = pgTable(
|
||||
"categories",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
name: varchar("name", { length: 64 }).notNull().unique(),
|
||||
slug: varchar("slug", { length: 64 }).notNull().unique(),
|
||||
description: varchar("description", { length: 255 }),
|
||||
iconUrl: varchar("icon_url", { length: 512 }),
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
slugIdx: index("categories_slug_idx").on(table.slug),
|
||||
}),
|
||||
);
|
||||
|
||||
export type CategoryInDb = typeof categories.$inferSelect;
|
||||
export type NewCategoryInDb = typeof categories.$inferInsert;
|
||||
66
backend/src/database/schemas/content.ts
Normal file
66
backend/src/database/schemas/content.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import {
|
||||
index,
|
||||
integer,
|
||||
pgEnum,
|
||||
pgTable,
|
||||
primaryKey,
|
||||
timestamp,
|
||||
uuid,
|
||||
varchar,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { categories } from "./categories";
|
||||
import { tags } from "./tags";
|
||||
import { users } from "./users";
|
||||
|
||||
export const contentType = pgEnum("content_type", ["meme", "gif"]);
|
||||
|
||||
export const contents = pgTable(
|
||||
"contents",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
userId: uuid("user_id")
|
||||
.notNull()
|
||||
.references(() => users.uuid, { onDelete: "cascade" }),
|
||||
type: contentType("type").notNull(),
|
||||
categoryId: uuid("category_id").references(() => categories.id, {
|
||||
onDelete: "set null",
|
||||
}),
|
||||
title: varchar("title", { length: 255 }).notNull(),
|
||||
slug: varchar("slug", { length: 255 }).notNull().unique(),
|
||||
storageKey: varchar("storage_key", { length: 512 }).notNull().unique(), // Clé interne S3
|
||||
mimeType: varchar("mime_type", { length: 128 }).notNull(), // Pour le Content-Type HTTP
|
||||
fileSize: integer("file_size").notNull(), // Taille en octets
|
||||
views: integer("views").notNull().default(0),
|
||||
usageCount: integer("usage_count").notNull().default(0),
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
deletedAt: timestamp("deleted_at", { withTimezone: true }), // Soft delete
|
||||
},
|
||||
(table) => ({
|
||||
userIdIdx: index("contents_user_id_idx").on(table.userId),
|
||||
storageKeyIdx: index("contents_storage_key_idx").on(table.storageKey),
|
||||
deletedAtIdx: index("contents_deleted_at_idx").on(table.deletedAt),
|
||||
}),
|
||||
);
|
||||
|
||||
export const contentsToTags = pgTable(
|
||||
"contents_to_tags",
|
||||
{
|
||||
contentId: uuid("content_id")
|
||||
.notNull()
|
||||
.references(() => contents.id, { onDelete: "cascade" }),
|
||||
tagId: uuid("tag_id")
|
||||
.notNull()
|
||||
.references(() => tags.id, { onDelete: "cascade" }),
|
||||
},
|
||||
(t) => ({
|
||||
pk: primaryKey({ columns: [t.contentId, t.tagId] }),
|
||||
}),
|
||||
);
|
||||
|
||||
export type ContentInDb = typeof contents.$inferSelect;
|
||||
export type NewContentInDb = typeof contents.$inferInsert;
|
||||
24
backend/src/database/schemas/favorites.ts
Normal file
24
backend/src/database/schemas/favorites.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { pgTable, primaryKey, timestamp, uuid } from "drizzle-orm/pg-core";
|
||||
import { contents } from "./content";
|
||||
import { users } from "./users";
|
||||
|
||||
export const favorites = pgTable(
|
||||
"favorites",
|
||||
{
|
||||
userId: uuid("user_id")
|
||||
.notNull()
|
||||
.references(() => users.uuid, { onDelete: "cascade" }),
|
||||
contentId: uuid("content_id")
|
||||
.notNull()
|
||||
.references(() => contents.id, { onDelete: "cascade" }),
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
},
|
||||
(t) => ({
|
||||
pk: primaryKey({ columns: [t.userId, t.contentId] }),
|
||||
}),
|
||||
);
|
||||
|
||||
export type FavoriteInDb = typeof favorites.$inferSelect;
|
||||
export type NewFavoriteInDb = typeof favorites.$inferInsert;
|
||||
11
backend/src/database/schemas/index.ts
Normal file
11
backend/src/database/schemas/index.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
// Barrel file: re-exports every table schema so callers can import from a
// single path (e.g. `import { users, contents } from "./schemas"`).
export * from "./api_keys";
export * from "./audit_logs";
export * from "./categories";
export * from "./content";
export * from "./favorites";
export * from "./pgp";
export * from "./rbac";
export * from "./reports";
export * from "./sessions";
export * from "./tags";
export * from "./users";
|
||||
63
backend/src/database/schemas/pgp.ts
Normal file
63
backend/src/database/schemas/pgp.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { SQL, sql } from "drizzle-orm";
|
||||
import { AnyPgColumn, customType } from "drizzle-orm/pg-core";
|
||||
|
||||
// Clé de chiffrement PGP récupérée depuis l'environnement
|
||||
const getPgpKey = () => process.env.PGP_ENCRYPTION_KEY || "default-pgp-key";
|
||||
|
||||
/**
|
||||
* Type personnalisé pour les données chiffrées PGP (stockées en bytea dans Postgres).
|
||||
* Le chiffrement est géré automatiquement à l'écriture (INSERT/UPDATE) via `toDriver`.
|
||||
*
|
||||
* **Pour que le déchiffrement soit automatique à la lecture (SELECT), il faut impérativement utiliser l'utilitaire
|
||||
* `withAutomaticPgpDecrypt` sur la colonne après la définition de la table. Attention à la consommation CPU et mémoire**
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* export const users = pgTable('users', {
|
||||
* email: pgpEncrypted('email').notNull(),
|
||||
* });
|
||||
*
|
||||
* // Activation du déchiffrement automatique
|
||||
* withAutomaticPgpDecrypt(users.email);
|
||||
* ```
|
||||
*/
|
||||
export const pgpEncrypted = customType<{ data: string; driverData: Buffer }>({
|
||||
dataType() {
|
||||
return "bytea";
|
||||
},
|
||||
toDriver(value: string): SQL {
|
||||
return sql`pgp_sym_encrypt(${value}, ${getPgpKey()})`;
|
||||
},
|
||||
fromDriver(value: Buffer | string): string {
|
||||
if (typeof value === "string") return value;
|
||||
return value.toString();
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Utilitaire pour injecter le déchiffrement automatique dans une colonne.
|
||||
* Modifie la méthode getSQL de la colonne pour inclure pgp_sym_decrypt.
|
||||
*/
|
||||
export function withAutomaticPgpDecrypt<T extends AnyPgColumn>(column: T): T {
|
||||
const originalGetSQL = column.getSQL.bind(column);
|
||||
column.getSQL = () =>
|
||||
sql`pgp_sym_decrypt(${originalGetSQL()}, ${getPgpKey()})`.mapWith(column);
|
||||
return column;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Utiliser directement les colonnes de type pgpEncrypted qui gèrent maintenant le chiffrement automatiquement.
|
||||
*/
|
||||
export function pgpSymEncrypt(value: string | SQL, key: string | SQL) {
|
||||
return sql`pgp_sym_encrypt(${value}, ${key})`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Utiliser directement les colonnes de type pgpEncrypted qui gèrent maintenant le déchiffrement automatiquement.
|
||||
*/
|
||||
export function pgpSymDecrypt(
|
||||
column: AnyPgColumn,
|
||||
key: string | SQL,
|
||||
): SQL<string> {
|
||||
return sql`pgp_sym_decrypt(${column}, ${key})`.mapWith(column) as SQL<string>;
|
||||
}
|
||||
71
backend/src/database/schemas/rbac.ts
Normal file
71
backend/src/database/schemas/rbac.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import {
|
||||
index,
|
||||
pgTable,
|
||||
primaryKey,
|
||||
timestamp,
|
||||
uuid,
|
||||
varchar,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { users } from "./users";
|
||||
|
||||
export const roles = pgTable(
|
||||
"roles",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
name: varchar("name", { length: 64 }).notNull().unique(),
|
||||
slug: varchar("slug", { length: 64 }).notNull().unique(),
|
||||
description: varchar("description", { length: 128 }),
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
slugIdx: index("roles_slug_idx").on(table.slug),
|
||||
}),
|
||||
);
|
||||
|
||||
export const permissions = pgTable(
|
||||
"permissions",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
name: varchar("name", { length: 64 }).notNull().unique(),
|
||||
slug: varchar("slug", { length: 64 }).notNull().unique(),
|
||||
description: varchar("description", { length: 128 }),
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
slugIdx: index("permissions_slug_idx").on(table.slug),
|
||||
}),
|
||||
);
|
||||
|
||||
export const rolesToPermissions = pgTable(
|
||||
"roles_to_permissions",
|
||||
{
|
||||
roleId: uuid("role_id")
|
||||
.notNull()
|
||||
.references(() => roles.id, { onDelete: "cascade" }),
|
||||
permissionId: uuid("permission_id")
|
||||
.notNull()
|
||||
.references(() => permissions.id, { onDelete: "cascade" }),
|
||||
},
|
||||
(t) => ({
|
||||
pk: primaryKey({ columns: [t.roleId, t.permissionId] }),
|
||||
}),
|
||||
);
|
||||
|
||||
export const usersToRoles = pgTable(
|
||||
"users_to_roles",
|
||||
{
|
||||
userId: uuid("user_id")
|
||||
.notNull()
|
||||
.references(() => users.uuid, { onDelete: "cascade" }),
|
||||
roleId: uuid("role_id")
|
||||
.notNull()
|
||||
.references(() => roles.id, { onDelete: "cascade" }),
|
||||
},
|
||||
(t) => ({
|
||||
pk: primaryKey({ columns: [t.userId, t.roleId] }),
|
||||
}),
|
||||
);
|
||||
62
backend/src/database/schemas/reports.ts
Normal file
62
backend/src/database/schemas/reports.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import {
|
||||
index,
|
||||
pgEnum,
|
||||
pgTable,
|
||||
text,
|
||||
timestamp,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { contents } from "./content";
|
||||
import { tags } from "./tags";
|
||||
import { users } from "./users";
|
||||
|
||||
export const reportStatus = pgEnum("report_status", [
|
||||
"pending",
|
||||
"reviewed",
|
||||
"resolved",
|
||||
"dismissed",
|
||||
]);
|
||||
export const reportReason = pgEnum("report_reason", [
|
||||
"inappropriate",
|
||||
"spam",
|
||||
"copyright",
|
||||
"other",
|
||||
]);
|
||||
|
||||
export const reports = pgTable(
|
||||
"reports",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
reporterId: uuid("reporter_id")
|
||||
.notNull()
|
||||
.references(() => users.uuid, { onDelete: "cascade" }),
|
||||
|
||||
// Le signalement peut porter sur un contenu OU un tag
|
||||
contentId: uuid("content_id").references(() => contents.id, {
|
||||
onDelete: "cascade",
|
||||
}),
|
||||
tagId: uuid("tag_id").references(() => tags.id, { onDelete: "cascade" }),
|
||||
|
||||
reason: reportReason("reason").notNull(),
|
||||
description: text("description"),
|
||||
status: reportStatus("status").default("pending").notNull(),
|
||||
|
||||
expiresAt: timestamp("expires_at", { withTimezone: true }), // Pour purge automatique RGPD
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
reporterIdx: index("reports_reporter_id_idx").on(table.reporterId),
|
||||
contentIdx: index("reports_content_id_idx").on(table.contentId),
|
||||
tagIdx: index("reports_tag_id_idx").on(table.tagId),
|
||||
statusIdx: index("reports_status_idx").on(table.status),
|
||||
expiresAtIdx: index("reports_expires_at_idx").on(table.expiresAt),
|
||||
}),
|
||||
);
|
||||
|
||||
export type ReportInDb = typeof reports.$inferSelect;
|
||||
export type NewReportInDb = typeof reports.$inferInsert;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user