Compare commits
103 commits in range 9c45bf11e4...v0.1.0:

9699127739, 938d8bde7b, 65c7096f46, 57c00ad4d1, 39618f7708, e84e4a5a9d, e74973a9d0, 9233c1bf89,
88c7f45a2c, 9af72156f5, 597a4d615e, 2df45af305, 863a4bf528, 9a1cdb05a4, 28caf92f9a, 8b2728dc5a,
3bbbbc307f, f080919563, edc1ab2438, 01b66d6f2f, 9a70dd02bb, e285a4e634, f247a01ac7, bb640cd8f9,
c1118e9f25, eae1f84b92, 8d27532dc0, f79507730e, 7048c2731e, d74fd15036, 86a697c392, 38adbb6e77,
594a387712, 4ca15b578d, 2912231769, db17994bb5, f57e028178, e84aa8a8db, c6b23de481, 0611ef715c,
0a1391674f, 2fedaca502, a6837ff7fb, 74b61004e7, 760343da76, 14f8b8b63d, 50a186da1d, 3908989b39,
02d70f27ea, 65f8860cc0, 0e9edd4bfc, 6ce58d1639, 47d6fcb6a0, d7c2a965a0, fb7ddde42e, 026aebaee3,
a30113e8e2, f10c444957, 975e29dea1, a4ce48a91c, ff6fc1c6b3, 5671ba60a6, 5f2672021e, 17c2cea366,
5665fcd98f, cb6d87eafd, 48ebc7dc36, dbfd14b57a, 570576435c, 7c3f4050c5, c19d86a0cb, 6d2e1ead05,
6756cf6bc7, 6aaf53c90b, ccec39bfa0, a06fdbf21e, de537e5947, 0cb361afb8, 9097a3e9b5, 24eb99093c,
75ac95cadb, 35abd0496e, 03e5915fcc, 77ac960411, 8425ffe4fc, b81835661c, fbc231dc9a, 37a23390d5,
bd9dd140ab, 5b6e0143b6, 214bf077e5, bb9ae058db, 0b07320974, 0c045e8d3c, 8ffeaeba05, 9e37272bff,
7cb5ff487d, 0cef694f2b, 5c4badb837, b53c51b825, 76de69fc64, ec8eb8d43a, 514bd354bf

.env.example  (42 changed lines)
@@ -8,32 +8,40 @@ BACKEND_PORT=3001
 FRONTEND_PORT=3000
 
 # Database (PostgreSQL)
-POSTGRES_HOST=localhost
+POSTGRES_HOST=db
 POSTGRES_PORT=5432
-POSTGRES_DB=memegoat
+POSTGRES_DB=app
 POSTGRES_USER=app
 POSTGRES_PASSWORD=app
 
-# Storage (S3/MinIO) - À configurer lors de l'implémentation
-# S3_ENDPOINT=localhost
-# S3_PORT=9000
-# S3_ACCESS_KEY=
-# S3_SECRET_KEY=
-# S3_BUCKET=memegoat
+# Redis
+REDIS_HOST=redis
+REDIS_PORT=6379
 
-# Security (PGP & Auth) - À configurer lors de l'implémentation
-# PGP_PASSPHRASE=
-JWT_SECRET=super-secret-key-change-me-in-production
-ENCRYPTION_KEY=another-super-secret-key-32-chars
+# Storage (S3/MinIO)
+S3_ENDPOINT=s3
+S3_PORT=9000
+S3_ACCESS_KEY=minioadmin
+S3_SECRET_KEY=minioadmin
+S3_BUCKET_NAME=memegoat
+
+# Security
+JWT_SECRET=super-secret-jwt-key-change-me-in-prod
+ENCRYPTION_KEY=01234567890123456789012345678901
+PGP_ENCRYPTION_KEY=super-secret-pgp-key
+SESSION_PASSWORD=super-secret-session-password-32-chars
 
 # Mail
-MAIL_HOST=localhost
+MAIL_HOST=mail
 MAIL_PORT=1025
 MAIL_SECURE=false
-MAIL_USER=user
-MAIL_PASS=password
-MAIL_FROM=noreply@memegoat.fr
-DOMAIN_NAME=memegoat.fr
+MAIL_USER=
+MAIL_PASS=
+MAIL_FROM=noreply@memegoat.local
+DOMAIN_NAME=localhost
 
+ENABLE_CORS=false
+CORS_DOMAIN_NAME=localhost
+
 # Media Limits (in KB)
 MAX_IMAGE_SIZE_KB=512
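
The new .env.example introduces Redis, S3/MinIO and security variables. The ROADMAP further down lists environment-variable validation with Zod as already in place; the following is a minimal sketch of what such a schema could look like for a few of these keys (the file name env.schema.ts, the variable subset and the length constraints are assumptions, not taken from this compare):

// env.schema.ts (hypothetical name): validate process.env at startup with Zod.
import { z } from "zod";

const envSchema = z.object({
  POSTGRES_HOST: z.string().min(1),
  POSTGRES_PORT: z.coerce.number().int().default(5432),
  REDIS_HOST: z.string().min(1),
  REDIS_PORT: z.coerce.number().int().default(6379),
  S3_ENDPOINT: z.string().min(1),
  S3_BUCKET_NAME: z.string().min(1),
  JWT_SECRET: z.string().min(32),
  ENCRYPTION_KEY: z.string().length(32),
  MAX_IMAGE_SIZE_KB: z.coerce.number().int().positive().default(512),
});

export type Env = z.infer<typeof envSchema>;

// Fails fast with a readable error if a variable is missing or malformed.
export function validateEnv(config: Record<string, unknown>): Env {
  const parsed = envSchema.safeParse(config);
  if (!parsed.success) {
    throw new Error(`Invalid environment: ${parsed.error.message}`);
  }
  return parsed.data;
}

In a NestJS setup this kind of function is usually wired through ConfigModule.forRoot({ validate: validateEnv }); the actual wiring is not shown in this compare.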

(deleted file, 25 lines)
@@ -1,25 +0,0 @@
name: Backend Tests
on:
  push:
    paths:
      - 'backend/**'
  pull_request:
    paths:
      - 'backend/**'

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: pnpm/action-setup@v4
        with:
          version: 9
      - uses: actions/setup-node@v4
        with:
          node-version: 22
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install
      - name: Run Backend Tests
        run: pnpm -F @memegoat/backend test

.gitea/workflows/ci.yml  (new file, 111 lines)
@@ -0,0 +1,111 @@
# Pipeline CI/CD pour Gitea Actions (Forgejo)
# Compatible avec GitHub Actions pour la portabilité
name: CI/CD Pipeline

on:
  push:
    branches:
      - '**'
    tags:
      - 'v*'
  pull_request:

jobs:
  validate:
    name: Valider ${{ matrix.component }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        component: [backend, frontend, documentation]
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Installer pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 9

      - name: Configurer Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: Obtenir le chemin du store pnpm
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path --silent)" >> "${GITEA_OUTPUT:-$GITHUB_OUTPUT}"

      - name: Configurer le cache pnpm
        uses: actions/cache@v4
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Installer les dépendances
        run: pnpm install --frozen-lockfile --prefer-offline

      - name: Lint ${{ matrix.component }}
        run: pnpm -F @memegoat/${{ matrix.component }} lint

      - name: Tester ${{ matrix.component }}
        if: matrix.component == 'backend' || matrix.component == 'frontend'
        run: |
          if pnpm -F @memegoat/${{ matrix.component }} run | grep -q "test"; then
            pnpm -F @memegoat/${{ matrix.component }} test
          else
            echo "Pas de script de test trouvé pour ${{ matrix.component }}, passage."
          fi

      - name: Build ${{ matrix.component }}
        run: pnpm -F @memegoat/${{ matrix.component }} build
        env:
          NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}

  deploy:
    name: Déploiement en Production
    needs: validate
    # Déclenchement uniquement sur push sur main ou tag de version
    # Gitea supporte le contexte 'github' pour la compatibilité
    if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Vérifier l'environnement Docker
        run: |
          docker version
          docker compose version

      - name: Déployer avec Docker Compose
        run: |
          docker compose -f docker-compose.prod.yml up -d --build
        env:
          BACKEND_PORT: ${{ secrets.BACKEND_PORT }}
          FRONTEND_PORT: ${{ secrets.FRONTEND_PORT }}
          POSTGRES_HOST: ${{ secrets.POSTGRES_HOST }}
          POSTGRES_PORT: ${{ secrets.POSTGRES_PORT }}
          POSTGRES_USER: ${{ secrets.POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
          POSTGRES_DB: ${{ secrets.POSTGRES_DB }}
          REDIS_HOST: ${{ secrets.REDIS_HOST }}
          REDIS_PORT: ${{ secrets.REDIS_PORT }}
          S3_ENDPOINT: ${{ secrets.S3_ENDPOINT }}
          S3_PORT: ${{ secrets.S3_PORT }}
          S3_ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
          S3_SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
          S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }}
          JWT_SECRET: ${{ secrets.JWT_SECRET }}
          ENCRYPTION_KEY: ${{ secrets.ENCRYPTION_KEY }}
          PGP_ENCRYPTION_KEY: ${{ secrets.PGP_ENCRYPTION_KEY }}
          SESSION_PASSWORD: ${{ secrets.SESSION_PASSWORD }}
          MAIL_HOST: ${{ secrets.MAIL_HOST }}
          MAIL_PASS: ${{ secrets.MAIL_PASS }}
          MAIL_USER: ${{ secrets.MAIL_USER }}
          MAIL_FROM: ${{ secrets.MAIL_FROM }}
          DOMAIN_NAME: ${{ secrets.DOMAIN_NAME }}
          NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }}

(deleted file, 36 lines)
@@ -1,36 +0,0 @@
name: Lint
on:
  push:
    paths:
      - 'frontend/**'
      - 'backend/**'
      - 'documentation/**'
  pull_request:
    paths:
      - 'frontend/**'
      - 'backend/**'
      - 'documentation/**'

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: pnpm/action-setup@v4
        with:
          version: 9
      - uses: actions/setup-node@v4
        with:
          node-version: 22
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install
      - name: Lint Frontend
        if: success() || failure()
        run: pnpm -F @memegoat/frontend lint
      - name: Lint Backend
        if: success() || failure()
        run: pnpm -F @memegoat/backend lint
      - name: Lint Documentation
        if: success() || failure()
        run: pnpm -F @bypass/documentation lint

.gitignore  (vendored, 1 changed line)
@@ -1,6 +1,7 @@
 # Dependencies
 node_modules/
 jspm_packages/
+.pnpm-store
 
 # Environment variables
 .env

ROADMAP.md  (new file, 50 lines)
@@ -0,0 +1,50 @@
# 🐐 Memegoat - Roadmap & Critères de Production

Ce document définit les objectifs, les critères techniques et les fonctionnalités à atteindre pour que le projet Memegoat soit considéré comme prêt pour la production et conforme aux normes européennes (RGPD) et françaises.

## 1. 🏗️ Architecture & Infrastructure
- [x] Backend NestJS (TypeScript)
- [x] Base de données PostgreSQL avec Drizzle ORM
- [x] Stockage d'objets compatible S3 (MinIO)
- [x] Service d'Emailing (Nodemailer / SMTPS)
- [x] Documentation Technique & Référence API (`docs.memegoat.fr`)
- [x] Health Checks (`/health`)
- [x] Gestion des variables d'environnement (Validation avec Zod)
- [ ] CI/CD (Build, Lint, Test, Deploy)

## 2. 🔐 Sécurité & Authentification
- [x] Hachage des mots de passe (Argon2id)
- [x] Gestion des sessions robuste (JWT avec Refresh Token et Rotation)
- [x] RBAC (Role Based Access Control) fonctionnel
- [x] Système de Clés API (Hachées en base)
- [x] Double Authentification (2FA / TOTP)
- [x] Limitation de débit (Rate Limiting / Throttler)
- [x] Validation stricte des entrées (DTOs + ValidationPipe)
- [x] Protection contre les vulnérabilités OWASP (Helmet, CORS)

## 3. ⚖️ Conformité RGPD (EU & France)
- [x] Chiffrement natif des données personnelles (PII) via PGP (pgcrypto)
- [x] Hachage aveugle (Blind Indexing) pour l'email (recherche/unicité)
- [x] Journalisation d'audit complète (Audit Logs) pour les actions sensibles
- [x] Gestion du consentement (Versionnage CGU/Politique de Confidentialité)
- [x] Droit à l'effacement : Flux de suppression (Soft Delete -> Purge définitive)
- [x] Droit à la portabilité : Export des données utilisateur (JSON)
- [x] Purge automatique des données obsolètes (Signalements, Sessions expirées)
- [x] Anonymisation des adresses IP (Hachage) dans les logs

## 4. 🖼️ Fonctionnalités Coeur (Media & Galerie)
- [x] Exploration (Trends, Recent, Favoris)
- [x] Recherche par Tags, Catégories, Auteur, Texte
- [x] Gestion des Favoris
- [x] Upload sécurisé via S3 (URLs présignées)
- [x] Scan Antivirus (ClamAV) et traitement des médias (WebP, WebM, AVIF, AV1)
- [x] Limitation de la taille et des formats de fichiers entrants (Configurable)
- [x] Système de Signalement (Reports) et workflow de modération
- [ ] SEO : Metatags dynamiques et slugs sémantiques

## 5. ✅ Qualité & Robustesse
- [ ] Couverture de tests unitaires (Jest) > 80%
- [ ] Tests d'intégration et E2E
- [x] Gestion centralisée des erreurs (Filters NestJS)
- [ ] Monitoring et centralisation des logs (ex: Sentry, ELK/Loki)
- [ ] Performance : Cache (Redis) pour les tendances et recherches fréquentes
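
Section 3 of the roadmap marks email blind indexing as done, and the users table in the first migration below stores the address as an encrypted bytea next to a unique 64-character email_hash. A minimal sketch of how such a blind index is typically derived (the key handling and the normalization rules are assumptions; the project's actual HashingService is not shown in this compare):

// Blind index: a deterministic keyed hash of the normalized email, so equality
// lookups and the UNIQUE constraint work without storing the address in clear text.
import { createHmac } from "node:crypto";

export function emailBlindIndex(email: string, indexKey: string): string {
  const normalized = email.trim().toLowerCase();
  // HMAC-SHA-256 produces 64 hex characters, matching the varchar(64) email_hash column.
  return createHmac("sha256", indexKey).update(normalized).digest("hex");
}

// Usage: store emailBlindIndex(email, key) in users.email_hash and query on it,
// while the email column itself stays PGP-encrypted (pgcrypto).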

backend/.migrations/0000_right_sally_floyd.sql  (new file, 177 lines)
@@ -0,0 +1,177 @@
CREATE EXTENSION IF NOT EXISTS "pgcrypto";
CREATE TYPE "public"."user_status" AS ENUM('active', 'verification', 'suspended', 'pending', 'deleted');--> statement-breakpoint
CREATE TYPE "public"."content_type" AS ENUM('meme', 'gif');--> statement-breakpoint
CREATE TYPE "public"."report_reason" AS ENUM('inappropriate', 'spam', 'copyright', 'other');--> statement-breakpoint
CREATE TYPE "public"."report_status" AS ENUM('pending', 'reviewed', 'resolved', 'dismissed');--> statement-breakpoint
CREATE TABLE "users" (
    "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "status" "user_status" DEFAULT 'pending' NOT NULL,
    "email" "bytea" NOT NULL,
    "email_hash" varchar(64) NOT NULL,
    "display_name" varchar(32),
    "username" varchar(32) NOT NULL,
    "password_hash" varchar(72) NOT NULL,
    "two_factor_secret" "bytea",
    "is_two_factor_enabled" boolean DEFAULT false NOT NULL,
    "terms_version" varchar(16),
    "privacy_version" varchar(16),
    "gdpr_accepted_at" timestamp with time zone,
    "last_login_at" timestamp with time zone,
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
    "deleted_at" timestamp with time zone,
    CONSTRAINT "users_email_hash_unique" UNIQUE("email_hash"),
    CONSTRAINT "users_username_unique" UNIQUE("username")
);
--> statement-breakpoint
CREATE TABLE "permissions" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "name" varchar(64) NOT NULL,
    "slug" varchar(64) NOT NULL,
    "description" varchar(128),
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    CONSTRAINT "permissions_name_unique" UNIQUE("name"),
    CONSTRAINT "permissions_slug_unique" UNIQUE("slug")
);
--> statement-breakpoint
CREATE TABLE "roles" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "name" varchar(64) NOT NULL,
    "slug" varchar(64) NOT NULL,
    "description" varchar(128),
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    CONSTRAINT "roles_name_unique" UNIQUE("name"),
    CONSTRAINT "roles_slug_unique" UNIQUE("slug")
);
--> statement-breakpoint
CREATE TABLE "roles_to_permissions" (
    "role_id" uuid NOT NULL,
    "permission_id" uuid NOT NULL,
    CONSTRAINT "roles_to_permissions_role_id_permission_id_pk" PRIMARY KEY("role_id","permission_id")
);
--> statement-breakpoint
CREATE TABLE "users_to_roles" (
    "user_id" uuid NOT NULL,
    "role_id" uuid NOT NULL,
    CONSTRAINT "users_to_roles_user_id_role_id_pk" PRIMARY KEY("user_id","role_id")
);
--> statement-breakpoint
CREATE TABLE "sessions" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "user_id" uuid NOT NULL,
    "refresh_token" varchar(512) NOT NULL,
    "user_agent" varchar(255),
    "ip_hash" varchar(64),
    "is_valid" boolean DEFAULT true NOT NULL,
    "expires_at" timestamp with time zone NOT NULL,
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
    CONSTRAINT "sessions_refresh_token_unique" UNIQUE("refresh_token")
);
--> statement-breakpoint
CREATE TABLE "api_keys" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "user_id" uuid NOT NULL,
    "key_hash" varchar(128) NOT NULL,
    "name" varchar(128) NOT NULL,
    "prefix" varchar(8) NOT NULL,
    "is_active" boolean DEFAULT true NOT NULL,
    "last_used_at" timestamp with time zone,
    "expires_at" timestamp with time zone,
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
    CONSTRAINT "api_keys_key_hash_unique" UNIQUE("key_hash")
);
--> statement-breakpoint
CREATE TABLE "tags" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "name" varchar(64) NOT NULL,
    "slug" varchar(64) NOT NULL,
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
    CONSTRAINT "tags_name_unique" UNIQUE("name"),
    CONSTRAINT "tags_slug_unique" UNIQUE("slug")
);
--> statement-breakpoint
CREATE TABLE "contents" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "user_id" uuid NOT NULL,
    "type" "content_type" NOT NULL,
    "title" varchar(255) NOT NULL,
    "storage_key" varchar(512) NOT NULL,
    "mime_type" varchar(128) NOT NULL,
    "file_size" integer NOT NULL,
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
    "deleted_at" timestamp with time zone,
    CONSTRAINT "contents_storage_key_unique" UNIQUE("storage_key")
);
--> statement-breakpoint
CREATE TABLE "contents_to_tags" (
    "content_id" uuid NOT NULL,
    "tag_id" uuid NOT NULL,
    CONSTRAINT "contents_to_tags_content_id_tag_id_pk" PRIMARY KEY("content_id","tag_id")
);
--> statement-breakpoint
CREATE TABLE "reports" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "reporter_id" uuid NOT NULL,
    "content_id" uuid,
    "tag_id" uuid,
    "reason" "report_reason" NOT NULL,
    "description" text,
    "status" "report_status" DEFAULT 'pending' NOT NULL,
    "expires_at" timestamp with time zone,
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    "updated_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE "audit_logs" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "user_id" uuid,
    "action" varchar(64) NOT NULL,
    "entity_type" varchar(64) NOT NULL,
    "entity_id" uuid,
    "details" jsonb,
    "ip_hash" varchar(64),
    "user_agent" varchar(255),
    "created_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
ALTER TABLE "roles_to_permissions" ADD CONSTRAINT "roles_to_permissions_role_id_roles_id_fk" FOREIGN KEY ("role_id") REFERENCES "public"."roles"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "roles_to_permissions" ADD CONSTRAINT "roles_to_permissions_permission_id_permissions_id_fk" FOREIGN KEY ("permission_id") REFERENCES "public"."permissions"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "users_to_roles" ADD CONSTRAINT "users_to_roles_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "users_to_roles" ADD CONSTRAINT "users_to_roles_role_id_roles_id_fk" FOREIGN KEY ("role_id") REFERENCES "public"."roles"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "sessions" ADD CONSTRAINT "sessions_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "api_keys" ADD CONSTRAINT "api_keys_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "contents" ADD CONSTRAINT "contents_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "contents_to_tags" ADD CONSTRAINT "contents_to_tags_content_id_contents_id_fk" FOREIGN KEY ("content_id") REFERENCES "public"."contents"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "contents_to_tags" ADD CONSTRAINT "contents_to_tags_tag_id_tags_id_fk" FOREIGN KEY ("tag_id") REFERENCES "public"."tags"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "reports" ADD CONSTRAINT "reports_reporter_id_users_uuid_fk" FOREIGN KEY ("reporter_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "reports" ADD CONSTRAINT "reports_content_id_contents_id_fk" FOREIGN KEY ("content_id") REFERENCES "public"."contents"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "reports" ADD CONSTRAINT "reports_tag_id_tags_id_fk" FOREIGN KEY ("tag_id") REFERENCES "public"."tags"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "audit_logs" ADD CONSTRAINT "audit_logs_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "users_uuid_idx" ON "users" USING btree ("uuid");--> statement-breakpoint
CREATE INDEX "users_email_hash_idx" ON "users" USING btree ("email_hash");--> statement-breakpoint
CREATE INDEX "users_username_idx" ON "users" USING btree ("username");--> statement-breakpoint
CREATE INDEX "users_status_idx" ON "users" USING btree ("status");--> statement-breakpoint
CREATE INDEX "permissions_slug_idx" ON "permissions" USING btree ("slug");--> statement-breakpoint
CREATE INDEX "roles_slug_idx" ON "roles" USING btree ("slug");--> statement-breakpoint
CREATE INDEX "sessions_user_id_idx" ON "sessions" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX "sessions_refresh_token_idx" ON "sessions" USING btree ("refresh_token");--> statement-breakpoint
CREATE INDEX "sessions_expires_at_idx" ON "sessions" USING btree ("expires_at");--> statement-breakpoint
CREATE INDEX "api_keys_user_id_idx" ON "api_keys" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX "api_keys_key_hash_idx" ON "api_keys" USING btree ("key_hash");--> statement-breakpoint
CREATE INDEX "tags_slug_idx" ON "tags" USING btree ("slug");--> statement-breakpoint
CREATE INDEX "contents_user_id_idx" ON "contents" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX "contents_storage_key_idx" ON "contents" USING btree ("storage_key");--> statement-breakpoint
CREATE INDEX "contents_deleted_at_idx" ON "contents" USING btree ("deleted_at");--> statement-breakpoint
CREATE INDEX "reports_reporter_id_idx" ON "reports" USING btree ("reporter_id");--> statement-breakpoint
CREATE INDEX "reports_content_id_idx" ON "reports" USING btree ("content_id");--> statement-breakpoint
CREATE INDEX "reports_tag_id_idx" ON "reports" USING btree ("tag_id");--> statement-breakpoint
CREATE INDEX "reports_status_idx" ON "reports" USING btree ("status");--> statement-breakpoint
CREATE INDEX "reports_expires_at_idx" ON "reports" USING btree ("expires_at");--> statement-breakpoint
CREATE INDEX "audit_logs_user_id_idx" ON "audit_logs" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX "audit_logs_action_idx" ON "audit_logs" USING btree ("action");--> statement-breakpoint
CREATE INDEX "audit_logs_entity_idx" ON "audit_logs" USING btree ("entity_type","entity_id");--> statement-breakpoint
CREATE INDEX "audit_logs_created_at_idx" ON "audit_logs" USING btree ("created_at");
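
The ROADMAP above states the schema is managed with Drizzle ORM, so this SQL is what drizzle-kit emits from TypeScript table definitions. For illustration, a Drizzle definition that would generate roughly the sessions table above could look like this (a sketch; the schema file layout and the users import path are assumptions, not taken from this compare):

// Hypothetical Drizzle definition approximating the generated "sessions" DDL above.
import {
  boolean,
  index,
  pgTable,
  timestamp,
  uuid,
  varchar,
} from "drizzle-orm/pg-core";
import { users } from "./users"; // assumed location of the users table definition

export const sessions = pgTable(
  "sessions",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    userId: uuid("user_id")
      .notNull()
      .references(() => users.uuid, { onDelete: "cascade" }),
    refreshToken: varchar("refresh_token", { length: 512 }).notNull().unique(),
    userAgent: varchar("user_agent", { length: 255 }),
    ipHash: varchar("ip_hash", { length: 64 }),
    isValid: boolean("is_valid").notNull().default(true),
    expiresAt: timestamp("expires_at", { withTimezone: true }).notNull(),
    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => [
    index("sessions_user_id_idx").on(table.userId),
    index("sessions_refresh_token_idx").on(table.refreshToken),
    index("sessions_expires_at_idx").on(table.expiresAt),
  ],
);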

backend/.migrations/0001_purple_goliath.sql  (new file, 30 lines)
@@ -0,0 +1,30 @@
CREATE TABLE "categories" (
    "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
    "name" varchar(64) NOT NULL,
    "slug" varchar(64) NOT NULL,
    "description" varchar(255),
    "icon_url" varchar(512),
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
    CONSTRAINT "categories_name_unique" UNIQUE("name"),
    CONSTRAINT "categories_slug_unique" UNIQUE("slug")
);
--> statement-breakpoint
CREATE TABLE "favorites" (
    "user_id" uuid NOT NULL,
    "content_id" uuid NOT NULL,
    "created_at" timestamp with time zone DEFAULT now() NOT NULL,
    CONSTRAINT "favorites_user_id_content_id_pk" PRIMARY KEY("user_id","content_id")
);
--> statement-breakpoint
ALTER TABLE "tags" ADD COLUMN "user_id" uuid;--> statement-breakpoint
ALTER TABLE "contents" ADD COLUMN "category_id" uuid;--> statement-breakpoint
ALTER TABLE "contents" ADD COLUMN "slug" varchar(255) NOT NULL;--> statement-breakpoint
ALTER TABLE "contents" ADD COLUMN "views" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
ALTER TABLE "contents" ADD COLUMN "usage_count" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
ALTER TABLE "favorites" ADD CONSTRAINT "favorites_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "favorites" ADD CONSTRAINT "favorites_content_id_contents_id_fk" FOREIGN KEY ("content_id") REFERENCES "public"."contents"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "categories_slug_idx" ON "categories" USING btree ("slug");--> statement-breakpoint
ALTER TABLE "tags" ADD CONSTRAINT "tags_user_id_users_uuid_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("uuid") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "contents" ADD CONSTRAINT "contents_category_id_categories_id_fk" FOREIGN KEY ("category_id") REFERENCES "public"."categories"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "contents" ADD CONSTRAINT "contents_slug_unique" UNIQUE("slug");

backend/.migrations/0002_redundant_skin.sql  (new file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE "users" ADD COLUMN "avatar_url" varchar(255);

backend/.migrations/0003_colossal_fantastic_four.sql  (new file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE "users" ALTER COLUMN "password_hash" SET DATA TYPE varchar(255);--> statement-breakpoint
ALTER TABLE "users" DROP COLUMN "avatar_url";

backend/.migrations/0004_cheerful_dakota_north.sql  (new file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE "users" ALTER COLUMN "password_hash" SET DATA TYPE varchar(95);

backend/.migrations/0005_perpetual_silverclaw.sql  (new file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE "users" ALTER COLUMN "password_hash" SET DATA TYPE varchar(100);

backend/.migrations/0006_friendly_adam_warlock.sql  (new file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE "users" ADD COLUMN "avatar_url" varchar(512);--> statement-breakpoint
ALTER TABLE "users" ADD COLUMN "bio" varchar(255);

backend/.migrations/meta/0000_snapshot.json  (new file, 1316 lines; diff suppressed: too large)
backend/.migrations/meta/0001_snapshot.json  (new file, 1513 lines; diff suppressed: too large)
backend/.migrations/meta/0002_snapshot.json  (new file, 1519 lines; diff suppressed: too large)
backend/.migrations/meta/0003_snapshot.json  (new file, 1640 lines; diff suppressed: too large)
backend/.migrations/meta/0004_snapshot.json  (new file, 1640 lines; diff suppressed: too large)
backend/.migrations/meta/0005_snapshot.json  (new file, 1640 lines; diff suppressed: too large)
backend/.migrations/meta/0006_snapshot.json  (new file, 1652 lines; diff suppressed: too large)

backend/.migrations/meta/_journal.json  (new file, 55 lines)
@@ -0,0 +1,55 @@
{
  "version": "7",
  "dialect": "postgresql",
  "entries": [
    {
      "idx": 0,
      "version": "7",
      "when": 1767618753676,
      "tag": "0000_right_sally_floyd",
      "breakpoints": true
    },
    {
      "idx": 1,
      "version": "7",
      "when": 1768392191169,
      "tag": "0001_purple_goliath",
      "breakpoints": true
    },
    {
      "idx": 2,
      "version": "7",
      "when": 1768393637823,
      "tag": "0002_redundant_skin",
      "breakpoints": true
    },
    {
      "idx": 3,
      "version": "7",
      "when": 1768415667895,
      "tag": "0003_colossal_fantastic_four",
      "breakpoints": true
    },
    {
      "idx": 4,
      "version": "7",
      "when": 1768417827439,
      "tag": "0004_cheerful_dakota_north",
      "breakpoints": true
    },
    {
      "idx": 5,
      "version": "7",
      "when": 1768420201679,
      "tag": "0005_perpetual_silverclaw",
      "breakpoints": true
    },
    {
      "idx": 6,
      "version": "7",
      "when": 1768423315172,
      "tag": "0006_friendly_adam_warlock",
      "breakpoints": true
    }
  ]
}

@@ -1,17 +1,34 @@
-FROM node:22-slim AS base
+# syntax=docker/dockerfile:1
+FROM node:22-alpine AS base
 ENV PNPM_HOME="/pnpm"
 ENV PATH="$PNPM_HOME:$PATH"
-RUN corepack enable
+RUN corepack enable && corepack prepare pnpm@latest --activate
 
 FROM base AS build
 WORKDIR /usr/src/app
+COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
+COPY backend/package.json ./backend/
+COPY frontend/package.json ./frontend/
+COPY documentation/package.json ./documentation/
+
+# Utilisation du cache pour pnpm et installation figée
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
 COPY . .
-RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
+
+# Deuxième passe avec cache pour les scripts/liens
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
 RUN pnpm run --filter @memegoat/backend build
-RUN pnpm deploy --filter=@memegoat/backend --prod /app
+RUN pnpm deploy --filter=@memegoat/backend --prod --legacy /app
+RUN cp -r backend/dist /app/dist
+RUN cp -r backend/.migrations /app/.migrations
 
 FROM base AS runtime
 WORKDIR /app
 COPY --from=build /app .
 EXPOSE 3000
-CMD [ "node", "dist/main" ]
+ENV NODE_ENV=production
+CMD [ "node", "dist/src/main" ]

@@ -7,7 +7,7 @@
   },
   "files": {
     "ignoreUnknown": true,
-    "includes": ["**", "!node_modules", "!dist", "!build"]
+    "includes": ["**", "!node_modules", "!dist", "!build", "!.migrations"]
   },
   "formatter": {
     "enabled": true,

@@ -6,7 +6,9 @@
   "private": true,
   "license": "UNLICENSED",
   "files": [
-    "dist"
+    "dist",
+    ".migrations",
+    "drizzle.config.ts"
   ],
   "scripts": {
     "build": "nest build",
@@ -60,10 +62,22 @@
     "rxjs": "^7.8.1",
     "sharp": "^0.34.5",
     "uuid": "^13.0.0",
-    "zod": "^4.3.5"
+    "zod": "^4.3.5",
+    "drizzle-kit": "^0.31.8"
   },
   "devDependencies": {
     "@nestjs/cli": "^11.0.0",
+    "globals": "^16.0.0",
+    "jest": "^30.0.0",
+    "source-map-support": "^0.5.21",
+    "supertest": "^7.0.0",
+    "ts-jest": "^29.2.5",
+    "ts-loader": "^9.5.2",
+    "ts-node": "^10.9.2",
+    "tsconfig-paths": "^4.2.0",
+    "tsx": "^4.21.0",
+    "typescript": "^5.7.3",
+    "typescript-eslint": "^8.20.0",
     "@nestjs/schematics": "^11.0.0",
     "@nestjs/testing": "^11.0.1",
     "@types/express": "^5.0.0",
@@ -77,18 +91,7 @@
     "@types/sharp": "^0.32.0",
     "@types/supertest": "^6.0.2",
     "@types/uuid": "^11.0.0",
-    "drizzle-kit": "^0.31.8",
-    "globals": "^16.0.0",
-    "jest": "^30.0.0",
-    "source-map-support": "^0.5.21",
-    "supertest": "^7.0.0",
-    "ts-jest": "^29.2.5",
-    "ts-loader": "^9.5.2",
-    "ts-node": "^10.9.2",
-    "tsconfig-paths": "^4.2.0",
-    "tsx": "^4.21.0",
-    "typescript": "^5.7.3",
-    "typescript-eslint": "^8.20.0"
+    "drizzle-kit": "^0.31.8"
   },
   "jest": {
     "moduleFileExtensions": [
@@ -104,7 +107,7 @@
     "coverageDirectory": "../coverage",
     "testEnvironment": "node",
     "transformIgnorePatterns": [
-      "node_modules/(?!(jose|@noble)/)"
+      "node_modules/(?!(.pnpm/)?(jose|@noble|uuid)/)"
     ],
     "transform": {
       "^.+\\.(t|j)sx?$": "ts-jest"

backend/src/admin/admin.controller.ts  (new file, 17 lines)
@@ -0,0 +1,17 @@
import { Controller, Get, UseGuards } from "@nestjs/common";
import { Roles } from "../auth/decorators/roles.decorator";
import { AuthGuard } from "../auth/guards/auth.guard";
import { RolesGuard } from "../auth/guards/roles.guard";
import { AdminService } from "./admin.service";

@Controller("admin")
@UseGuards(AuthGuard, RolesGuard)
@Roles("admin")
export class AdminController {
  constructor(private readonly adminService: AdminService) {}

  @Get("stats")
  getStats() {
    return this.adminService.getStats();
  }
}

backend/src/admin/admin.module.ts  (new file, 14 lines)
@@ -0,0 +1,14 @@
import { Module } from "@nestjs/common";
import { AuthModule } from "../auth/auth.module";
import { CategoriesModule } from "../categories/categories.module";
import { ContentsModule } from "../contents/contents.module";
import { UsersModule } from "../users/users.module";
import { AdminController } from "./admin.controller";
import { AdminService } from "./admin.service";

@Module({
  imports: [AuthModule, UsersModule, ContentsModule, CategoriesModule],
  controllers: [AdminController],
  providers: [AdminService],
})
export class AdminModule {}

backend/src/admin/admin.service.ts  (new file, 27 lines)
@@ -0,0 +1,27 @@
import { Injectable } from "@nestjs/common";
import { CategoriesRepository } from "../categories/repositories/categories.repository";
import { ContentsRepository } from "../contents/repositories/contents.repository";
import { UsersRepository } from "../users/repositories/users.repository";

@Injectable()
export class AdminService {
  constructor(
    private readonly usersRepository: UsersRepository,
    private readonly contentsRepository: ContentsRepository,
    private readonly categoriesRepository: CategoriesRepository,
  ) {}

  async getStats() {
    const [userCount, contentCount, categoryCount] = await Promise.all([
      this.usersRepository.countAll(),
      this.contentsRepository.count({}),
      this.categoriesRepository.countAll(),
    ]);

    return {
      users: userCount,
      contents: contentCount,
      categories: categoryCount,
    };
  }
}
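
AdminService only aggregates three repository counts, so it can be unit-tested with the same mocked-repository pattern the reworked ApiKeysService spec uses later in this compare. A possible sketch (the file name and the expected numbers are illustrative, not from the diff):

// admin.service.spec.ts (hypothetical): unit test with mocked repositories.
import { Test, TestingModule } from "@nestjs/testing";
import { CategoriesRepository } from "../categories/repositories/categories.repository";
import { ContentsRepository } from "../contents/repositories/contents.repository";
import { UsersRepository } from "../users/repositories/users.repository";
import { AdminService } from "./admin.service";

describe("AdminService", () => {
  let service: AdminService;

  const mockUsersRepository = { countAll: jest.fn().mockResolvedValue(3) };
  const mockContentsRepository = { count: jest.fn().mockResolvedValue(10) };
  const mockCategoriesRepository = { countAll: jest.fn().mockResolvedValue(2) };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        AdminService,
        { provide: UsersRepository, useValue: mockUsersRepository },
        { provide: ContentsRepository, useValue: mockContentsRepository },
        { provide: CategoriesRepository, useValue: mockCategoriesRepository },
      ],
    }).compile();

    service = module.get<AdminService>(AdminService);
  });

  it("aggregates counts from the three repositories", async () => {
    await expect(service.getStats()).resolves.toEqual({
      users: 3,
      contents: 10,
      categories: 2,
    });
    expect(mockContentsRepository.count).toHaveBeenCalledWith({});
  });
});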

@@ -11,6 +11,7 @@ import {
 import { AuthGuard } from "../auth/guards/auth.guard";
 import type { AuthenticatedRequest } from "../common/interfaces/request.interface";
 import { ApiKeysService } from "./api-keys.service";
+import { CreateApiKeyDto } from "./dto/create-api-key.dto";
 
 @Controller("api-keys")
 @UseGuards(AuthGuard)
@@ -20,13 +21,12 @@ export class ApiKeysController {
   @Post()
   create(
     @Req() req: AuthenticatedRequest,
-    @Body("name") name: string,
-    @Body("expiresAt") expiresAt?: string,
+    @Body() createApiKeyDto: CreateApiKeyDto,
   ) {
     return this.apiKeysService.create(
       req.user.sub,
-      name,
-      expiresAt ? new Date(expiresAt) : undefined,
+      createApiKeyDto.name,
+      createApiKeyDto.expiresAt ? new Date(createApiKeyDto.expiresAt) : undefined,
     );
   }
 

@@ -1,14 +1,13 @@
-import { Module } from "@nestjs/common";
+import { forwardRef, Module } from "@nestjs/common";
 import { AuthModule } from "../auth/auth.module";
-import { CryptoModule } from "../crypto/crypto.module";
-import { DatabaseModule } from "../database/database.module";
 import { ApiKeysController } from "./api-keys.controller";
 import { ApiKeysService } from "./api-keys.service";
+import { ApiKeysRepository } from "./repositories/api-keys.repository";
 
 @Module({
-  imports: [DatabaseModule, AuthModule, CryptoModule],
+  imports: [forwardRef(() => AuthModule)],
   controllers: [ApiKeysController],
-  providers: [ApiKeysService],
-  exports: [ApiKeysService],
+  providers: [ApiKeysService, ApiKeysRepository],
+  exports: [ApiKeysService, ApiKeysRepository],
 })
 export class ApiKeysModule {}

@@ -1,58 +1,43 @@
-import { createHash } from "node:crypto";
 import { Test, TestingModule } from "@nestjs/testing";
-import { DatabaseService } from "../database/database.service";
-import { apiKeys } from "../database/schemas";
+import { HashingService } from "../crypto/services/hashing.service";
 import { ApiKeysService } from "./api-keys.service";
+import { ApiKeysRepository } from "./repositories/api-keys.repository";
 
 describe("ApiKeysService", () => {
   let service: ApiKeysService;
+  let repository: ApiKeysRepository;
 
-  const mockDb = {
-    insert: jest.fn(),
-    values: jest.fn(),
-    select: jest.fn(),
-    from: jest.fn(),
-    where: jest.fn(),
-    limit: jest.fn(),
-    update: jest.fn(),
-    set: jest.fn(),
-    returning: jest.fn(),
+  const mockApiKeysRepository = {
+    create: jest.fn(),
+    findAll: jest.fn(),
+    revoke: jest.fn(),
+    findActiveByKeyHash: jest.fn(),
+    updateLastUsed: jest.fn(),
+  };
+
+  const mockHashingService = {
+    hashSha256: jest.fn().mockResolvedValue("hashed-key"),
   };
 
   beforeEach(async () => {
     jest.clearAllMocks();
 
-    mockDb.insert.mockReturnThis();
-    mockDb.values.mockResolvedValue(undefined);
-    mockDb.select.mockReturnThis();
-    mockDb.from.mockReturnThis();
-    mockDb.where.mockReturnThis();
-    mockDb.limit.mockReturnThis();
-    mockDb.update.mockReturnThis();
-    mockDb.set.mockReturnThis();
-    mockDb.returning.mockResolvedValue([]);
-
-    // Default for findAll which is awaited on where()
-    mockDb.where.mockImplementation(() => {
-      const chain = {
-        returning: jest.fn().mockResolvedValue([]),
-      };
-      return Object.assign(Promise.resolve([]), chain);
-    });
-
     const module: TestingModule = await Test.createTestingModule({
       providers: [
         ApiKeysService,
         {
-          provide: DatabaseService,
-          useValue: {
-            db: mockDb,
-          },
+          provide: ApiKeysRepository,
+          useValue: mockApiKeysRepository,
+        },
+        {
+          provide: HashingService,
+          useValue: mockHashingService,
         },
       ],
     }).compile();
 
     service = module.get<ApiKeysService>(ApiKeysService);
+    repository = module.get<ApiKeysRepository>(ApiKeysRepository);
   });
 
   it("should be defined", () => {
@@ -67,8 +52,7 @@ describe("ApiKeysService", () => {
 
       const result = await service.create(userId, name, expiresAt);
 
-      expect(mockDb.insert).toHaveBeenCalledWith(apiKeys);
-      expect(mockDb.values).toHaveBeenCalledWith(
+      expect(repository.create).toHaveBeenCalledWith(
         expect.objectContaining({
           userId,
           name,
@@ -87,12 +71,11 @@ describe("ApiKeysService", () => {
     it("should find all API keys for a user", async () => {
       const userId = "user-id";
       const expectedKeys = [{ id: "1", name: "Key 1" }];
-      (mockDb.where as jest.Mock).mockResolvedValue(expectedKeys);
+      mockApiKeysRepository.findAll.mockResolvedValue(expectedKeys);
 
       const result = await service.findAll(userId);
 
-      expect(mockDb.select).toHaveBeenCalled();
-      expect(mockDb.from).toHaveBeenCalledWith(apiKeys);
+      expect(repository.findAll).toHaveBeenCalledWith(userId);
       expect(result).toEqual(expectedKeys);
     });
   });
@@ -102,17 +85,11 @@ describe("ApiKeysService", () => {
       const userId = "user-id";
       const keyId = "key-id";
       const expectedResult = [{ id: keyId, isActive: false }];
-
-      mockDb.where.mockReturnValue({
-        returning: jest.fn().mockResolvedValue(expectedResult),
-      });
+      mockApiKeysRepository.revoke.mockResolvedValue(expectedResult);
 
       const result = await service.revoke(userId, keyId);
 
-      expect(mockDb.update).toHaveBeenCalledWith(apiKeys);
-      expect(mockDb.set).toHaveBeenCalledWith(
-        expect.objectContaining({ isActive: false }),
-      );
+      expect(repository.revoke).toHaveBeenCalledWith(userId, keyId);
       expect(result).toEqual(expectedResult);
     });
   });
@@ -120,42 +97,19 @@ describe("ApiKeysService", () => {
   describe("validateKey", () => {
     it("should validate a valid API key", async () => {
       const key = "mg_live_testkey";
-      const keyHash = createHash("sha256").update(key).digest("hex");
-      const apiKey = { id: "1", keyHash, isActive: true, expiresAt: null };
-
-      (mockDb.limit as jest.Mock).mockResolvedValue([apiKey]);
-      (mockDb.where as jest.Mock).mockResolvedValue([apiKey]); // For the update later if needed, but here it's for select
-
-      // We need to be careful with chaining mockDb.where is used in both select and update
-      const mockSelect = {
-        from: jest.fn().mockReturnThis(),
-        where: jest.fn().mockReturnThis(),
-        limit: jest.fn().mockResolvedValue([apiKey]),
-      };
-      const mockUpdate = {
-        set: jest.fn().mockReturnThis(),
-        where: jest.fn().mockResolvedValue(undefined),
-      };
-
-      (mockDb.select as jest.Mock).mockReturnValue(mockSelect);
-      (mockDb.update as jest.Mock).mockReturnValue(mockUpdate);
+      const apiKey = { id: "1", isActive: true, expiresAt: null };
+      mockApiKeysRepository.findActiveByKeyHash.mockResolvedValue(apiKey);
 
       const result = await service.validateKey(key);
 
       expect(result).toEqual(apiKey);
-      expect(mockDb.select).toHaveBeenCalled();
-      expect(mockDb.update).toHaveBeenCalledWith(apiKeys);
+      expect(repository.findActiveByKeyHash).toHaveBeenCalled();
+      expect(repository.updateLastUsed).toHaveBeenCalledWith(apiKey.id);
     });
 
     it("should return null for invalid API key", async () => {
-      (mockDb.select as jest.Mock).mockReturnValue({
-        from: jest.fn().mockReturnThis(),
-        where: jest.fn().mockReturnThis(),
-        limit: jest.fn().mockResolvedValue([]),
-      });
+      mockApiKeysRepository.findActiveByKeyHash.mockResolvedValue(null);
 
       const result = await service.validateKey("invalid-key");
 
       expect(result).toBeNull();
     });
 
@@ -164,12 +118,7 @@ describe("ApiKeysService", () => {
       const expiredDate = new Date();
       expiredDate.setFullYear(expiredDate.getFullYear() - 1);
       const apiKey = { id: "1", isActive: true, expiresAt: expiredDate };
-
-      (mockDb.select as jest.Mock).mockReturnValue({
-        from: jest.fn().mockReturnThis(),
-        where: jest.fn().mockReturnThis(),
-        limit: jest.fn().mockResolvedValue([apiKey]),
-      });
+      mockApiKeysRepository.findActiveByKeyHash.mockResolvedValue(apiKey);
 
       const result = await service.validateKey(key);
 

@@ -1,14 +1,16 @@
-import { createHash, randomBytes } from "node:crypto";
+import { randomBytes } from "node:crypto";
 import { Injectable, Logger } from "@nestjs/common";
-import { and, eq } from "drizzle-orm";
-import { DatabaseService } from "../database/database.service";
-import { apiKeys } from "../database/schemas";
+import { HashingService } from "../crypto/services/hashing.service";
+import { ApiKeysRepository } from "./repositories/api-keys.repository";
 
 @Injectable()
 export class ApiKeysService {
   private readonly logger = new Logger(ApiKeysService.name);
 
-  constructor(private readonly databaseService: DatabaseService) {}
+  constructor(
+    private readonly apiKeysRepository: ApiKeysRepository,
+    private readonly hashingService: HashingService,
+  ) {}
 
   async create(userId: string, name: string, expiresAt?: Date) {
     this.logger.log(`Creating API key for user ${userId}: ${name}`);
@@ -16,9 +18,9 @@ export class ApiKeysService {
     const randomPart = randomBytes(24).toString("hex");
     const key = `${prefix}${randomPart}`;
 
-    const keyHash = createHash("sha256").update(key).digest("hex");
+    const keyHash = await this.hashingService.hashSha256(key);
 
-    await this.databaseService.db.insert(apiKeys).values({
+    await this.apiKeysRepository.create({
       userId,
       name,
       prefix: prefix.substring(0, 8),
@@ -34,37 +36,18 @@ export class ApiKeysService {
   }
 
   async findAll(userId: string) {
-    return await this.databaseService.db
-      .select({
-        id: apiKeys.id,
-        name: apiKeys.name,
-        prefix: apiKeys.prefix,
-        isActive: apiKeys.isActive,
-        lastUsedAt: apiKeys.lastUsedAt,
-        expiresAt: apiKeys.expiresAt,
-        createdAt: apiKeys.createdAt,
-      })
-      .from(apiKeys)
-      .where(eq(apiKeys.userId, userId));
+    return await this.apiKeysRepository.findAll(userId);
   }
 
   async revoke(userId: string, keyId: string) {
     this.logger.log(`Revoking API key ${keyId} for user ${userId}`);
-    return await this.databaseService.db
-      .update(apiKeys)
-      .set({ isActive: false, updatedAt: new Date() })
-      .where(and(eq(apiKeys.id, keyId), eq(apiKeys.userId, userId)))
-      .returning();
+    return await this.apiKeysRepository.revoke(userId, keyId);
   }
 
   async validateKey(key: string) {
-    const keyHash = createHash("sha256").update(key).digest("hex");
+    const keyHash = await this.hashingService.hashSha256(key);
 
-    const [apiKey] = await this.databaseService.db
-      .select()
-      .from(apiKeys)
-      .where(and(eq(apiKeys.keyHash, keyHash), eq(apiKeys.isActive, true)))
-      .limit(1);
+    const apiKey = await this.apiKeysRepository.findActiveByKeyHash(keyHash);
 
     if (!apiKey) return null;
 
@@ -73,10 +56,7 @@ export class ApiKeysService {
     }
 
     // Update last used at
-    await this.databaseService.db
-      .update(apiKeys)
-      .set({ lastUsedAt: new Date() })
-      .where(eq(apiKeys.id, apiKey.id));
+    await this.apiKeysRepository.updateLastUsed(apiKey.id);
 
     return apiKey;
   }
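
validateKey now hashes the presented key through HashingService and delegates storage lookups to the repository, but the guard that calls it is not part of this compare. A hedged sketch of such a consumer (the guard name, the x-api-key header and the request property it sets are all assumptions):

// Hypothetical guard: checks an x-api-key header against ApiKeysService.validateKey.
import {
  CanActivate,
  ExecutionContext,
  Injectable,
  UnauthorizedException,
} from "@nestjs/common";
import type { Request } from "express";
import { ApiKeysService } from "./api-keys.service";

@Injectable()
export class ApiKeyGuard implements CanActivate {
  constructor(private readonly apiKeysService: ApiKeysService) {}

  async canActivate(context: ExecutionContext): Promise<boolean> {
    const request = context.switchToHttp().getRequest<Request>();
    const key = request.header("x-api-key");
    if (!key) {
      throw new UnauthorizedException("Missing API key");
    }

    // validateKey returns null for unknown, revoked or expired keys.
    const apiKey = await this.apiKeysService.validateKey(key);
    if (!apiKey) {
      throw new UnauthorizedException("Invalid API key");
    }

    // Attach the key record for downstream handlers (assumed convention).
    (request as Request & { apiKey?: typeof apiKey }).apiKey = apiKey;
    return true;
  }
}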

backend/src/api-keys/dto/create-api-key.dto.ts  (new file, 18 lines)
@@ -0,0 +1,18 @@
import {
  IsDateString,
  IsNotEmpty,
  IsOptional,
  IsString,
  MaxLength,
} from "class-validator";

export class CreateApiKeyDto {
  @IsString()
  @IsNotEmpty()
  @MaxLength(128)
  name!: string;

  @IsOptional()
  @IsDateString()
  expiresAt?: string;
}
58  backend/src/api-keys/repositories/api-keys.repository.ts  Normal file
@@ -0,0 +1,58 @@
+import { Injectable } from "@nestjs/common";
+import { and, eq } from "drizzle-orm";
+import { DatabaseService } from "../../database/database.service";
+import { apiKeys } from "../../database/schemas";
+
+@Injectable()
+export class ApiKeysRepository {
+  constructor(private readonly databaseService: DatabaseService) {}
+
+  async create(data: {
+    userId: string;
+    name: string;
+    prefix: string;
+    keyHash: string;
+    expiresAt?: Date;
+  }) {
+    return await this.databaseService.db.insert(apiKeys).values(data);
+  }
+
+  async findAll(userId: string) {
+    return await this.databaseService.db
+      .select({
+        id: apiKeys.id,
+        name: apiKeys.name,
+        prefix: apiKeys.prefix,
+        isActive: apiKeys.isActive,
+        lastUsedAt: apiKeys.lastUsedAt,
+        expiresAt: apiKeys.expiresAt,
+        createdAt: apiKeys.createdAt,
+      })
+      .from(apiKeys)
+      .where(eq(apiKeys.userId, userId));
+  }
+
+  async revoke(userId: string, keyId: string) {
+    return await this.databaseService.db
+      .update(apiKeys)
+      .set({ isActive: false, updatedAt: new Date() })
+      .where(and(eq(apiKeys.id, keyId), eq(apiKeys.userId, userId)))
+      .returning();
+  }
+
+  async findActiveByKeyHash(keyHash: string) {
+    const result = await this.databaseService.db
+      .select()
+      .from(apiKeys)
+      .where(and(eq(apiKeys.keyHash, keyHash), eq(apiKeys.isActive, true)))
+      .limit(1);
+    return result[0] || null;
+  }
+
+  async updateLastUsed(id: string) {
+    return await this.databaseService.db
+      .update(apiKeys)
+      .set({ lastUsedAt: new Date() })
+      .where(eq(apiKeys.id, id));
+  }
+}
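The ApiKeysModule itself is not shown in this excerpt; presumably it registers the new repository next to the service, roughly like this (the module wiring and the ApiKeysController name are assumptions):

import { Module } from "@nestjs/common";
import { DatabaseModule } from "../database/database.module";
import { ApiKeysController } from "./api-keys.controller";
import { ApiKeysService } from "./api-keys.service";
import { ApiKeysRepository } from "./repositories/api-keys.repository";

// Hypothetical wiring: the repository is provided alongside the service so
// ApiKeysService can inject it, and exported for other modules/guards.
@Module({
  imports: [DatabaseModule],
  controllers: [ApiKeysController],
  providers: [ApiKeysService, ApiKeysRepository],
  exports: [ApiKeysService, ApiKeysRepository],
})
export class ApiKeysModule {}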
@@ -1,15 +1,18 @@
 import { CacheModule } from "@nestjs/cache-manager";
-import { Module } from "@nestjs/common";
+import { MiddlewareConsumer, Module, NestModule } from "@nestjs/common";
 import { ConfigModule, ConfigService } from "@nestjs/config";
 import { ScheduleModule } from "@nestjs/schedule";
 import { ThrottlerModule } from "@nestjs/throttler";
 import { redisStore } from "cache-manager-redis-yet";
+import { AdminModule } from "./admin/admin.module";
 import { ApiKeysModule } from "./api-keys/api-keys.module";
 import { AppController } from "./app.controller";
 import { AppService } from "./app.service";
 import { AuthModule } from "./auth/auth.module";
 import { CategoriesModule } from "./categories/categories.module";
 import { CommonModule } from "./common/common.module";
+import { CrawlerDetectionMiddleware } from "./common/middlewares/crawler-detection.middleware";
+import { HTTPLoggerMiddleware } from "./common/middlewares/http-logger.middleware";
 import { validateEnv } from "./config/env.schema";
 import { ContentsModule } from "./contents/contents.module";
 import { CryptoModule } from "./crypto/crypto.module";
@@ -41,6 +44,7 @@ import { UsersModule } from "./users/users.module";
     SessionsModule,
     ReportsModule,
     ApiKeysModule,
+    AdminModule,
     ScheduleModule.forRoot(),
     ThrottlerModule.forRootAsync({
       imports: [ConfigModule],
@@ -71,4 +75,10 @@ import { UsersModule } from "./users/users.module";
   controllers: [AppController, HealthController],
   providers: [AppService],
 })
-export class AppModule {}
+export class AppModule implements NestModule {
+  configure(consumer: MiddlewareConsumer) {
+    consumer
+      .apply(HTTPLoggerMiddleware, CrawlerDetectionMiddleware)
+      .forRoutes("*");
+  }
+}
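forRoutes("*") applies both middlewares to every route. If some routes should be skipped, NestJS's exclude() can be chained before forRoutes(); a minimal sketch, assuming a hypothetical GET /health endpoint that should not flood the HTTP log:

import {
  MiddlewareConsumer,
  Module,
  NestModule,
  RequestMethod,
} from "@nestjs/common";
import { CrawlerDetectionMiddleware } from "./common/middlewares/crawler-detection.middleware";
import { HTTPLoggerMiddleware } from "./common/middlewares/http-logger.middleware";

// Sketch only: same registration as above, minus the assumed health probe route.
@Module({})
export class AppModuleSketch implements NestModule {
  configure(consumer: MiddlewareConsumer) {
    consumer
      .apply(HTTPLoggerMiddleware, CrawlerDetectionMiddleware)
      .exclude({ path: "health", method: RequestMethod.GET })
      .forRoutes("*");
  }
}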
@@ -1,16 +1,32 @@
-import { Module } from "@nestjs/common";
+import { forwardRef, Module } from "@nestjs/common";
-import { CryptoModule } from "../crypto/crypto.module";
-import { DatabaseModule } from "../database/database.module";
 import { SessionsModule } from "../sessions/sessions.module";
 import { UsersModule } from "../users/users.module";
 import { AuthController } from "./auth.controller";
 import { AuthService } from "./auth.service";
+import { AuthGuard } from "./guards/auth.guard";
+import { OptionalAuthGuard } from "./guards/optional-auth.guard";
+import { RolesGuard } from "./guards/roles.guard";
 import { RbacService } from "./rbac.service";
+import { RbacRepository } from "./repositories/rbac.repository";
 
 @Module({
-  imports: [UsersModule, CryptoModule, SessionsModule, DatabaseModule],
+  imports: [forwardRef(() => UsersModule), SessionsModule],
   controllers: [AuthController],
-  providers: [AuthService, RbacService],
-  exports: [AuthService, RbacService],
+  providers: [
+    AuthService,
+    RbacService,
+    RbacRepository,
+    AuthGuard,
+    OptionalAuthGuard,
+    RolesGuard,
+  ],
+  exports: [
+    AuthService,
+    RbacService,
+    RbacRepository,
+    AuthGuard,
+    OptionalAuthGuard,
+    RolesGuard,
+  ],
 })
 export class AuthModule {}
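forwardRef(() => UsersModule) implies a circular dependency between AuthModule and UsersModule; for Nest to resolve it, UsersModule would normally reference AuthModule the same way. UsersModule is not shown in this excerpt, so the counterpart below is a sketch under that assumption:

import { forwardRef, Module } from "@nestjs/common";
import { AuthModule } from "../auth/auth.module";
import { UsersService } from "./users.service";

// Hypothetical counterpart: both sides of the cycle wrap the other module in
// forwardRef() so Nest can resolve the providers lazily.
@Module({
  imports: [forwardRef(() => AuthModule)],
  providers: [UsersService],
  exports: [UsersService],
})
export class UsersModule {}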
@@ -1,3 +1,7 @@
+jest.mock("uuid", () => ({
+  v4: jest.fn(() => "mocked-uuid"),
+}));
+
 import { Test, TestingModule } from "@nestjs/testing";
 
 jest.mock("@noble/post-quantum/ml-kem.js", () => ({
@@ -17,14 +21,14 @@ import { BadRequestException, UnauthorizedException } from "@nestjs/common";
 import { ConfigService } from "@nestjs/config";
 import { authenticator } from "otplib";
 import * as qrcode from "qrcode";
-import { CryptoService } from "../crypto/crypto.service";
+import { HashingService } from "../crypto/services/hashing.service";
+import { JwtService } from "../crypto/services/jwt.service";
 import { SessionsService } from "../sessions/sessions.service";
 import { UsersService } from "../users/users.service";
 import { AuthService } from "./auth.service";
 
 jest.mock("otplib");
 jest.mock("qrcode");
-jest.mock("../crypto/crypto.service");
 jest.mock("../users/users.service");
 jest.mock("../sessions/sessions.service");
 
@@ -41,10 +45,13 @@ describe("AuthService", () => {
     findOneWithPrivateData: jest.fn(),
   };
 
-  const mockCryptoService = {
+  const mockHashingService = {
     hashPassword: jest.fn(),
     hashEmail: jest.fn(),
     verifyPassword: jest.fn(),
+  };
+
+  const mockJwtService = {
     generateJwt: jest.fn(),
   };
 
@@ -62,7 +69,8 @@ describe("AuthService", () => {
       providers: [
         AuthService,
         { provide: UsersService, useValue: mockUsersService },
-        { provide: CryptoService, useValue: mockCryptoService },
+        { provide: HashingService, useValue: mockHashingService },
+        { provide: JwtService, useValue: mockJwtService },
         { provide: SessionsService, useValue: mockSessionsService },
         { provide: ConfigService, useValue: mockConfigService },
       ],
@@ -142,8 +150,8 @@ describe("AuthService", () => {
        email: "test@example.com",
        password: "password",
      };
-      mockCryptoService.hashPassword.mockResolvedValue("hashed-password");
-      mockCryptoService.hashEmail.mockResolvedValue("hashed-email");
+      mockHashingService.hashPassword.mockResolvedValue("hashed-password");
+      mockHashingService.hashEmail.mockResolvedValue("hashed-email");
      mockUsersService.create.mockResolvedValue({ uuid: "new-user-id" });
 
      const result = await service.register(dto);
@@ -164,10 +172,10 @@ describe("AuthService", () => {
        passwordHash: "hash",
        isTwoFactorEnabled: false,
      };
-      mockCryptoService.hashEmail.mockResolvedValue("hashed-email");
+      mockHashingService.hashEmail.mockResolvedValue("hashed-email");
      mockUsersService.findByEmailHash.mockResolvedValue(user);
-      mockCryptoService.verifyPassword.mockResolvedValue(true);
-      mockCryptoService.generateJwt.mockResolvedValue("access-token");
+      mockHashingService.verifyPassword.mockResolvedValue(true);
+      mockJwtService.generateJwt.mockResolvedValue("access-token");
      mockSessionsService.createSession.mockResolvedValue({
        refreshToken: "refresh-token",
      });
@@ -189,9 +197,9 @@ describe("AuthService", () => {
        passwordHash: "hash",
        isTwoFactorEnabled: true,
      };
-      mockCryptoService.hashEmail.mockResolvedValue("hashed-email");
+      mockHashingService.hashEmail.mockResolvedValue("hashed-email");
      mockUsersService.findByEmailHash.mockResolvedValue(user);
-      mockCryptoService.verifyPassword.mockResolvedValue(true);
+      mockHashingService.verifyPassword.mockResolvedValue(true);
 
      const result = await service.login(dto);
 
@@ -218,7 +226,7 @@ describe("AuthService", () => {
      mockUsersService.findOneWithPrivateData.mockResolvedValue(user);
      mockUsersService.getTwoFactorSecret.mockResolvedValue("secret");
      (authenticator.verify as jest.Mock).mockReturnValue(true);
-      mockCryptoService.generateJwt.mockResolvedValue("access-token");
+      mockJwtService.generateJwt.mockResolvedValue("access-token");
      mockSessionsService.createSession.mockResolvedValue({
        refreshToken: "refresh-token",
      });
@@ -240,7 +248,7 @@ describe("AuthService", () => {
      const user = { uuid: "user-id", username: "test" };
      mockSessionsService.refreshSession.mockResolvedValue(session);
      mockUsersService.findOne.mockResolvedValue(user);
-      mockCryptoService.generateJwt.mockResolvedValue("new-access");
+      mockJwtService.generateJwt.mockResolvedValue("new-access");
 
      const result = await service.refresh(refreshToken);
 
@@ -1,5 +1,7 @@
 import {
   BadRequestException,
+  forwardRef,
+  Inject,
   Injectable,
   Logger,
   UnauthorizedException,
@@ -7,7 +9,8 @@ import {
 import { ConfigService } from "@nestjs/config";
 import { authenticator } from "otplib";
 import { toDataURL } from "qrcode";
-import { CryptoService } from "../crypto/crypto.service";
+import { HashingService } from "../crypto/services/hashing.service";
+import { JwtService } from "../crypto/services/jwt.service";
 import { SessionsService } from "../sessions/sessions.service";
 import { UsersService } from "../users/users.service";
 import { LoginDto } from "./dto/login.dto";
@@ -18,8 +21,10 @@ export class AuthService {
   private readonly logger = new Logger(AuthService.name);
 
   constructor(
+    @Inject(forwardRef(() => UsersService))
     private readonly usersService: UsersService,
-    private readonly cryptoService: CryptoService,
+    private readonly hashingService: HashingService,
+    private readonly jwtService: JwtService,
     private readonly sessionsService: SessionsService,
     private readonly configService: ConfigService,
   ) {}
@@ -81,8 +86,8 @@ export class AuthService {
     this.logger.log(`Registering new user: ${dto.username}`);
     const { username, email, password } = dto;
 
-    const passwordHash = await this.cryptoService.hashPassword(password);
-    const emailHash = await this.cryptoService.hashEmail(email);
+    const passwordHash = await this.hashingService.hashPassword(password);
+    const emailHash = await this.hashingService.hashEmail(email);
 
     const user = await this.usersService.create({
       username,
@@ -101,23 +106,26 @@ export class AuthService {
     this.logger.log(`Login attempt for email: ${dto.email}`);
     const { email, password } = dto;
 
-    const emailHash = await this.cryptoService.hashEmail(email);
+    const emailHash = await this.hashingService.hashEmail(email);
     const user = await this.usersService.findByEmailHash(emailHash);
 
     if (!user) {
+      this.logger.warn(`Login failed: user not found for email hash`);
       throw new UnauthorizedException("Invalid credentials");
     }
 
-    const isPasswordValid = await this.cryptoService.verifyPassword(
+    const isPasswordValid = await this.hashingService.verifyPassword(
       password,
       user.passwordHash,
     );
 
     if (!isPasswordValid) {
+      this.logger.warn(`Login failed: invalid password for user ${user.uuid}`);
       throw new UnauthorizedException("Invalid credentials");
     }
 
     if (user.isTwoFactorEnabled) {
+      this.logger.log(`2FA required for user ${user.uuid}`);
       return {
         message: "2FA required",
         requires2FA: true,
@@ -125,7 +133,7 @@ export class AuthService {
       };
     }
 
-    const accessToken = await this.cryptoService.generateJwt({
+    const accessToken = await this.jwtService.generateJwt({
       sub: user.uuid,
       username: user.username,
     });
@@ -136,6 +144,7 @@ export class AuthService {
       ip,
     );
 
+    this.logger.log(`User ${user.uuid} logged in successfully`);
     return {
       message: "User logged in successfully",
       access_token: accessToken,
@@ -160,10 +169,13 @@ export class AuthService {
 
     const isValid = authenticator.verify({ token, secret });
     if (!isValid) {
+      this.logger.warn(
+        `2FA verification failed for user ${userId}: invalid token`,
+      );
       throw new UnauthorizedException("Invalid 2FA token");
     }
 
-    const accessToken = await this.cryptoService.generateJwt({
+    const accessToken = await this.jwtService.generateJwt({
       sub: user.uuid,
       username: user.username,
     });
@@ -174,6 +186,7 @@ export class AuthService {
       ip,
     );
 
+    this.logger.log(`User ${userId} logged in successfully via 2FA`);
     return {
       message: "User logged in successfully (2FA)",
       access_token: accessToken,
@@ -189,7 +202,7 @@ export class AuthService {
       throw new UnauthorizedException("User not found");
     }
 
-    const accessToken = await this.cryptoService.generateJwt({
+    const accessToken = await this.jwtService.generateJwt({
       sub: user.uuid,
       username: user.username,
     });
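For context, a caller of login() now has to branch on the requires2FA flag the service returns before any token is issued. A hypothetical consumer sketch (the controller name and route are assumptions, not part of this diff):

import { Body, Controller, Post } from "@nestjs/common";
import { AuthService } from "./auth.service";
import { LoginDto } from "./dto/login.dto";

// Sketch only: when the user has 2FA enabled the service returns
// { requires2FA: true } instead of tokens, and the client must follow up
// with the TOTP code.
@Controller("auth")
export class LoginExampleController {
  constructor(private readonly authService: AuthService) {}

  @Post("login")
  async login(@Body() dto: LoginDto) {
    const result = await this.authService.login(dto);
    if ("requires2FA" in result && result.requires2FA) {
      return { next: "submit 2FA code" }; // illustrative only
    }
    return result; // message + access_token
  }
}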
@@ -1,10 +1,21 @@
-import { IsEmail, IsNotEmpty, IsString, MinLength } from "class-validator";
+import {
+  IsEmail,
+  IsNotEmpty,
+  IsString,
+  MaxLength,
+  MinLength,
+} from "class-validator";
 
 export class RegisterDto {
   @IsString()
   @IsNotEmpty()
+  @MaxLength(32)
   username!: string;
 
+  @IsString()
+  @MaxLength(32)
+  displayName?: string;
+
   @IsEmail()
   email!: string;
 
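One detail worth flagging: displayName is optional in TypeScript (displayName?) but, as diffed, carries no @IsOptional() decorator, so class-validator would reject registrations that omit it. If the field is meant to be optional, it would presumably look like this sketch:

import { IsOptional, IsString, MaxLength } from "class-validator";

export class RegisterDtoSketch {
  // Hypothetical adjustment: @IsOptional() lets the field be omitted while
  // still enforcing the string/length rules when it is present.
  @IsOptional()
  @IsString()
  @MaxLength(32)
  displayName?: string;
}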
@@ -6,13 +6,13 @@ import {
 } from "@nestjs/common";
 import { ConfigService } from "@nestjs/config";
 import { getIronSession } from "iron-session";
-import { CryptoService } from "../../crypto/crypto.service";
+import { JwtService } from "../../crypto/services/jwt.service";
 import { getSessionOptions, SessionData } from "../session.config";
 
 @Injectable()
 export class AuthGuard implements CanActivate {
   constructor(
-    private readonly cryptoService: CryptoService,
+    private readonly jwtService: JwtService,
     private readonly configService: ConfigService,
   ) {}
 
@@ -33,7 +33,7 @@ export class AuthGuard implements CanActivate {
     }
 
     try {
-      const payload = await this.cryptoService.verifyJwt(token);
+      const payload = await this.jwtService.verifyJwt(token);
       request.user = payload;
     } catch {
       throw new UnauthorizedException();
39  backend/src/auth/guards/optional-auth.guard.ts  Normal file
@@ -0,0 +1,39 @@
+import { CanActivate, ExecutionContext, Injectable } from "@nestjs/common";
+import { ConfigService } from "@nestjs/config";
+import { getIronSession } from "iron-session";
+import { JwtService } from "../../crypto/services/jwt.service";
+import { getSessionOptions, SessionData } from "../session.config";
+
+@Injectable()
+export class OptionalAuthGuard implements CanActivate {
+  constructor(
+    private readonly jwtService: JwtService,
+    private readonly configService: ConfigService,
+  ) {}
+
+  async canActivate(context: ExecutionContext): Promise<boolean> {
+    const request = context.switchToHttp().getRequest();
+    const response = context.switchToHttp().getResponse();
+
+    const session = await getIronSession<SessionData>(
+      request,
+      response,
+      getSessionOptions(this.configService.get("SESSION_PASSWORD") as string),
+    );
+
+    const token = session.accessToken;
+
+    if (!token) {
+      return true;
+    }
+
+    try {
+      const payload = await this.jwtService.verifyJwt(token);
+      request.user = payload;
+    } catch {
+      // Ignore invalid tokens for optional auth
+    }
+
+    return true;
+  }
+}
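A typical use of OptionalAuthGuard is on public endpoints that personalise their response when a session exists. A hypothetical route sketch (controller name, path, and response shape are assumptions):

import { Controller, Get, Req, UseGuards } from "@nestjs/common";
import type { Request } from "express";
import { OptionalAuthGuard } from "./guards/optional-auth.guard";

// Sketch only: the guard always lets the request through, but request.user
// is only populated when a valid session token was found.
@Controller("feed")
export class FeedExampleController {
  @Get()
  @UseGuards(OptionalAuthGuard)
  getFeed(@Req() request: Request & { user?: { sub: string } }) {
    return request.user
      ? { scope: "personalised", userId: request.user.sub }
      : { scope: "public" };
  }
}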
@@ -1,15 +1,14 @@
 import { Test, TestingModule } from "@nestjs/testing";
-import { DatabaseService } from "../database/database.service";
 import { RbacService } from "./rbac.service";
+import { RbacRepository } from "./repositories/rbac.repository";
 
 describe("RbacService", () => {
   let service: RbacService;
+  let repository: RbacRepository;
 
-  const mockDb = {
-    select: jest.fn().mockReturnThis(),
-    from: jest.fn().mockReturnThis(),
-    innerJoin: jest.fn().mockReturnThis(),
-    where: jest.fn(),
+  const mockRbacRepository = {
+    findRolesByUserId: jest.fn(),
+    findPermissionsByUserId: jest.fn(),
   };
 
   beforeEach(async () => {
@@ -18,15 +17,14 @@ describe("RbacService", () => {
       providers: [
         RbacService,
         {
-          provide: DatabaseService,
-          useValue: {
-            db: mockDb,
-          },
+          provide: RbacRepository,
+          useValue: mockRbacRepository,
         },
       ],
     }).compile();
 
     service = module.get<RbacService>(RbacService);
+    repository = module.get<RbacRepository>(RbacRepository);
   });
 
   it("should be defined", () => {
@@ -36,34 +34,28 @@ describe("RbacService", () => {
   describe("getUserRoles", () => {
     it("should return user roles", async () => {
       const userId = "user-id";
-      const mockRoles = [{ slug: "admin" }, { slug: "user" }];
-      mockDb.where.mockResolvedValue(mockRoles);
+      const mockRoles = ["admin", "user"];
+      mockRbacRepository.findRolesByUserId.mockResolvedValue(mockRoles);
 
       const result = await service.getUserRoles(userId);
 
-      expect(result).toEqual(["admin", "user"]);
-      expect(mockDb.select).toHaveBeenCalled();
-      expect(mockDb.from).toHaveBeenCalled();
-      expect(mockDb.innerJoin).toHaveBeenCalled();
+      expect(result).toEqual(mockRoles);
+      expect(repository.findRolesByUserId).toHaveBeenCalledWith(userId);
     });
   });
 
   describe("getUserPermissions", () => {
-    it("should return unique user permissions", async () => {
+    it("should return user permissions", async () => {
       const userId = "user-id";
-      const mockPermissions = [
-        { slug: "read" },
-        { slug: "write" },
-        { slug: "read" }, // Duplicate
-      ];
-      mockDb.where.mockResolvedValue(mockPermissions);
+      const mockPermissions = ["read", "write"];
+      mockRbacRepository.findPermissionsByUserId.mockResolvedValue(
+        mockPermissions,
+      );
 
       const result = await service.getUserPermissions(userId);
 
-      expect(result).toEqual(["read", "write"]);
-      expect(mockDb.select).toHaveBeenCalled();
-      expect(mockDb.from).toHaveBeenCalled();
-      expect(mockDb.innerJoin).toHaveBeenCalledTimes(2);
+      expect(result).toEqual(mockPermissions);
+      expect(repository.findPermissionsByUserId).toHaveBeenCalledWith(userId);
    });
  });
});
@@ -1,42 +1,15 @@
 import { Injectable } from "@nestjs/common";
-import { eq } from "drizzle-orm";
-import { DatabaseService } from "../database/database.service";
-import {
-  permissions,
-  roles,
-  rolesToPermissions,
-  usersToRoles,
-} from "../database/schemas";
+import { RbacRepository } from "./repositories/rbac.repository";
 
 @Injectable()
 export class RbacService {
-  constructor(private readonly databaseService: DatabaseService) {}
+  constructor(private readonly rbacRepository: RbacRepository) {}
 
   async getUserRoles(userId: string) {
-    const result = await this.databaseService.db
-      .select({
-        slug: roles.slug,
-      })
-      .from(usersToRoles)
-      .innerJoin(roles, eq(usersToRoles.roleId, roles.id))
-      .where(eq(usersToRoles.userId, userId));
-
-    return result.map((r) => r.slug);
+    return this.rbacRepository.findRolesByUserId(userId);
   }
 
   async getUserPermissions(userId: string) {
-    const result = await this.databaseService.db
-      .select({
-        slug: permissions.slug,
-      })
-      .from(usersToRoles)
-      .innerJoin(
-        rolesToPermissions,
-        eq(usersToRoles.roleId, rolesToPermissions.roleId),
-      )
-      .innerJoin(permissions, eq(rolesToPermissions.permissionId, permissions.id))
-      .where(eq(usersToRoles.userId, userId));
-
-    return Array.from(new Set(result.map((p) => p.slug)));
+    return this.rbacRepository.findPermissionsByUserId(userId);
   }
 }
42  backend/src/auth/repositories/rbac.repository.ts  Normal file
@@ -0,0 +1,42 @@
+import { Injectable } from "@nestjs/common";
+import { eq } from "drizzle-orm";
+import { DatabaseService } from "../../database/database.service";
+import {
+  permissions,
+  roles,
+  rolesToPermissions,
+  usersToRoles,
+} from "../../database/schemas";
+
+@Injectable()
+export class RbacRepository {
+  constructor(private readonly databaseService: DatabaseService) {}
+
+  async findRolesByUserId(userId: string) {
+    const result = await this.databaseService.db
+      .select({
+        slug: roles.slug,
+      })
+      .from(usersToRoles)
+      .innerJoin(roles, eq(usersToRoles.roleId, roles.id))
+      .where(eq(usersToRoles.userId, userId));
+
+    return result.map((r) => r.slug);
+  }
+
+  async findPermissionsByUserId(userId: string) {
+    const result = await this.databaseService.db
+      .select({
+        slug: permissions.slug,
+      })
+      .from(usersToRoles)
+      .innerJoin(
+        rolesToPermissions,
+        eq(usersToRoles.roleId, rolesToPermissions.roleId),
+      )
+      .innerJoin(permissions, eq(rolesToPermissions.permissionId, permissions.id))
+      .where(eq(usersToRoles.userId, userId));
+
+    return Array.from(new Set(result.map((p) => p.slug)));
+  }
+}
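The RolesGuard referenced in AuthModule is not part of this excerpt; conceptually, a guard built on RbacService would compare the roles resolved for request.user against the metadata set by the @Roles() decorator. A rough sketch (the metadata key "roles" and the request.user shape are assumptions):

import { CanActivate, ExecutionContext, Injectable } from "@nestjs/common";
import { Reflector } from "@nestjs/core";
import { RbacService } from "../rbac.service";

// Sketch only: resolves the user's role slugs through RbacService and checks
// them against the roles required via custom route metadata.
@Injectable()
export class RolesGuardSketch implements CanActivate {
  constructor(
    private readonly reflector: Reflector,
    private readonly rbacService: RbacService,
  ) {}

  async canActivate(context: ExecutionContext): Promise<boolean> {
    const required = this.reflector.get<string[]>(
      "roles", // assumed metadata key used by the @Roles() decorator
      context.getHandler(),
    );
    if (!required || required.length === 0) return true;

    const request = context.switchToHttp().getRequest();
    const userId: string | undefined = request.user?.sub;
    if (!userId) return false;

    const roles = await this.rbacService.getUserRoles(userId);
    return required.some((role) => roles.includes(role));
  }
}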
@@ -1,3 +1,4 @@
+import { CacheInterceptor, CacheKey, CacheTTL } from "@nestjs/cache-manager";
 import {
   Body,
   Controller,
@@ -9,7 +10,6 @@ import {
   UseGuards,
   UseInterceptors,
 } from "@nestjs/common";
-import { CacheInterceptor, CacheKey, CacheTTL } from "@nestjs/cache-manager";
 import { Roles } from "../auth/decorators/roles.decorator";
 import { AuthGuard } from "../auth/guards/auth.guard";
 import { RolesGuard } from "../auth/guards/roles.guard";
@@ -1,12 +1,13 @@
 import { Module } from "@nestjs/common";
-import { DatabaseModule } from "../database/database.module";
+import { AuthModule } from "../auth/auth.module";
 import { CategoriesController } from "./categories.controller";
 import { CategoriesService } from "./categories.service";
+import { CategoriesRepository } from "./repositories/categories.repository";
 
 @Module({
-  imports: [DatabaseModule],
+  imports: [AuthModule],
   controllers: [CategoriesController],
-  providers: [CategoriesService],
-  exports: [CategoriesService],
+  providers: [CategoriesService, CategoriesRepository],
+  exports: [CategoriesService, CategoriesRepository],
 })
 export class CategoriesModule {}
@@ -1,25 +1,24 @@
+import { CACHE_MANAGER } from "@nestjs/cache-manager";
 import { Test, TestingModule } from "@nestjs/testing";
-import { DatabaseService } from "../database/database.service";
-import { categories } from "../database/schemas";
 import { CategoriesService } from "./categories.service";
 import { CreateCategoryDto } from "./dto/create-category.dto";
 import { UpdateCategoryDto } from "./dto/update-category.dto";
+import { CategoriesRepository } from "./repositories/categories.repository";
 
 describe("CategoriesService", () => {
   let service: CategoriesService;
+  let repository: CategoriesRepository;
 
-  const mockDb = {
-    select: jest.fn().mockReturnThis(),
-    from: jest.fn().mockReturnThis(),
-    where: jest.fn().mockReturnThis(),
-    limit: jest.fn().mockResolvedValue([]),
-    orderBy: jest.fn().mockResolvedValue([]),
-    insert: jest.fn().mockReturnThis(),
-    values: jest.fn().mockReturnThis(),
-    update: jest.fn().mockReturnThis(),
-    set: jest.fn().mockReturnThis(),
-    delete: jest.fn().mockReturnThis(),
-    returning: jest.fn().mockResolvedValue([]),
+  const mockCategoriesRepository = {
+    findAll: jest.fn(),
+    findOne: jest.fn(),
+    create: jest.fn(),
+    update: jest.fn(),
+    remove: jest.fn(),
+  };
+
+  const mockCacheManager = {
+    del: jest.fn(),
   };
 
   beforeEach(async () => {
@@ -28,15 +27,15 @@ describe("CategoriesService", () => {
       providers: [
         CategoriesService,
         {
-          provide: DatabaseService,
-          useValue: {
-            db: mockDb,
-          },
+          provide: CategoriesRepository,
+          useValue: mockCategoriesRepository,
         },
+        { provide: CACHE_MANAGER, useValue: mockCacheManager },
       ],
     }).compile();
 
     service = module.get<CategoriesService>(CategoriesService);
+    repository = module.get<CategoriesRepository>(CategoriesRepository);
   });
 
   it("should be defined", () => {
@@ -46,28 +45,28 @@ describe("CategoriesService", () => {
   describe("findAll", () => {
     it("should return all categories ordered by name", async () => {
       const mockCategories = [{ name: "A" }, { name: "B" }];
-      mockDb.orderBy.mockResolvedValue(mockCategories);
+      mockCategoriesRepository.findAll.mockResolvedValue(mockCategories);
 
       const result = await service.findAll();
 
       expect(result).toEqual(mockCategories);
-      expect(mockDb.select).toHaveBeenCalled();
-      expect(mockDb.from).toHaveBeenCalledWith(categories);
+      expect(repository.findAll).toHaveBeenCalled();
     });
   });
 
   describe("findOne", () => {
     it("should return a category by id", async () => {
       const mockCategory = { id: "1", name: "Cat" };
-      mockDb.limit.mockResolvedValue([mockCategory]);
+      mockCategoriesRepository.findOne.mockResolvedValue(mockCategory);
 
       const result = await service.findOne("1");
 
       expect(result).toEqual(mockCategory);
+      expect(repository.findOne).toHaveBeenCalledWith("1");
     });
 
     it("should return null if category not found", async () => {
-      mockDb.limit.mockResolvedValue([]);
+      mockCategoriesRepository.findOne.mockResolvedValue(null);
       const result = await service.findOne("999");
       expect(result).toBeNull();
     });
@@ -76,12 +75,13 @@ describe("CategoriesService", () => {
   describe("create", () => {
     it("should create a category and generate slug", async () => {
       const dto: CreateCategoryDto = { name: "Test Category" };
-      mockDb.returning.mockResolvedValue([{ ...dto, slug: "test-category" }]);
+      mockCategoriesRepository.create.mockResolvedValue([
+        { ...dto, slug: "test-category" },
+      ]);
 
       const result = await service.create(dto);
 
-      expect(mockDb.insert).toHaveBeenCalledWith(categories);
-      expect(mockDb.values).toHaveBeenCalledWith({
+      expect(repository.create).toHaveBeenCalledWith({
         name: "Test Category",
         slug: "test-category",
       });
@@ -93,12 +93,14 @@ describe("CategoriesService", () => {
     it("should update a category and regenerate slug", async () => {
       const id = "1";
       const dto: UpdateCategoryDto = { name: "New Name" };
-      mockDb.returning.mockResolvedValue([{ id, ...dto, slug: "new-name" }]);
+      mockCategoriesRepository.update.mockResolvedValue([
+        { id, ...dto, slug: "new-name" },
+      ]);
 
       const result = await service.update(id, dto);
 
-      expect(mockDb.update).toHaveBeenCalledWith(categories);
-      expect(mockDb.set).toHaveBeenCalledWith(
+      expect(repository.update).toHaveBeenCalledWith(
+        id,
         expect.objectContaining({
           name: "New Name",
           slug: "new-name",
@@ -111,11 +113,11 @@ describe("CategoriesService", () => {
   describe("remove", () => {
     it("should remove a category", async () => {
       const id = "1";
-      mockDb.returning.mockResolvedValue([{ id }]);
+      mockCategoriesRepository.remove.mockResolvedValue([{ id }]);
 
       const result = await service.remove(id);
 
-      expect(mockDb.delete).toHaveBeenCalledWith(categories);
+      expect(repository.remove).toHaveBeenCalledWith(id);
       expect(result).toEqual([{ id }]);
     });
   });
@@ -1,18 +1,16 @@
-import { Injectable, Logger, Inject } from "@nestjs/common";
 import { CACHE_MANAGER } from "@nestjs/cache-manager";
-import { Cache } from "cache-manager";
-import { eq } from "drizzle-orm";
-import { DatabaseService } from "../database/database.service";
-import { categories } from "../database/schemas";
+import { Inject, Injectable, Logger } from "@nestjs/common";
+import type { Cache } from "cache-manager";
 import { CreateCategoryDto } from "./dto/create-category.dto";
 import { UpdateCategoryDto } from "./dto/update-category.dto";
+import { CategoriesRepository } from "./repositories/categories.repository";
 
 @Injectable()
 export class CategoriesService {
   private readonly logger = new Logger(CategoriesService.name);
 
   constructor(
-    private readonly databaseService: DatabaseService,
+    private readonly categoriesRepository: CategoriesRepository,
     @Inject(CACHE_MANAGER) private cacheManager: Cache,
   ) {}
 
@@ -22,20 +20,11 @@ export class CategoriesService {
   }
 
   async findAll() {
-    return await this.databaseService.db
-      .select()
-      .from(categories)
-      .orderBy(categories.name);
+    return await this.categoriesRepository.findAll();
   }
 
   async findOne(id: string) {
-    const result = await this.databaseService.db
-      .select()
-      .from(categories)
-      .where(eq(categories.id, id))
-      .limit(1);
-
-    return result[0] || null;
+    return await this.categoriesRepository.findOne(id);
   }
 
   async create(data: CreateCategoryDto) {
@@ -44,10 +33,7 @@ export class CategoriesService {
       .toLowerCase()
       .replace(/ /g, "-")
       .replace(/[^\w-]/g, "");
-    const result = await this.databaseService.db
-      .insert(categories)
-      .values({ ...data, slug })
-      .returning();
+    const result = await this.categoriesRepository.create({ ...data, slug });
 
     await this.clearCategoriesCache();
     return result;
@@ -65,11 +51,7 @@ export class CategoriesService {
             .replace(/[^\w-]/g, "")
         : undefined,
     };
-    const result = await this.databaseService.db
-      .update(categories)
-      .set(updateData)
-      .where(eq(categories.id, id))
-      .returning();
+    const result = await this.categoriesRepository.update(id, updateData);
 
     await this.clearCategoriesCache();
     return result;
@@ -77,10 +59,7 @@ export class CategoriesService {
 
   async remove(id: string) {
     this.logger.log(`Removing category: ${id}`);
-    const result = await this.databaseService.db
-      .delete(categories)
-      .where(eq(categories.id, id))
-      .returning();
+    const result = await this.categoriesRepository.remove(id);
 
     await this.clearCategoriesCache();
     return result;
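clearCategoriesCache() sits outside the diffed hunks; given the injected CACHE_MANAGER and the CacheKey/CacheTTL interceptors on the controller, it presumably just evicts the cached listing. A minimal sketch, assuming a hypothetical "categories:all" cache key:

import { Cache } from "cache-manager";

// Sketch only: drop the cached categories listing so the next read repopulates it.
async function clearCategoriesCache(cacheManager: Cache): Promise<void> {
  await cacheManager.del("categories:all");
}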
@@ -1,15 +1,18 @@
-import { IsNotEmpty, IsOptional, IsString } from "class-validator";
+import { IsNotEmpty, IsOptional, IsString, MaxLength } from "class-validator";
 
 export class CreateCategoryDto {
   @IsString()
   @IsNotEmpty()
+  @MaxLength(64)
   name!: string;
 
   @IsOptional()
   @IsString()
+  @MaxLength(255)
   description?: string;
 
   @IsOptional()
   @IsString()
+  @MaxLength(512)
   iconUrl?: string;
 }
60  backend/src/categories/repositories/categories.repository.ts  Normal file
@@ -0,0 +1,60 @@
+import { Injectable } from "@nestjs/common";
+import { eq, sql } from "drizzle-orm";
+import { DatabaseService } from "../../database/database.service";
+import { categories } from "../../database/schemas";
+import type { CreateCategoryDto } from "../dto/create-category.dto";
+import type { UpdateCategoryDto } from "../dto/update-category.dto";
+
+@Injectable()
+export class CategoriesRepository {
+  constructor(private readonly databaseService: DatabaseService) {}
+
+  async findAll() {
+    return await this.databaseService.db
+      .select()
+      .from(categories)
+      .orderBy(categories.name);
+  }
+
+  async countAll() {
+    const result = await this.databaseService.db
+      .select({ count: sql<number>`count(*)` })
+      .from(categories);
+    return Number(result[0].count);
+  }
+
+  async findOne(id: string) {
+    const result = await this.databaseService.db
+      .select()
+      .from(categories)
+      .where(eq(categories.id, id))
+      .limit(1);
+
+    return result[0] || null;
+  }
+
+  async create(data: CreateCategoryDto & { slug: string }) {
+    return await this.databaseService.db
+      .insert(categories)
+      .values(data)
+      .returning();
+  }
+
+  async update(
+    id: string,
+    data: UpdateCategoryDto & { slug?: string; updatedAt: Date },
+  ) {
+    return await this.databaseService.db
+      .update(categories)
+      .set(data)
+      .where(eq(categories.id, id))
+      .returning();
+  }
+
+  async remove(id: string) {
+    return await this.databaseService.db
+      .delete(categories)
+      .where(eq(categories.id, id))
+      .returning();
+  }
+}
@@ -1,10 +1,20 @@
-import { Global, Module } from "@nestjs/common";
+import { forwardRef, Global, Module } from "@nestjs/common";
+import { ContentsModule } from "../contents/contents.module";
 import { DatabaseModule } from "../database/database.module";
+import { ReportsModule } from "../reports/reports.module";
+import { SessionsModule } from "../sessions/sessions.module";
+import { UsersModule } from "../users/users.module";
 import { PurgeService } from "./services/purge.service";
 
 @Global()
 @Module({
-  imports: [DatabaseModule],
+  imports: [
+    DatabaseModule,
+    forwardRef(() => SessionsModule),
+    forwardRef(() => ReportsModule),
+    forwardRef(() => UsersModule),
+    forwardRef(() => ContentsModule),
+  ],
   providers: [PurgeService],
   exports: [PurgeService],
 })
@@ -9,6 +9,14 @@ import {
 import * as Sentry from "@sentry/nestjs";
 import { Request, Response } from "express";
 
+interface RequestWithUser extends Request {
+  user?: {
+    sub?: string;
+    username?: string;
+    id?: string;
+  };
+}
+
 @Catch()
 export class AllExceptionsFilter implements ExceptionFilter {
   private readonly logger = new Logger("ExceptionFilter");
@@ -16,7 +24,7 @@ export class AllExceptionsFilter implements ExceptionFilter {
   catch(exception: unknown, host: ArgumentsHost) {
     const ctx = host.switchToHttp();
     const response = ctx.getResponse<Response>();
-    const request = ctx.getRequest<Request>();
+    const request = ctx.getRequest<RequestWithUser>();
 
     const status =
       exception instanceof HttpException
@@ -28,6 +36,9 @@ export class AllExceptionsFilter implements ExceptionFilter {
         ? exception.getResponse()
         : "Internal server error";
 
+    const userId = request.user?.sub || request.user?.id;
+    const userPart = userId ? `[User: ${userId}] ` : "";
+
     const errorResponse = {
       statusCode: status,
       timestamp: new Date().toISOString(),
@@ -42,12 +53,12 @@ export class AllExceptionsFilter implements ExceptionFilter {
     if (status === HttpStatus.INTERNAL_SERVER_ERROR) {
       Sentry.captureException(exception);
       this.logger.error(
-        `${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
+        `${userPart}${request.method} ${request.url} - Error: ${exception instanceof Error ? exception.message : "Unknown error"}`,
         exception instanceof Error ? exception.stack : "",
       );
     } else {
       this.logger.warn(
-        `${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
+        `${userPart}${request.method} ${request.url} - Status: ${status} - Message: ${JSON.stringify(message)}`,
       );
     }
 
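The filter only takes effect once registered; typically that happens globally in main.ts, which is not part of this changeset. A sketch using the standard NestJS API (the filter's import path and port are assumptions):

import { NestFactory } from "@nestjs/core";
import { AppModule } from "./app.module";
import { AllExceptionsFilter } from "./common/filters/all-exceptions.filter";

// Hypothetical bootstrap excerpt: register the filter for every route.
async function bootstrap() {
  const app = await NestFactory.create(AppModule);
  app.useGlobalFilters(new AllExceptionsFilter());
  await app.listen(3001);
}
bootstrap();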
4  backend/src/common/interfaces/mail.interface.ts  Normal file
@@ -0,0 +1,4 @@
+export interface IMailService {
+  sendEmailValidation(email: string, token: string): Promise<void>;
+  sendPasswordReset(email: string, token: string): Promise<void>;
+}
26  backend/src/common/interfaces/media.interface.ts  Normal file
@@ -0,0 +1,26 @@
+export interface MediaProcessingResult {
+  buffer: Buffer;
+  mimeType: string;
+  extension: string;
+  width?: number;
+  height?: number;
+  size: number;
+}
+
+export interface ScanResult {
+  isInfected: boolean;
+  virusName?: string;
+}
+
+export interface IMediaService {
+  scanFile(buffer: Buffer, filename: string): Promise<ScanResult>;
+  processImage(
+    buffer: Buffer,
+    format?: "webp" | "avif",
+    resize?: { width?: number; height?: number },
+  ): Promise<MediaProcessingResult>;
+  processVideo(
+    buffer: Buffer,
+    format?: "webm" | "av1",
+  ): Promise<MediaProcessingResult>;
+}
38  backend/src/common/interfaces/storage.interface.ts  Normal file
@@ -0,0 +1,38 @@
+import type { Readable } from "node:stream";
+
+export interface IStorageService {
+  uploadFile(
+    fileName: string,
+    file: Buffer,
+    mimeType: string,
+    metaData?: Record<string, string>,
+    bucketName?: string,
+  ): Promise<string>;
+
+  getFile(fileName: string, bucketName?: string): Promise<Readable>;
+
+  getFileUrl(
+    fileName: string,
+    expiry?: number,
+    bucketName?: string,
+  ): Promise<string>;
+
+  getUploadUrl(
+    fileName: string,
+    expiry?: number,
+    bucketName?: string,
+  ): Promise<string>;
+
+  deleteFile(fileName: string, bucketName?: string): Promise<void>;
+
+  getFileInfo(fileName: string, bucketName?: string): Promise<unknown>;
+
+  moveFile(
+    sourceFileName: string,
+    destinationFileName: string,
+    sourceBucketName?: string,
+    destinationBucketName?: string,
+  ): Promise<string>;
+
+  getPublicUrl(storageKey: string): string;
+}
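The getUploadUrl/getPublicUrl pair suggests a presigned-upload flow: the API hands the client a short-lived upload URL and later serves the public URL. A hypothetical consumer of the interface (the key naming and 10-minute expiry are assumptions):

import type { IStorageService } from "./storage.interface";

// Sketch of a presigned upload round-trip against any IStorageService implementation.
export async function requestAvatarUpload(
  storage: IStorageService,
  userId: string,
): Promise<{ uploadUrl: string; publicUrl: string }> {
  const key = `avatars/${userId}.webp`;
  const uploadUrl = await storage.getUploadUrl(key, 600); // client uploads the file here
  const publicUrl = storage.getPublicUrl(key);            // later served to readers
  return { uploadUrl, publicUrl };
}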
@@ -0,0 +1,67 @@
+import { Injectable, Logger, NestMiddleware } from "@nestjs/common";
+import type { NextFunction, Request, Response } from "express";
+
+@Injectable()
+export class CrawlerDetectionMiddleware implements NestMiddleware {
+  private readonly logger = new Logger("CrawlerDetection");
+
+  private readonly SUSPICIOUS_PATTERNS = [
+    /\.env/,
+    /wp-admin/,
+    /wp-login/,
+    /\.git/,
+    /\.php$/,
+    /xmlrpc/,
+    /config/,
+    /setup/,
+    /wp-config/,
+    /_next/,
+    /install/,
+    /admin/,
+    /phpmyadmin/,
+    /sql/,
+    /backup/,
+    /db\./,
+    /backup\./,
+    /cgi-bin/,
+    /\.well-known\/security\.txt/, // Legitimate path, but frequently scanned
+  ];
+
+  private readonly BOT_USER_AGENTS = [
+    /bot/i,
+    /crawler/i,
+    /spider/i,
+    /python/i,
+    /curl/i,
+    /wget/i,
+    /nmap/i,
+    /nikto/i,
+    /zgrab/i,
+    /masscan/i,
+  ];
+
+  use(req: Request, res: Response, next: NextFunction) {
+    const { method, url, ip } = req;
+    const userAgent = req.get("user-agent") || "unknown";
+
+    res.on("finish", () => {
+      if (res.statusCode === 404) {
+        const isSuspiciousPath = this.SUSPICIOUS_PATTERNS.some((pattern) =>
+          pattern.test(url),
+        );
+        const isBotUserAgent = this.BOT_USER_AGENTS.some((pattern) =>
+          pattern.test(userAgent),
+        );
+
+        if (isSuspiciousPath || isBotUserAgent) {
+          this.logger.warn(
+            `Potential crawler detected: [${ip}] ${method} ${url} - User-Agent: ${userAgent}`,
+          );
+          // Here we could add logic to temporarily ban the IP via Redis
+        }
+      }
+    });
+
+    next();
+  }
+}
backend/src/common/middlewares/http-logger.middleware.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { createHash } from "node:crypto";
import { Injectable, Logger, NestMiddleware } from "@nestjs/common";
import { NextFunction, Request, Response } from "express";

@Injectable()
export class HTTPLoggerMiddleware implements NestMiddleware {
  private readonly logger = new Logger("HTTP");

  use(request: Request, response: Response, next: NextFunction): void {
    const { method, originalUrl, ip } = request;
    const userAgent = request.get("user-agent") || "";
    const startTime = Date.now();

    response.on("finish", () => {
      const { statusCode } = response;
      const contentLength = response.get("content-length");
      const duration = Date.now() - startTime;

      const hashedIp = createHash("sha256")
        .update(ip as string)
        .digest("hex");
      const message = `${method} ${originalUrl} ${statusCode} ${contentLength || 0} - ${userAgent} ${hashedIp} +${duration}ms`;

      if (statusCode >= 500) {
        return this.logger.error(message);
      }

      if (statusCode >= 400) {
        return this.logger.warn(message);
      }

      return this.logger.log(message);
    });

    next();
  }
}
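The wiring of these two middlewares is not part of this diff. A NestMiddleware is typically applied from a module's configure hook; the module name, the file path of the crawler middleware, and the "*" route selection below are assumptions.

import { MiddlewareConsumer, Module, NestModule } from "@nestjs/common";
import { CrawlerDetectionMiddleware } from "./common/middlewares/crawler-detection.middleware";
import { HTTPLoggerMiddleware } from "./common/middlewares/http-logger.middleware";

@Module({})
export class AppModuleSketch implements NestModule {
  configure(consumer: MiddlewareConsumer) {
    // Both middlewares only react on the response "finish" event, so applying
    // them to every route adds no blocking work to the request path.
    consumer
      .apply(HTTPLoggerMiddleware, CrawlerDetectionMiddleware)
      .forRoutes("*");
  }
}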
@@ -1,15 +1,23 @@
 import { Logger } from "@nestjs/common";
 import { Test, TestingModule } from "@nestjs/testing";
-import { DatabaseService } from "../../database/database.service";
+import { ContentsRepository } from "../../contents/repositories/contents.repository";
+import { ReportsRepository } from "../../reports/repositories/reports.repository";
+import { SessionsRepository } from "../../sessions/repositories/sessions.repository";
+import { UsersRepository } from "../../users/repositories/users.repository";
 import { PurgeService } from "./purge.service";

 describe("PurgeService", () => {
   let service: PurgeService;

-  const mockDb = {
-    delete: jest.fn(),
-    where: jest.fn(),
-    returning: jest.fn(),
+  const mockSessionsRepository = {
+    purgeExpired: jest.fn().mockResolvedValue([]),
+  };
+  const mockReportsRepository = {
+    purgeObsolete: jest.fn().mockResolvedValue([]),
+  };
+  const mockUsersRepository = { purgeDeleted: jest.fn().mockResolvedValue([]) };
+  const mockContentsRepository = {
+    purgeSoftDeleted: jest.fn().mockResolvedValue([]),
   };

   beforeEach(async () => {
@@ -17,22 +25,13 @@ describe("PurgeService", () => {
     jest.spyOn(Logger.prototype, "error").mockImplementation(() => {});
     jest.spyOn(Logger.prototype, "log").mockImplementation(() => {});

-    const chain = {
-      delete: jest.fn().mockReturnThis(),
-      where: jest.fn().mockReturnThis(),
-      returning: jest.fn().mockResolvedValue([]),
-    };
-
-    const mockImplementation = () => Object.assign(Promise.resolve([]), chain);
-    for (const mock of Object.values(chain)) {
-      mock.mockImplementation(mockImplementation);
-    }
-    Object.assign(mockDb, chain);
-
     const module: TestingModule = await Test.createTestingModule({
       providers: [
         PurgeService,
-        { provide: DatabaseService, useValue: { db: mockDb } },
+        { provide: SessionsRepository, useValue: mockSessionsRepository },
+        { provide: ReportsRepository, useValue: mockReportsRepository },
+        { provide: UsersRepository, useValue: mockUsersRepository },
+        { provide: ContentsRepository, useValue: mockContentsRepository },
       ],
     }).compile();

@@ -44,23 +43,22 @@ describe("PurgeService", () => {
   });

   describe("purgeExpiredData", () => {
-    it("should purge data", async () => {
-      mockDb.returning
-        .mockResolvedValueOnce([{ id: "s1" }]) // sessions
-        .mockResolvedValueOnce([{ id: "r1" }]) // reports
-        .mockResolvedValueOnce([{ id: "u1" }]) // users
-        .mockResolvedValueOnce([{ id: "c1" }]); // contents
+    it("should purge data using repositories", async () => {
+      mockSessionsRepository.purgeExpired.mockResolvedValue([{ id: "s1" }]);
+      mockReportsRepository.purgeObsolete.mockResolvedValue([{ id: "r1" }]);
+      mockUsersRepository.purgeDeleted.mockResolvedValue([{ id: "u1" }]);
+      mockContentsRepository.purgeSoftDeleted.mockResolvedValue([{ id: "c1" }]);

       await service.purgeExpiredData();

-      expect(mockDb.delete).toHaveBeenCalledTimes(4);
-      expect(mockDb.returning).toHaveBeenCalledTimes(4);
+      expect(mockSessionsRepository.purgeExpired).toHaveBeenCalled();
+      expect(mockReportsRepository.purgeObsolete).toHaveBeenCalled();
+      expect(mockUsersRepository.purgeDeleted).toHaveBeenCalled();
+      expect(mockContentsRepository.purgeSoftDeleted).toHaveBeenCalled();
     });

     it("should handle errors", async () => {
-      mockDb.delete.mockImplementation(() => {
-        throw new Error("Db error");
-      });
+      mockSessionsRepository.purgeExpired.mockRejectedValue(new Error("Db error"));
       await expect(service.purgeExpiredData()).resolves.not.toThrow();
     });
   });
@@ -1,14 +1,20 @@
 import { Injectable, Logger } from "@nestjs/common";
 import { Cron, CronExpression } from "@nestjs/schedule";
-import { and, eq, isNotNull, lte } from "drizzle-orm";
-import { DatabaseService } from "../../database/database.service";
-import { contents, reports, sessions, users } from "../../database/schemas";
+import { ContentsRepository } from "../../contents/repositories/contents.repository";
+import { ReportsRepository } from "../../reports/repositories/reports.repository";
+import { SessionsRepository } from "../../sessions/repositories/sessions.repository";
+import { UsersRepository } from "../../users/repositories/users.repository";

 @Injectable()
 export class PurgeService {
   private readonly logger = new Logger(PurgeService.name);

-  constructor(private readonly databaseService: DatabaseService) {}
+  constructor(
+    private readonly sessionsRepository: SessionsRepository,
+    private readonly reportsRepository: ReportsRepository,
+    private readonly usersRepository: UsersRepository,
+    private readonly contentsRepository: ContentsRepository,
+  ) {}

   // Every night at midnight
   @Cron(CronExpression.EVERY_DAY_AT_MIDNIGHT)
@@ -19,40 +25,25 @@ export class PurgeService {
     const now = new Date();

     // 1. Purge expired sessions
-    const deletedSessions = await this.databaseService.db
-      .delete(sessions)
-      .where(lte(sessions.expiresAt, now))
-      .returning();
+    const deletedSessions = await this.sessionsRepository.purgeExpired(now);
     this.logger.log(`Purged ${deletedSessions.length} expired sessions.`);

     // 2. Purge obsolete reports
-    const deletedReports = await this.databaseService.db
-      .delete(reports)
-      .where(lte(reports.expiresAt, now))
-      .returning();
+    const deletedReports = await this.reportsRepository.purgeObsolete(now);
     this.logger.log(`Purged ${deletedReports.length} obsolete reports.`);

     // 3. Purge deleted users (soft-deleted more than 30 days ago)
     const thirtyDaysAgo = new Date();
     thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);

-    const deletedUsers = await this.databaseService.db
-      .delete(users)
-      .where(
-        and(eq(users.status, "deleted"), lte(users.deletedAt, thirtyDaysAgo)),
-      )
-      .returning();
+    const deletedUsers = await this.usersRepository.purgeDeleted(thirtyDaysAgo);
     this.logger.log(
       `Purged ${deletedUsers.length} users marked for deletion more than 30 days ago.`,
     );

     // 4. Purge deleted contents (soft-deleted more than 30 days ago)
-    const deletedContents = await this.databaseService.db
-      .delete(contents)
-      .where(
-        and(isNotNull(contents.deletedAt), lte(contents.deletedAt, thirtyDaysAgo)),
-      )
-      .returning();
+    const deletedContents =
+      await this.contentsRepository.purgeSoftDeleted(thirtyDaysAgo);
     this.logger.log(
       `Purged ${deletedContents.length} contents marked for deletion more than 30 days ago.`,
     );
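The sessions, reports and users repositories referenced above are not part of this excerpt. Based on the drizzle query that was removed from PurgeService (and on ContentsRepository.purgeSoftDeleted further down), a purge helper presumably looks roughly like this sketch; the class name is assumed.

import { Injectable } from "@nestjs/common";
import { lte } from "drizzle-orm";
import { DatabaseService } from "../../database/database.service";
import { sessions } from "../../database/schemas";

@Injectable()
export class SessionsRepositorySketch {
  constructor(private readonly databaseService: DatabaseService) {}

  async purgeExpired(now: Date) {
    // Delete every session whose expiry is in the past and return the rows,
    // so PurgeService can log how many were removed.
    return await this.databaseService.db
      .delete(sessions)
      .where(lte(sessions.expiresAt, now))
      .returning();
  }
}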
@@ -33,6 +33,7 @@ export const envSchema = z.object({
   MAIL_FROM: z.string().email(),

   DOMAIN_NAME: z.string(),
+  API_URL: z.string().url().optional(),

   // Sentry
   SENTRY_DSN: z.string().optional(),
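A minimal sketch of how the new optional API_URL behaves at validation time, assuming the schema is applied with a zod parse call; the trimmed-down schema below is illustrative, not the real envSchema.

import { z } from "zod";

const schema = z.object({ API_URL: z.string().url().optional() });

schema.parse({});                                           // ok: API_URL may be absent
schema.parse({ API_URL: "https://api.memegoat.example" });  // ok: valid URL
// schema.parse({ API_URL: "not-a-url" });                  // would throw a ZodError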
@@ -19,8 +19,11 @@ import {
   UseInterceptors,
 } from "@nestjs/common";
 import { FileInterceptor } from "@nestjs/platform-express";
-import type { Request, Response } from "express";
+import type { Response } from "express";
+import { Roles } from "../auth/decorators/roles.decorator";
 import { AuthGuard } from "../auth/guards/auth.guard";
+import { OptionalAuthGuard } from "../auth/guards/optional-auth.guard";
+import { RolesGuard } from "../auth/guards/roles.guard";
 import type { AuthenticatedRequest } from "../common/interfaces/request.interface";
 import { ContentsService } from "./contents.service";
 import { CreateContentDto } from "./dto/create-content.dto";
@@ -65,10 +68,12 @@ export class ContentsController {
   }

   @Get("explore")
+  @UseGuards(OptionalAuthGuard)
   @UseInterceptors(CacheInterceptor)
   @CacheTTL(60)
   @Header("Cache-Control", "public, max-age=60")
   explore(
+    @Req() req: AuthenticatedRequest,
     @Query("limit", new DefaultValuePipe(10), ParseIntPipe) limit: number,
     @Query("offset", new DefaultValuePipe(0), ParseIntPipe) offset: number,
     @Query("sort") sort?: "trend" | "recent",
@@ -78,7 +83,7 @@ export class ContentsController {
     @Query("query") query?: string,
     @Query("favoritesOnly", new DefaultValuePipe(false), ParseBoolPipe)
     favoritesOnly?: boolean,
-    @Query("userId") userId?: string,
+    @Query("userId") userIdQuery?: string,
   ) {
     return this.contentsService.findAll({
       limit,
@@ -89,42 +94,57 @@ export class ContentsController {
       author,
       query,
       favoritesOnly,
-      userId,
+      userId: userIdQuery || req.user?.sub,
     });
   }

   @Get("trends")
+  @UseGuards(OptionalAuthGuard)
   @UseInterceptors(CacheInterceptor)
   @CacheTTL(300)
   @Header("Cache-Control", "public, max-age=300")
   trends(
+    @Req() req: AuthenticatedRequest,
     @Query("limit", new DefaultValuePipe(10), ParseIntPipe) limit: number,
     @Query("offset", new DefaultValuePipe(0), ParseIntPipe) offset: number,
   ) {
-    return this.contentsService.findAll({ limit, offset, sortBy: "trend" });
+    return this.contentsService.findAll({
+      limit,
+      offset,
+      sortBy: "trend",
+      userId: req.user?.sub,
+    });
   }

   @Get("recent")
+  @UseGuards(OptionalAuthGuard)
   @UseInterceptors(CacheInterceptor)
   @CacheTTL(60)
   @Header("Cache-Control", "public, max-age=60")
   recent(
+    @Req() req: AuthenticatedRequest,
     @Query("limit", new DefaultValuePipe(10), ParseIntPipe) limit: number,
     @Query("offset", new DefaultValuePipe(0), ParseIntPipe) offset: number,
   ) {
-    return this.contentsService.findAll({ limit, offset, sortBy: "recent" });
+    return this.contentsService.findAll({
+      limit,
+      offset,
+      sortBy: "recent",
+      userId: req.user?.sub,
+    });
   }

   @Get(":idOrSlug")
+  @UseGuards(OptionalAuthGuard)
   @UseInterceptors(CacheInterceptor)
   @CacheTTL(3600)
   @Header("Cache-Control", "public, max-age=3600")
   async findOne(
     @Param("idOrSlug") idOrSlug: string,
-    @Req() req: Request,
+    @Req() req: AuthenticatedRequest,
     @Res() res: Response,
   ) {
-    const content = await this.contentsService.findOne(idOrSlug);
+    const content = await this.contentsService.findOne(idOrSlug, req.user?.sub);
     if (!content) {
       throw new NotFoundException("Contenu non trouvé");
     }
@@ -136,25 +156,7 @@ export class ContentsController {
     );

     if (isBot) {
-      const imageUrl = this.contentsService.getFileUrl(content.storageKey);
-      const html = `<!DOCTYPE html>
-<html>
-  <head>
-    <meta charset="UTF-8">
-    <title>${content.title}</title>
-    <meta property="og:title" content="${content.title}" />
-    <meta property="og:type" content="website" />
-    <meta property="og:image" content="${imageUrl}" />
-    <meta property="og:description" content="Découvrez ce meme sur Memegoat" />
-    <meta name="twitter:card" content="summary_large_image" />
-    <meta name="twitter:title" content="${content.title}" />
-    <meta name="twitter:image" content="${imageUrl}" />
-  </head>
-  <body>
-    <h1>${content.title}</h1>
-    <img src="${imageUrl}" alt="${content.title}" />
-  </body>
-</html>`;
+      const html = this.contentsService.generateBotHtml(content);
       return res.send(html);
     }

@@ -176,4 +178,11 @@ export class ContentsController {
   remove(@Param("id") id: string, @Req() req: AuthenticatedRequest) {
     return this.contentsService.remove(id, req.user.sub);
   }
+
+  @Delete(":id/admin")
+  @UseGuards(AuthGuard, RolesGuard)
+  @Roles("admin")
+  removeAdmin(@Param("id") id: string) {
+    return this.contentsService.removeAdmin(id);
+  }
 }
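The OptionalAuthGuard itself is only referenced here, not shown. A guard of this kind is usually a CanActivate that attaches req.user when valid credentials are present but never rejects the request, so the handlers above can read req.user?.sub for anonymous and authenticated callers alike. Everything below beyond that idea is a hedged sketch; resolveUser stands in for whatever token/session check the project's AuthGuard performs.

import { CanActivate, ExecutionContext, Injectable } from "@nestjs/common";
import type { Request } from "express";

@Injectable()
export class OptionalAuthGuardSketch implements CanActivate {
  async canActivate(context: ExecutionContext): Promise<boolean> {
    const req = context
      .switchToHttp()
      .getRequest<Request & { user?: { sub: string } }>();
    const token = req.headers.authorization?.replace(/^Bearer /i, "");

    if (token) {
      // Best effort only: an invalid token is ignored instead of throwing.
      req.user = await this.resolveUser(token).catch(() => undefined);
    }
    // Never block: anonymous access stays allowed on these cached routes.
    return true;
  }

  // Placeholder: the real guard would delegate to the project's auth service.
  private async resolveUser(_token: string): Promise<{ sub: string } | undefined> {
    return undefined;
  }
}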
@@ -1,15 +1,15 @@
 import { Module } from "@nestjs/common";
 import { AuthModule } from "../auth/auth.module";
-import { CryptoModule } from "../crypto/crypto.module";
-import { DatabaseModule } from "../database/database.module";
 import { MediaModule } from "../media/media.module";
 import { S3Module } from "../s3/s3.module";
 import { ContentsController } from "./contents.controller";
 import { ContentsService } from "./contents.service";
+import { ContentsRepository } from "./repositories/contents.repository";

 @Module({
-  imports: [DatabaseModule, S3Module, AuthModule, CryptoModule, MediaModule],
+  imports: [S3Module, AuthModule, MediaModule],
   controllers: [ContentsController],
-  providers: [ContentsService],
+  providers: [ContentsService, ContentsRepository],
+  exports: [ContentsRepository],
 })
 export class ContentsModule {}
@@ -2,40 +2,35 @@ jest.mock("uuid", () => ({
   v4: jest.fn(() => "mocked-uuid"),
 }));

+import { CACHE_MANAGER } from "@nestjs/cache-manager";
 import { BadRequestException } from "@nestjs/common";
 import { ConfigService } from "@nestjs/config";
 import { Test, TestingModule } from "@nestjs/testing";
-import { DatabaseService } from "../database/database.service";
 import { MediaService } from "../media/media.service";
 import { S3Service } from "../s3/s3.service";
 import { ContentsService } from "./contents.service";
+import { ContentsRepository } from "./repositories/contents.repository";

 describe("ContentsService", () => {
   let service: ContentsService;
   let s3Service: S3Service;
   let mediaService: MediaService;

-  const mockDb = {
-    select: jest.fn().mockReturnThis(),
-    from: jest.fn().mockReturnThis(),
-    where: jest.fn().mockReturnThis(),
-    limit: jest.fn().mockReturnThis(),
-    offset: jest.fn().mockReturnThis(),
-    orderBy: jest.fn().mockReturnThis(),
-    innerJoin: jest.fn().mockReturnThis(),
-    insert: jest.fn().mockReturnThis(),
-    values: jest.fn().mockReturnThis(),
-    update: jest.fn().mockReturnThis(),
-    set: jest.fn().mockReturnThis(),
-    returning: jest.fn().mockResolvedValue([]),
-    onConflictDoNothing: jest.fn().mockReturnThis(),
-    transaction: jest.fn().mockImplementation((cb) => cb(mockDb)),
-    execute: jest.fn().mockResolvedValue([]),
+  const mockContentsRepository = {
+    findAll: jest.fn(),
+    count: jest.fn(),
+    create: jest.fn(),
+    incrementViews: jest.fn(),
+    incrementUsage: jest.fn(),
+    softDelete: jest.fn(),
+    findOne: jest.fn(),
+    findBySlug: jest.fn(),
   };

   const mockS3Service = {
     getUploadUrl: jest.fn(),
     uploadFile: jest.fn(),
+    getPublicUrl: jest.fn(),
   };

   const mockMediaService = {
@@ -48,46 +43,22 @@ describe("ContentsService", () => {
     get: jest.fn(),
   };

+  const mockCacheManager = {
+    clear: jest.fn(),
+    del: jest.fn(),
+  };
+
   beforeEach(async () => {
     jest.clearAllMocks();

-    const chain = {
-      select: jest.fn().mockReturnThis(),
-      from: jest.fn().mockReturnThis(),
-      where: jest.fn().mockReturnThis(),
-      orderBy: jest.fn().mockReturnThis(),
-      limit: jest.fn().mockReturnThis(),
-      offset: jest.fn().mockReturnThis(),
-      innerJoin: jest.fn().mockReturnThis(),
-      insert: jest.fn().mockReturnThis(),
-      values: jest.fn().mockReturnThis(),
-      update: jest.fn().mockReturnThis(),
-      set: jest.fn().mockReturnThis(),
-      returning: jest.fn().mockReturnThis(),
-      onConflictDoNothing: jest.fn().mockReturnThis(),
-    };
-
-    const mockImplementation = () => {
-      return Object.assign(Promise.resolve([]), chain);
-    };
-
-    for (const mock of Object.values(chain)) {
-      //TODO Fix : TS2774: This condition will always return true since this function is always defined. Did you mean to call it instead?
-      if (mock.mockReturnValue) {
-        mock.mockImplementation(mockImplementation);
-      }
-    }
-
-    Object.assign(mockDb, chain);
-
     const module: TestingModule = await Test.createTestingModule({
       providers: [
         ContentsService,
-        { provide: DatabaseService, useValue: { db: mockDb } },
+        { provide: ContentsRepository, useValue: mockContentsRepository },
         { provide: S3Service, useValue: mockS3Service },
         { provide: MediaService, useValue: mockMediaService },
        { provide: ConfigService, useValue: mockConfigService },
+        { provide: CACHE_MANAGER, useValue: mockCacheManager },
       ],
     }).compile();

@@ -127,7 +98,8 @@ describe("ContentsService", () => {
       mimeType: "image/webp",
       size: 500,
     });
-    mockDb.returning.mockResolvedValue([{ id: "content-id" }]);
+    mockContentsRepository.findBySlug.mockResolvedValue(null);
+    mockContentsRepository.create.mockResolvedValue({ id: "content-id" });

     const result = await service.uploadAndProcess("user1", file, {
       title: "Meme",
@@ -155,8 +127,8 @@ describe("ContentsService", () => {

   describe("findAll", () => {
     it("should return contents and total count", async () => {
-      mockDb.where.mockResolvedValueOnce([{ count: 10 }]); // for count
-      mockDb.offset.mockResolvedValueOnce([{ id: "1" }]); // for data
+      mockContentsRepository.count.mockResolvedValue(10);
+      mockContentsRepository.findAll.mockResolvedValue([{ id: "1" }]);

       const result = await service.findAll({ limit: 10, offset: 0 });

@@ -167,9 +139,11 @@ describe("ContentsService", () => {

   describe("incrementViews", () => {
     it("should increment views", async () => {
-      mockDb.returning.mockResolvedValue([{ id: "1", views: 1 }]);
+      mockContentsRepository.incrementViews.mockResolvedValue([
+        { id: "1", views: 1 },
+      ]);
       const result = await service.incrementViews("1");
-      expect(mockDb.update).toHaveBeenCalled();
+      expect(mockContentsRepository.incrementViews).toHaveBeenCalledWith("1");
       expect(result[0].views).toBe(1);
     });
   });
@@ -1,52 +1,39 @@
-import { BadRequestException, Injectable, Logger } from "@nestjs/common";
 import { CACHE_MANAGER } from "@nestjs/cache-manager";
-import { Cache } from "cache-manager";
-import { Inject } from "@nestjs/common";
+import {
+  BadRequestException,
+  Inject,
+  Injectable,
+  Logger,
+} from "@nestjs/common";
 import { ConfigService } from "@nestjs/config";
-import {
-  and,
-  desc,
-  eq,
-  exists,
-  ilike,
-  isNull,
-  type SQL,
-  sql,
-} from "drizzle-orm";
+import type { Cache } from "cache-manager";
 import { v4 as uuidv4 } from "uuid";
-import { DatabaseService } from "../database/database.service";
-import {
-  categories,
-  contents,
-  contentsToTags,
-  favorites,
-  tags,
-  users,
-} from "../database/schemas";
-import type { MediaProcessingResult } from "../media/interfaces/media.interface";
+import type {
+  IMediaService,
+  MediaProcessingResult,
+} from "../common/interfaces/media.interface";
+import type { IStorageService } from "../common/interfaces/storage.interface";
 import { MediaService } from "../media/media.service";
 import { S3Service } from "../s3/s3.service";
 import { CreateContentDto } from "./dto/create-content.dto";
+import { UploadContentDto } from "./dto/upload-content.dto";
+import { ContentsRepository } from "./repositories/contents.repository";

 @Injectable()
 export class ContentsService {
   private readonly logger = new Logger(ContentsService.name);

   constructor(
-    private readonly databaseService: DatabaseService,
-    private readonly s3Service: S3Service,
-    private readonly mediaService: MediaService,
+    private readonly contentsRepository: ContentsRepository,
+    @Inject(S3Service) private readonly s3Service: IStorageService,
+    @Inject(MediaService) private readonly mediaService: IMediaService,
     private readonly configService: ConfigService,
     @Inject(CACHE_MANAGER) private cacheManager: Cache,
   ) {}

   private async clearContentsCache() {
     this.logger.log("Clearing contents cache");
-    const keys = await this.cacheManager.store.keys();
-    const contentsKeys = keys.filter((key) => key.startsWith("contents/"));
-    for (const key of contentsKeys) {
-      await this.cacheManager.del(key);
-    }
+    await this.cacheManager.clear();
   }

   async getUploadUrl(userId: string, fileName: string) {
@@ -58,12 +45,7 @@ export class ContentsService {
   async uploadAndProcess(
     userId: string,
     file: Express.Multer.File,
-    data: {
-      title: string;
-      type: "meme" | "gif";
-      categoryId?: string;
-      tags?: string[];
-    },
+    data: UploadContentDto,
   ) {
     this.logger.log(`Uploading and processing file for user ${userId}`);
     // 0. Validate file format and size
@@ -118,6 +100,7 @@ export class ContentsService {
     // 3. Upload to S3
     const key = `contents/${userId}/${Date.now()}-${uuidv4()}.${processed.extension}`;
     await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
+    this.logger.log(`File uploaded successfully to S3: ${key}`);

     // 4. Create the record in the database
     return await this.create(userId, {
@@ -139,105 +122,23 @@ export class ContentsService {
     favoritesOnly?: boolean;
     userId?: string; // Required when favoritesOnly is true
   }) {
-    const {
-      limit,
-      offset,
-      sortBy,
-      tag,
-      category,
-      author,
-      query,
-      favoritesOnly,
-      userId,
-    } = options;
-
-    let whereClause: SQL | undefined = isNull(contents.deletedAt);
-
-    if (tag) {
-      whereClause = and(
-        whereClause,
-        exists(
-          this.databaseService.db
-            .select()
-            .from(contentsToTags)
-            .innerJoin(tags, eq(contentsToTags.tagId, tags.id))
-            .where(
-              and(eq(contentsToTags.contentId, contents.id), eq(tags.name, tag)),
-            ),
-        ),
-      );
-    }
-
-    if (author) {
-      whereClause = and(
-        whereClause,
-        exists(
-          this.databaseService.db
-            .select()
-            .from(users)
-            .where(and(eq(users.uuid, contents.userId), eq(users.username, author))),
-        ),
-      );
-    }
-
-    if (category) {
-      whereClause = and(
-        whereClause,
-        exists(
-          this.databaseService.db
-            .select()
-            .from(categories)
-            .where(
-              and(
-                eq(categories.id, contents.categoryId),
-                sql`(${categories.slug} = ${category} OR ${categories.id}::text = ${category})`,
-              ),
-            ),
-        ),
-      );
-    }
-
-    if (query) {
-      whereClause = and(whereClause, ilike(contents.title, `%${query}%`));
-    }
-
-    if (favoritesOnly && userId) {
-      whereClause = and(
-        whereClause,
-        exists(
-          this.databaseService.db
-            .select()
-            .from(favorites)
-            .where(
-              and(eq(favorites.contentId, contents.id), eq(favorites.userId, userId)),
-            ),
-        ),
-      );
-    }
-
-    // Pagination Total Count
-    const totalCountResult = await this.databaseService.db
-      .select({ count: sql<number>`count(*)` })
-      .from(contents)
-      .where(whereClause);
-
-    const totalCount = Number(totalCountResult[0].count);
-
-    // Sorting
-    let orderBy: SQL = desc(contents.createdAt);
-    if (sortBy === "trend") {
-      orderBy = desc(sql`${contents.views} + ${contents.usageCount}`);
-    }
-
-    const data = await this.databaseService.db
-      .select()
-      .from(contents)
-      .where(whereClause)
-      .orderBy(orderBy)
-      .limit(limit)
-      .offset(offset);
-
-    return { data, totalCount };
+    const [data, totalCount] = await Promise.all([
+      this.contentsRepository.findAll(options),
+      this.contentsRepository.count(options),
+    ]);
+
+    const processedData = data.map((content) => ({
+      ...content,
+      url: this.s3Service.getPublicUrl(content.storageKey),
+      author: {
+        ...content.author,
+        avatarUrl: content.author?.avatarUrl
+          ? this.s3Service.getPublicUrl(content.author.avatarUrl)
+          : null,
+      },
+    }));
+
+    return { data: processedData, totalCount };
   }

   async create(userId: string, data: CreateContentDto) {
@@ -246,101 +147,79 @@ export class ContentsService {

     const slug = await this.ensureUniqueSlug(contentData.title);

-    return await this.databaseService.db.transaction(async (tx) => {
-      const [newContent] = await tx
-        .insert(contents)
-        .values({ ...contentData, userId, slug })
-        .returning();
-
-      if (tagNames && tagNames.length > 0) {
-        for (const tagName of tagNames) {
-          const slug = tagName
-            .toLowerCase()
-            .replace(/ /g, "-")
-            .replace(/[^\w-]/g, "");
-
-          // Find or create the tag
-          let [tag] = await tx
-            .select()
-            .from(tags)
-            .where(eq(tags.slug, slug))
-            .limit(1);
-
-          if (!tag) {
-            [tag] = await tx
-              .insert(tags)
-              .values({ name: tagName, slug, userId })
-              .returning();
-          }
-
-          // Link the tag to the content
-          await tx
-            .insert(contentsToTags)
-            .values({ contentId: newContent.id, tagId: tag.id })
-            .onConflictDoNothing();
-        }
-      }
-
-      await this.clearContentsCache();
-      return newContent;
-    });
+    const newContent = await this.contentsRepository.create(
+      { ...contentData, userId, slug },
+      tagNames,
+    );
+
+    await this.clearContentsCache();
+    return newContent;
   }

   async incrementViews(id: string) {
-    return await this.databaseService.db
-      .update(contents)
-      .set({ views: sql`${contents.views} + 1` })
-      .where(eq(contents.id, id))
-      .returning();
+    return await this.contentsRepository.incrementViews(id);
   }

   async incrementUsage(id: string) {
-    return await this.databaseService.db
-      .update(contents)
-      .set({ usageCount: sql`${contents.usageCount} + 1` })
-      .where(eq(contents.id, id))
-      .returning();
+    return await this.contentsRepository.incrementUsage(id);
   }

   async remove(id: string, userId: string) {
     this.logger.log(`Removing content ${id} for user ${userId}`);
-    const result = await this.databaseService.db
-      .update(contents)
-      .set({ deletedAt: new Date() })
-      .where(and(eq(contents.id, id), eq(contents.userId, userId)))
-      .returning();
-
-    if (result.length > 0) {
+    const deleted = await this.contentsRepository.softDelete(id, userId);
+
+    if (deleted) {
       await this.clearContentsCache();
     }
-    return result;
+    return deleted;
   }

-  async findOne(idOrSlug: string) {
-    const [content] = await this.databaseService.db
-      .select()
-      .from(contents)
-      .where(
-        and(
-          isNull(contents.deletedAt),
-          sql`(${contents.id}::text = ${idOrSlug} OR ${contents.slug} = ${idOrSlug})`,
-        ),
-      )
-      .limit(1);
-    return content;
-  }
-
-  getFileUrl(storageKey: string): string {
-    const endpoint = this.configService.get("S3_ENDPOINT");
-    const port = this.configService.get("S3_PORT");
-    const protocol =
-      this.configService.get("S3_USE_SSL") === true ? "https" : "http";
-    const bucket = this.configService.get("S3_BUCKET_NAME");
-
-    if (endpoint === "localhost" || endpoint === "127.0.0.1") {
-      return `${protocol}://${endpoint}:${port}/${bucket}/${storageKey}`;
-    }
-    return `${protocol}://${endpoint}/${bucket}/${storageKey}`;
+  async removeAdmin(id: string) {
+    this.logger.log(`Removing content ${id} by admin`);
+    const deleted = await this.contentsRepository.softDeleteAdmin(id);
+
+    if (deleted) {
+      await this.clearContentsCache();
+    }
+    return deleted;
+  }
+
+  async findOne(idOrSlug: string, userId?: string) {
+    const content = await this.contentsRepository.findOne(idOrSlug, userId);
+    if (!content) return null;
+
+    return {
+      ...content,
+      url: this.s3Service.getPublicUrl(content.storageKey),
+      author: {
+        ...content.author,
+        avatarUrl: content.author?.avatarUrl
+          ? this.s3Service.getPublicUrl(content.author.avatarUrl)
+          : null,
+      },
+    };
+  }
+
+  generateBotHtml(content: { title: string; storageKey: string }): string {
+    const imageUrl = this.s3Service.getPublicUrl(content.storageKey);
+    return `<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="UTF-8">
+    <title>${content.title}</title>
+    <meta property="og:title" content="${content.title}" />
+    <meta property="og:type" content="website" />
+    <meta property="og:image" content="${imageUrl}" />
+    <meta property="og:description" content="Découvrez ce meme sur Memegoat" />
+    <meta name="twitter:card" content="summary_large_image" />
+    <meta name="twitter:title" content="${content.title}" />
+    <meta name="twitter:image" content="${imageUrl}" />
+  </head>
+  <body>
+    <h1>${content.title}</h1>
+    <img src="${imageUrl}" alt="${content.title}" />
+  </body>
+</html>`;
   }

   private generateSlug(text: string): string {
@@ -359,11 +238,7 @@ export class ContentsService {
     let counter = 1;

     while (true) {
-      const [existing] = await this.databaseService.db
-        .select()
-        .from(contents)
-        .where(eq(contents.slug, slug))
-        .limit(1);
+      const existing = await this.contentsRepository.findBySlug(slug);

       if (!existing) break;
       slug = `${baseSlug}-${counter++}`;
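The body of generateSlug itself sits outside these hunks. A version consistent with the tag-slug normalization used in ContentsRepository.create would look like the standalone sketch below (illustrative only, not the project's actual implementation).

export function generateSlugSketch(text: string): string {
  return text
    .toLowerCase()
    .replace(/ /g, "-")      // spaces become dashes
    .replace(/[^\w-]/g, ""); // strip anything that is not a word char or dash
}

// generateSlugSketch("Meme of the Year!") === "meme-of-the-year"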
@@ -6,6 +6,7 @@ import {
   IsOptional,
   IsString,
   IsUUID,
+  MaxLength,
 } from "class-validator";

 export enum ContentType {
@@ -19,14 +20,17 @@ export class CreateContentDto {

   @IsString()
   @IsNotEmpty()
+  @MaxLength(255)
   title!: string;

   @IsString()
   @IsNotEmpty()
+  @MaxLength(512)
   storageKey!: string;

   @IsString()
   @IsNotEmpty()
+  @MaxLength(128)
   mimeType!: string;

   @IsInt()
@@ -39,5 +43,6 @@ export class CreateContentDto {
   @IsOptional()
   @IsArray()
   @IsString({ each: true })
+  @MaxLength(64, { each: true })
   tags?: string[];
 }
@@ -1,9 +1,11 @@
 import {
+  IsArray,
   IsEnum,
   IsNotEmpty,
   IsOptional,
   IsString,
   IsUUID,
+  MaxLength,
 } from "class-validator";
 import { ContentType } from "./create-content.dto";

@@ -13,6 +15,7 @@ export class UploadContentDto {

   @IsString()
   @IsNotEmpty()
+  @MaxLength(255)
   title!: string;

   @IsOptional()
@@ -20,6 +23,8 @@ export class UploadContentDto {
   categoryId?: string;

   @IsOptional()
+  @IsArray()
   @IsString({ each: true })
+  @MaxLength(64, { each: true })
   tags?: string[];
 }
backend/src/contents/repositories/contents.repository.ts (new file, 427 lines)
@@ -0,0 +1,427 @@
import { Injectable } from "@nestjs/common";
import {
  and,
  desc,
  eq,
  exists,
  ilike,
  isNull,
  lte,
  type SQL,
  sql,
} from "drizzle-orm";
import { DatabaseService } from "../../database/database.service";
import {
  categories,
  contents,
  contentsToTags,
  favorites,
  tags,
  users,
} from "../../database/schemas";
import type { NewContentInDb } from "../../database/schemas/content";

export interface FindAllOptions {
  limit: number;
  offset: number;
  sortBy?: "trend" | "recent";
  tag?: string;
  category?: string;
  author?: string;
  query?: string;
  favoritesOnly?: boolean;
  userId?: string;
}

@Injectable()
export class ContentsRepository {
  constructor(private readonly databaseService: DatabaseService) {}

  async findAll(options: FindAllOptions) {
    const {
      limit,
      offset,
      sortBy,
      tag,
      category,
      author,
      query,
      favoritesOnly,
      userId,
    } = options;

    let whereClause: SQL | undefined = isNull(contents.deletedAt);

    if (tag) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(contentsToTags)
            .innerJoin(tags, eq(contentsToTags.tagId, tags.id))
            .where(
              and(eq(contentsToTags.contentId, contents.id), eq(tags.name, tag)),
            ),
        ),
      );
    }

    if (category) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(categories)
            .where(
              and(
                eq(contents.categoryId, categories.id),
                sql`(${categories.id}::text = ${category} OR ${categories.slug} = ${category})`,
              ),
            ),
        ),
      );
    }

    if (author) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(users)
            .where(
              and(
                eq(contents.userId, users.uuid),
                sql`(${users.uuid}::text = ${author} OR ${users.username} = ${author})`,
              ),
            ),
        ),
      );
    }

    if (query) {
      whereClause = and(whereClause, ilike(contents.title, `%${query}%`));
    }

    if (favoritesOnly && userId) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(favorites)
            .where(
              and(eq(favorites.contentId, contents.id), eq(favorites.userId, userId)),
            ),
        ),
      );
    }

    let orderBy = desc(contents.createdAt);
    if (sortBy === "trend") {
      orderBy = desc(sql`${contents.views} + ${contents.usageCount} * 2`);
    }

    const results = await this.databaseService.db
      .select({
        id: contents.id,
        title: contents.title,
        slug: contents.slug,
        type: contents.type,
        storageKey: contents.storageKey,
        mimeType: contents.mimeType,
        fileSize: contents.fileSize,
        views: contents.views,
        usageCount: contents.usageCount,
        favoritesCount:
          sql<number>`(SELECT count(*) FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id})`.mapWith(
            Number,
          ),
        isLiked: userId
          ? sql<boolean>`EXISTS(SELECT 1 FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id} AND ${favorites.userId} = ${userId})`
          : sql<boolean>`false`,
        createdAt: contents.createdAt,
        updatedAt: contents.updatedAt,
        author: {
          id: users.uuid,
          username: users.username,
          displayName: users.displayName,
          avatarUrl: users.avatarUrl,
        },
        category: {
          id: categories.id,
          name: categories.name,
          slug: categories.slug,
        },
      })
      .from(contents)
      .leftJoin(users, eq(contents.userId, users.uuid))
      .leftJoin(categories, eq(contents.categoryId, categories.id))
      .where(whereClause)
      .orderBy(orderBy)
      .limit(limit)
      .offset(offset);

    const contentIds = results.map((r) => r.id);
    const tagsForContents = contentIds.length
      ? await this.databaseService.db
          .select({
            contentId: contentsToTags.contentId,
            name: tags.name,
          })
          .from(contentsToTags)
          .innerJoin(tags, eq(contentsToTags.tagId, tags.id))
          .where(sql`${contentsToTags.contentId} IN ${contentIds}`)
      : [];

    return results.map((r) => ({
      ...r,
      tags: tagsForContents.filter((t) => t.contentId === r.id).map((t) => t.name),
    }));
  }

  async create(data: NewContentInDb & { userId: string }, tagNames?: string[]) {
    return await this.databaseService.db.transaction(async (tx) => {
      const [newContent] = await tx.insert(contents).values(data).returning();

      if (tagNames && tagNames.length > 0) {
        for (const tagName of tagNames) {
          const slug = tagName
            .toLowerCase()
            .replace(/ /g, "-")
            .replace(/[^\w-]/g, "");

          let [tag] = await tx
            .select()
            .from(tags)
            .where(eq(tags.slug, slug))
            .limit(1);

          if (!tag) {
            [tag] = await tx
              .insert(tags)
              .values({
                name: tagName,
                slug,
                userId: data.userId,
              })
              .returning();
          }

          await tx
            .insert(contentsToTags)
            .values({
              contentId: newContent.id,
              tagId: tag.id,
            })
            .onConflictDoNothing();
        }
      }

      return newContent;
    });
  }

  async findOne(idOrSlug: string, userId?: string) {
    const [result] = await this.databaseService.db
      .select({
        id: contents.id,
        title: contents.title,
        slug: contents.slug,
        type: contents.type,
        storageKey: contents.storageKey,
        mimeType: contents.mimeType,
        fileSize: contents.fileSize,
        views: contents.views,
        usageCount: contents.usageCount,
        favoritesCount:
          sql<number>`(SELECT count(*) FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id})`.mapWith(
            Number,
          ),
        isLiked: userId
          ? sql<boolean>`EXISTS(SELECT 1 FROM ${favorites} WHERE ${favorites.contentId} = ${contents.id} AND ${favorites.userId} = ${userId})`
          : sql<boolean>`false`,
        createdAt: contents.createdAt,
        updatedAt: contents.updatedAt,
        userId: contents.userId,
        author: {
          id: users.uuid,
          username: users.username,
          displayName: users.displayName,
          avatarUrl: users.avatarUrl,
        },
        category: {
          id: categories.id,
          name: categories.name,
          slug: categories.slug,
        },
      })
      .from(contents)
      .leftJoin(users, eq(contents.userId, users.uuid))
      .leftJoin(categories, eq(contents.categoryId, categories.id))
      .where(
        and(
          isNull(contents.deletedAt),
          sql`(${contents.id}::text = ${idOrSlug} OR ${contents.slug} = ${idOrSlug})`,
        ),
      )
      .limit(1);

    if (!result) return null;

    const tagsForContent = await this.databaseService.db
      .select({
        name: tags.name,
      })
      .from(contentsToTags)
      .innerJoin(tags, eq(contentsToTags.tagId, tags.id))
      .where(eq(contentsToTags.contentId, result.id));

    return {
      ...result,
      tags: tagsForContent.map((t) => t.name),
    };
  }

  async count(options: {
    tag?: string;
    category?: string;
    author?: string;
    query?: string;
    favoritesOnly?: boolean;
    userId?: string;
  }) {
    const { tag, category, author, query, favoritesOnly, userId } = options;

    let whereClause: SQL | undefined = isNull(contents.deletedAt);

    if (tag) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(contentsToTags)
            .innerJoin(tags, eq(contentsToTags.tagId, tags.id))
            .where(
              and(eq(contentsToTags.contentId, contents.id), eq(tags.name, tag)),
            ),
        ),
      );
    }

    if (category) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(categories)
            .where(
              and(
                eq(contents.categoryId, categories.id),
                sql`(${categories.id}::text = ${category} OR ${categories.slug} = ${category})`,
              ),
            ),
        ),
      );
    }

    if (author) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(users)
            .where(
              and(
                eq(contents.userId, users.uuid),
                sql`(${users.uuid}::text = ${author} OR ${users.username} = ${author})`,
              ),
            ),
        ),
      );
    }

    if (query) {
      whereClause = and(whereClause, ilike(contents.title, `%${query}%`));
    }

    if (favoritesOnly && userId) {
      whereClause = and(
        whereClause,
        exists(
          this.databaseService.db
            .select()
            .from(favorites)
            .where(
              and(eq(favorites.contentId, contents.id), eq(favorites.userId, userId)),
            ),
        ),
      );
    }

    const [result] = await this.databaseService.db
      .select({ count: sql<number>`count(*)` })
      .from(contents)
      .where(whereClause);

    return Number(result.count);
  }

  async incrementViews(id: string) {
    await this.databaseService.db
      .update(contents)
      .set({ views: sql`${contents.views} + 1` })
      .where(eq(contents.id, id));
  }

  async incrementUsage(id: string) {
    await this.databaseService.db
      .update(contents)
      .set({ usageCount: sql`${contents.usageCount} + 1` })
      .where(eq(contents.id, id));
  }

  async softDelete(id: string, userId: string) {
    const [deleted] = await this.databaseService.db
      .update(contents)
      .set({ deletedAt: new Date() })
      .where(and(eq(contents.id, id), eq(contents.userId, userId)))
      .returning();
    return deleted;
  }

  async softDeleteAdmin(id: string) {
    const [deleted] = await this.databaseService.db
      .update(contents)
      .set({ deletedAt: new Date() })
      .where(eq(contents.id, id))
      .returning();
    return deleted;
  }

  async findBySlug(slug: string) {
    const [result] = await this.databaseService.db
      .select()
      .from(contents)
      .where(eq(contents.slug, slug))
      .limit(1);
    return result;
  }

  async purgeSoftDeleted(before: Date) {
    return await this.databaseService.db
      .delete(contents)
      .where(
        and(
          sql`${contents.deletedAt} IS NOT NULL`,
          lte(contents.deletedAt, before),
        ),
      )
      .returning();
  }
}
@@ -1,8 +1,25 @@
-import { Module } from "@nestjs/common";
+import { Global, Module } from "@nestjs/common";
 import { CryptoService } from "./crypto.service";
+import { EncryptionService } from "./services/encryption.service";
+import { HashingService } from "./services/hashing.service";
+import { JwtService } from "./services/jwt.service";
+import { PostQuantumService } from "./services/post-quantum.service";

+@Global()
 @Module({
-  providers: [CryptoService],
-  exports: [CryptoService],
+  providers: [
+    CryptoService,
+    HashingService,
+    JwtService,
+    EncryptionService,
+    PostQuantumService,
+  ],
+  exports: [
+    CryptoService,
+    HashingService,
+    JwtService,
+    EncryptionService,
+    PostQuantumService,
+  ],
 })
 export class CryptoModule {}
@@ -64,6 +64,10 @@ jest.mock("jose", () => ({
 }));

 import { CryptoService } from "./crypto.service";
+import { EncryptionService } from "./services/encryption.service";
+import { HashingService } from "./services/hashing.service";
+import { JwtService } from "./services/jwt.service";
+import { PostQuantumService } from "./services/post-quantum.service";

 describe("CryptoService", () => {
   let service: CryptoService;
@@ -72,6 +76,10 @@ describe("CryptoService", () => {
     const module: TestingModule = await Test.createTestingModule({
       providers: [
         CryptoService,
+        HashingService,
+        JwtService,
+        EncryptionService,
+        PostQuantumService,
         {
           provide: ConfigService,
           useValue: {
@@ -1,151 +1,79 @@
-import { Injectable, Logger } from "@nestjs/common";
-import { ConfigService } from "@nestjs/config";
-import { ml_kem768 } from "@noble/post-quantum/ml-kem.js";
-import { hash, verify } from "@node-rs/argon2";
-import * as jose from "jose";
+import { Injectable } from "@nestjs/common";
+import type * as jose from "jose";
+import { EncryptionService } from "./services/encryption.service";
+import { HashingService } from "./services/hashing.service";
+import { JwtService } from "./services/jwt.service";
+import { PostQuantumService } from "./services/post-quantum.service";

+/**
+ * @deprecated Use HashingService, JwtService, EncryptionService or PostQuantumService directly.
+ * This service acts as a Facade for backward compatibility.
+ */
 @Injectable()
 export class CryptoService {
-  private readonly logger = new Logger(CryptoService.name);
-  private readonly jwtSecret: Uint8Array;
-  private readonly encryptionKey: Uint8Array;
-
-  constructor(private configService: ConfigService) {
-    const secret = this.configService.get<string>("JWT_SECRET");
-    if (!secret) {
-      this.logger.warn(
-        "JWT_SECRET is not defined, using a default insecure secret for development",
-      );
-    }
-    this.jwtSecret = new TextEncoder().encode(
-      secret || "default-secret-change-me-in-production",
-    );
-
-    const encKey = this.configService.get<string>("ENCRYPTION_KEY");
-    if (!encKey) {
-      this.logger.warn(
-        "ENCRYPTION_KEY is not defined, using a default insecure key for development",
-      );
-    }
-    // For AES-GCM 256, we need 32 bytes (256 bits)
-    const rawKey = encKey || "default-encryption-key-32-chars-";
-    this.encryptionKey = new TextEncoder().encode(
-      rawKey.padEnd(32, "0").substring(0, 32),
-    );
-  }
-
-  // --- Blind Indexing (for search on encrypted data) ---
-
+  constructor(
+    private readonly hashingService: HashingService,
+    private readonly jwtService: JwtService,
+    private readonly encryptionService: EncryptionService,
+    private readonly postQuantumService: PostQuantumService,
+  ) {}
+
   async hashEmail(email: string): Promise<string> {
-    const normalizedEmail = email.toLowerCase().trim();
-    const data = new TextEncoder().encode(normalizedEmail);
-    const hashBuffer = await crypto.subtle.digest("SHA-256", data);
-    return Array.from(new Uint8Array(hashBuffer))
-      .map((b) => b.toString(16).padStart(2, "0"))
-      .join("");
+    return this.hashingService.hashEmail(email);
   }

   async hashIp(ip: string): Promise<string> {
-    const data = new TextEncoder().encode(ip);
-    const hashBuffer = await crypto.subtle.digest("SHA-256", data);
-    return Array.from(new Uint8Array(hashBuffer))
-      .map((b) => b.toString(16).padStart(2, "0"))
-      .join("");
+    return this.hashingService.hashIp(ip);
   }

   getPgpEncryptionKey(): string {
-    return (
-      this.configService.get<string>("PGP_ENCRYPTION_KEY") || "default-pgp-key"
-    );
+    return this.encryptionService.getPgpEncryptionKey();
   }

-  // --- Argon2 Hashing ---
-
   async hashPassword(password: string): Promise<string> {
-    return hash(password, {
-      algorithm: 2,
-    });
+    return this.hashingService.hashPassword(password);
   }

   async verifyPassword(password: string, hash: string): Promise<boolean> {
-    return verify(hash, password);
+    return this.hashingService.verifyPassword(password, hash);
   }

-  // --- JWT Operations via jose ---
-
   async generateJwt(
     payload: jose.JWTPayload,
     expiresIn = "2h",
   ): Promise<string> {
-    return new jose.SignJWT(payload)
-      .setProtectedHeader({ alg: "HS256" })
-      .setIssuedAt()
-      .setExpirationTime(expiresIn)
-      .sign(this.jwtSecret);
+    return this.jwtService.generateJwt(payload, expiresIn);
   }

   async verifyJwt<T extends jose.JWTPayload>(token: string): Promise<T> {
-    const { payload } = await jose.jwtVerify(token, this.jwtSecret);
-    return payload as T;
+    return this.jwtService.verifyJwt<T>(token);
   }

-  // --- Encryption & Decryption (JWE) ---
-
-  /**
-   * Encrypts text content using JWE (Compact Serialization)
-   * Algorithm: A256GCMKW for key management, A256GCM for content encryption
-   */
   async encryptContent(content: string): Promise<string> {
-    const data = new TextEncoder().encode(content);
-    return new jose.CompactEncrypt(data)
-      .setProtectedHeader({ alg: "dir", enc: "A256GCM" })
-      .encrypt(this.encryptionKey);
+    return this.encryptionService.encryptContent(content);
   }

-  /**
-   * Decrypts a JWE payload
-   */
   async decryptContent(jwe: string): Promise<string> {
-    const { plaintext } = await jose.compactDecrypt(jwe, this.encryptionKey);
-    return new TextDecoder().decode(plaintext);
+    return this.encryptionService.decryptContent(jwe);
   }

-  // --- Signature & Verification (JWS) ---
-
-  /**
-   * Signs text content using JWS (Compact Serialization)
-   * Algorithm: HS256 (HMAC-SHA256)
-   */
   async signContent(content: string): Promise<string> {
-    const data = new TextEncoder().encode(content);
-    return new jose.CompactSign(data)
-      .setProtectedHeader({ alg: "HS256" })
-      .sign(this.jwtSecret);
+    return this.encryptionService.signContent(content);
   }

-  /**
-   * Verifies the JWS signature of a content
-   */
   async verifyContentSignature(jws: string): Promise<string> {
-    const { payload } = await jose.compactVerify(jws, this.jwtSecret);
-    return new TextDecoder().decode(payload);
+    return this.encryptionService.verifyContentSignature(jws);
   }

-  // --- Post-Quantum Cryptography via @noble/post-quantum ---
-  // Example: Kyber (ML-KEM) key encapsulation
-
   generatePostQuantumKeyPair() {
-    const seed = new Uint8Array(64);
-    crypto.getRandomValues(seed);
-    const { publicKey, secretKey } = ml_kem768.keygen(seed);
-    return { publicKey, secretKey };
+    return this.postQuantumService.generatePostQuantumKeyPair();
   }

   encapsulate(publicKey: Uint8Array) {
-    return ml_kem768.encapsulate(publicKey);
+    return this.postQuantumService.encapsulate(publicKey);
   }

   decapsulate(cipherText: Uint8Array, secretKey: Uint8Array) {
-    return ml_kem768.decapsulate(cipherText, secretKey);
+    return this.postQuantumService.decapsulate(cipherText, secretKey);
   }
 }
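With the facade in place, existing callers keep injecting CryptoService while new code depends on the focused services directly. A minimal migration sketch, assuming the providers exported by CryptoModule above; the AccountService class and its import paths are hypothetical:

import { Injectable } from "@nestjs/common";
import { HashingService } from "../crypto/services/hashing.service";
import { JwtService } from "../crypto/services/jwt.service";

@Injectable()
export class AccountService {
  // Inject the focused services instead of the deprecated CryptoService facade.
  constructor(
    private readonly hashingService: HashingService,
    private readonly jwtService: JwtService,
  ) {}

  async register(email: string, password: string) {
    // Argon2 hash for storage, SHA-256 blind index for lookups on the encrypted email column.
    const passwordHash = await this.hashingService.hashPassword(password);
    const emailHash = await this.hashingService.hashEmail(email);
    return { passwordHash, emailHash };
  }

  async issueToken(userUuid: string) {
    // Same jose-backed HS256 token the facade now delegates to.
    return this.jwtService.generateJwt({ sub: userUuid }, "2h");
  }
}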
58 backend/src/crypto/services/encryption.service.ts (new file)
@@ -0,0 +1,58 @@
+import { Injectable, Logger } from "@nestjs/common";
+import { ConfigService } from "@nestjs/config";
+import * as jose from "jose";
+
+@Injectable()
+export class EncryptionService {
+  private readonly logger = new Logger(EncryptionService.name);
+  private readonly jwtSecret: Uint8Array;
+  private readonly encryptionKey: Uint8Array;
+
+  constructor(private configService: ConfigService) {
+    const secret = this.configService.get<string>("JWT_SECRET");
+    this.jwtSecret = new TextEncoder().encode(
+      secret || "default-secret-change-me-in-production",
+    );
+
+    const encKey = this.configService.get<string>("ENCRYPTION_KEY");
+    if (!encKey) {
+      this.logger.warn(
+        "ENCRYPTION_KEY is not defined, using a default insecure key for development",
+      );
+    }
+    const rawKey = encKey || "default-encryption-key-32-chars-";
+    this.encryptionKey = new TextEncoder().encode(
+      rawKey.padEnd(32, "0").substring(0, 32),
+    );
+  }
+
+  async encryptContent(content: string): Promise<string> {
+    const data = new TextEncoder().encode(content);
+    return new jose.CompactEncrypt(data)
+      .setProtectedHeader({ alg: "dir", enc: "A256GCM" })
+      .encrypt(this.encryptionKey);
+  }
+
+  async decryptContent(jwe: string): Promise<string> {
+    const { plaintext } = await jose.compactDecrypt(jwe, this.encryptionKey);
+    return new TextDecoder().decode(plaintext);
+  }
+
+  async signContent(content: string): Promise<string> {
+    const data = new TextEncoder().encode(content);
+    return new jose.CompactSign(data)
+      .setProtectedHeader({ alg: "HS256" })
+      .sign(this.jwtSecret);
+  }
+
+  async verifyContentSignature(jws: string): Promise<string> {
+    const { payload } = await jose.compactVerify(jws, this.jwtSecret);
+    return new TextDecoder().decode(payload);
+  }
+
+  getPgpEncryptionKey(): string {
+    return (
+      this.configService.get<string>("PGP_ENCRYPTION_KEY") || "default-pgp-key"
+    );
+  }
+}
32 backend/src/crypto/services/hashing.service.ts (new file)
@@ -0,0 +1,32 @@
+import { Injectable } from "@nestjs/common";
+import { hash, verify } from "@node-rs/argon2";
+
+@Injectable()
+export class HashingService {
+  async hashEmail(email: string): Promise<string> {
+    const normalizedEmail = email.toLowerCase().trim();
+    return this.hashSha256(normalizedEmail);
+  }
+
+  async hashIp(ip: string): Promise<string> {
+    return this.hashSha256(ip);
+  }
+
+  async hashSha256(text: string): Promise<string> {
+    const data = new TextEncoder().encode(text);
+    const hashBuffer = await crypto.subtle.digest("SHA-256", data);
+    return Array.from(new Uint8Array(hashBuffer))
+      .map((b) => b.toString(16).padStart(2, "0"))
+      .join("");
+  }
+
+  async hashPassword(password: string): Promise<string> {
+    return hash(password, {
+      algorithm: 2,
+    });
+  }
+
+  async verifyPassword(password: string, hash: string): Promise<boolean> {
+    return verify(hash, password);
+  }
+}
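hashEmail lower-cases and trims the address before hashing, so the same mailbox always yields the same SHA-256 hex digest; that determinism is what makes it usable as a blind index for equality lookups alongside an encrypted email column. A quick sketch, assuming an injected HashingService instance:

import { HashingService } from "./hashing.service";

async function demo(hashing: HashingService) {
  // Same normalized input, same 64-character hex digest.
  const a = await hashing.hashEmail("  Alice@Example.COM ");
  const b = await hashing.hashEmail("alice@example.com");
  console.log(a === b); // true

  // Argon2 hash (algorithm: 2) for password storage; verifyPassword checks a candidate against it.
  const stored = await hashing.hashPassword("correct horse battery staple");
  console.log(await hashing.verifyPassword("correct horse battery staple", stored)); // true
}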
37 backend/src/crypto/services/jwt.service.ts (new file)
@@ -0,0 +1,37 @@
+import { Injectable, Logger } from "@nestjs/common";
+import { ConfigService } from "@nestjs/config";
+import * as jose from "jose";
+
+@Injectable()
+export class JwtService {
+  private readonly logger = new Logger(JwtService.name);
+  private readonly jwtSecret: Uint8Array;
+
+  constructor(private configService: ConfigService) {
+    const secret = this.configService.get<string>("JWT_SECRET");
+    if (!secret) {
+      this.logger.warn(
+        "JWT_SECRET is not defined, using a default insecure secret for development",
+      );
+    }
+    this.jwtSecret = new TextEncoder().encode(
+      secret || "default-secret-change-me-in-production",
+    );
+  }
+
+  async generateJwt(
+    payload: jose.JWTPayload,
+    expiresIn = "2h",
+  ): Promise<string> {
+    return new jose.SignJWT(payload)
+      .setProtectedHeader({ alg: "HS256" })
+      .setIssuedAt()
+      .setExpirationTime(expiresIn)
+      .sign(this.jwtSecret);
+  }
+
+  async verifyJwt<T extends jose.JWTPayload>(token: string): Promise<T> {
+    const { payload } = await jose.jwtVerify(token, this.jwtSecret);
+    return payload as T;
+  }
+}
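A short round trip with the new JwtService, assuming JWT_SECRET is configured; the SessionPayload shape is illustrative, not taken from the diff:

import type { JWTPayload } from "jose";
import { JwtService } from "./jwt.service";

// Illustrative claim shape; the real claims depend on the auth module.
interface SessionPayload extends JWTPayload {
  sub: string;
  role: string;
}

async function demo(jwtService: JwtService) {
  const token = await jwtService.generateJwt({ sub: "user-uuid", role: "user" }, "2h");
  // verifyJwt rejects (via jose.jwtVerify) when the signature or expiry is invalid.
  const payload = await jwtService.verifyJwt<SessionPayload>(token);
  return payload.sub; // "user-uuid"
}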
20 backend/src/crypto/services/post-quantum.service.ts (new file)
@@ -0,0 +1,20 @@
+import { Injectable } from "@nestjs/common";
+import { ml_kem768 } from "@noble/post-quantum/ml-kem.js";
+
+@Injectable()
+export class PostQuantumService {
+  generatePostQuantumKeyPair() {
+    const seed = new Uint8Array(64);
+    crypto.getRandomValues(seed);
+    const { publicKey, secretKey } = ml_kem768.keygen(seed);
+    return { publicKey, secretKey };
+  }
+
+  encapsulate(publicKey: Uint8Array) {
+    return ml_kem768.encapsulate(publicKey);
+  }
+
+  decapsulate(cipherText: Uint8Array, secretKey: Uint8Array) {
+    return ml_kem768.decapsulate(cipherText, secretKey);
+  }
+}
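How the encapsulate/decapsulate pair is intended to be used: the recipient publishes an ML-KEM-768 public key, the sender encapsulates against it and transmits only the ciphertext, and both sides end up with the same shared secret. A sketch, assuming @noble/post-quantum's { cipherText, sharedSecret } return shape:

import { PostQuantumService } from "./post-quantum.service";

function demo(pq: PostQuantumService) {
  // Recipient side: generate a key pair and publish publicKey.
  const { publicKey, secretKey } = pq.generatePostQuantumKeyPair();

  // Sender side: encapsulate against the public key.
  const { cipherText, sharedSecret } = pq.encapsulate(publicKey);

  // Recipient side: decapsulate the ciphertext to recover the same secret.
  const recovered = pq.decapsulate(cipherText, secretKey);

  // sharedSecret and recovered hold identical bytes; either can key a symmetric cipher.
  return { sharedSecret, recovered };
}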
@@ -1,7 +1,8 @@
-import { Module } from "@nestjs/common";
+import { Global, Module } from "@nestjs/common";
 import { ConfigModule } from "@nestjs/config";
 import { DatabaseService } from "./database.service";

+@Global()
 @Module({
   imports: [ConfigModule],
   providers: [DatabaseService],
@@ -3,9 +3,9 @@ export * from "./audit_logs";
 export * from "./categories";
 export * from "./content";
 export * from "./favorites";
+export * from "./pgp";
 export * from "./rbac";
 export * from "./reports";
 export * from "./sessions";
 export * from "./tags";
 export * from "./users";
-export * from "./pgp";
@@ -55,6 +55,9 @@ export function pgpSymEncrypt(value: string | SQL, key: string | SQL) {
 /**
  * @deprecated Use pgpEncrypted-typed columns directly; they now handle decryption automatically.
  */
-export function pgpSymDecrypt(column: AnyPgColumn, key: string | SQL): SQL<string> {
+export function pgpSymDecrypt(
+  column: AnyPgColumn,
+  key: string | SQL,
+): SQL<string> {
   return sql`pgp_sym_decrypt(${column}, ${key})`.mapWith(column) as SQL<string>;
 }
@@ -1,4 +1,3 @@
-import { SQL, sql } from "drizzle-orm";
 import {
   boolean,
   index,
@@ -30,7 +29,9 @@ export const users = pgTable(
     displayName: varchar("display_name", { length: 32 }),

     username: varchar("username", { length: 32 }).notNull().unique(),
-    passwordHash: varchar("password_hash", { length: 72 }).notNull(),
+    passwordHash: varchar("password_hash", { length: 100 }).notNull(),
+    avatarUrl: varchar("avatar_url", { length: 512 }),
+    bio: varchar("bio", { length: 255 }),

     // Security
     twoFactorSecret: pgpEncrypted("two_factor_secret"),
@@ -1,12 +1,13 @@
 import { Module } from "@nestjs/common";
-import { DatabaseModule } from "../database/database.module";
+import { AuthModule } from "../auth/auth.module";
 import { FavoritesController } from "./favorites.controller";
 import { FavoritesService } from "./favorites.service";
+import { FavoritesRepository } from "./repositories/favorites.repository";

 @Module({
-  imports: [DatabaseModule],
+  imports: [AuthModule],
   controllers: [FavoritesController],
-  providers: [FavoritesService],
+  providers: [FavoritesService, FavoritesRepository],
-  exports: [FavoritesService],
+  exports: [FavoritesService, FavoritesRepository],
 })
 export class FavoritesModule {}
@@ -1,54 +1,31 @@
 import { ConflictException, NotFoundException } from "@nestjs/common";
 import { Test, TestingModule } from "@nestjs/testing";
-import { DatabaseService } from "../database/database.service";
 import { FavoritesService } from "./favorites.service";
+import { FavoritesRepository } from "./repositories/favorites.repository";

 describe("FavoritesService", () => {
   let service: FavoritesService;
+  let repository: FavoritesRepository;

-  const mockDb = {
-    select: jest.fn(),
-    from: jest.fn(),
-    where: jest.fn(),
-    limit: jest.fn(),
-    offset: jest.fn(),
-    innerJoin: jest.fn(),
-    insert: jest.fn(),
-    values: jest.fn(),
-    delete: jest.fn(),
-    returning: jest.fn(),
+  const mockFavoritesRepository = {
+    findContentById: jest.fn(),
+    add: jest.fn(),
+    remove: jest.fn(),
+    findByUserId: jest.fn(),
   };

   beforeEach(async () => {
     jest.clearAllMocks();

-    const chain = {
-      select: jest.fn().mockReturnThis(),
-      from: jest.fn().mockReturnThis(),
-      where: jest.fn().mockReturnThis(),
-      limit: jest.fn().mockReturnThis(),
-      offset: jest.fn().mockReturnThis(),
-      innerJoin: jest.fn().mockReturnThis(),
-      insert: jest.fn().mockReturnThis(),
-      values: jest.fn().mockReturnThis(),
-      delete: jest.fn().mockReturnThis(),
-      returning: jest.fn().mockReturnThis(),
-    };
-
-    const mockImplementation = () => Object.assign(Promise.resolve([]), chain);
-    for (const mock of Object.values(chain)) {
-      mock.mockImplementation(mockImplementation);
-    }
-    Object.assign(mockDb, chain);
-
     const module: TestingModule = await Test.createTestingModule({
       providers: [
         FavoritesService,
-        { provide: DatabaseService, useValue: { db: mockDb } },
+        { provide: FavoritesRepository, useValue: mockFavoritesRepository },
       ],
     }).compile();

     service = module.get<FavoritesService>(FavoritesService);
+    repository = module.get<FavoritesRepository>(FavoritesRepository);
   });

   it("should be defined", () => {
@@ -57,26 +34,31 @@ describe("FavoritesService", () => {

   describe("addFavorite", () => {
     it("should add a favorite", async () => {
-      mockDb.limit.mockResolvedValue([{ id: "content1" }]);
-      mockDb.returning.mockResolvedValue([
+      mockFavoritesRepository.findContentById.mockResolvedValue({
+        id: "content1",
+      });
+      mockFavoritesRepository.add.mockResolvedValue([
         { userId: "u1", contentId: "content1" },
       ]);

       const result = await service.addFavorite("u1", "content1");

       expect(result).toEqual([{ userId: "u1", contentId: "content1" }]);
+      expect(repository.add).toHaveBeenCalledWith("u1", "content1");
     });

     it("should throw NotFoundException if content does not exist", async () => {
-      mockDb.limit.mockResolvedValue([]);
+      mockFavoritesRepository.findContentById.mockResolvedValue(null);
       await expect(service.addFavorite("u1", "invalid")).rejects.toThrow(
         NotFoundException,
       );
     });

     it("should throw ConflictException on duplicate favorite", async () => {
-      mockDb.limit.mockResolvedValue([{ id: "content1" }]);
-      mockDb.returning.mockRejectedValue(new Error("Duplicate"));
+      mockFavoritesRepository.findContentById.mockResolvedValue({
+        id: "content1",
+      });
+      mockFavoritesRepository.add.mockRejectedValue(new Error("Duplicate"));
       await expect(service.addFavorite("u1", "content1")).rejects.toThrow(
         ConflictException,
       );
@@ -85,13 +67,16 @@ describe("FavoritesService", () => {

   describe("removeFavorite", () => {
     it("should remove a favorite", async () => {
-      mockDb.returning.mockResolvedValue([{ userId: "u1", contentId: "c1" }]);
+      mockFavoritesRepository.remove.mockResolvedValue([
+        { userId: "u1", contentId: "c1" },
+      ]);
       const result = await service.removeFavorite("u1", "c1");
       expect(result).toEqual({ userId: "u1", contentId: "c1" });
+      expect(repository.remove).toHaveBeenCalledWith("u1", "c1");
     });

     it("should throw NotFoundException if favorite not found", async () => {
-      mockDb.returning.mockResolvedValue([]);
+      mockFavoritesRepository.remove.mockResolvedValue([]);
       await expect(service.removeFavorite("u1", "c1")).rejects.toThrow(
         NotFoundException,
       );
@@ -4,46 +4,32 @@ import {
   Logger,
   NotFoundException,
 } from "@nestjs/common";
-import { and, eq } from "drizzle-orm";
-import { DatabaseService } from "../database/database.service";
-import { contents, favorites } from "../database/schemas";
+import { FavoritesRepository } from "./repositories/favorites.repository";

 @Injectable()
 export class FavoritesService {
   private readonly logger = new Logger(FavoritesService.name);

-  constructor(private readonly databaseService: DatabaseService) {}
+  constructor(private readonly favoritesRepository: FavoritesRepository) {}

   async addFavorite(userId: string, contentId: string) {
     this.logger.log(`Adding favorite: user ${userId}, content ${contentId}`);
-    // Check that the content exists
-    const content = await this.databaseService.db
-      .select()
-      .from(contents)
-      .where(eq(contents.id, contentId))
-      .limit(1);
-
-    if (content.length === 0) {
+    const content = await this.favoritesRepository.findContentById(contentId);
+    if (!content) {
       throw new NotFoundException("Content not found");
     }

     try {
-      return await this.databaseService.db
-        .insert(favorites)
-        .values({ userId, contentId })
-        .returning();
+      return await this.favoritesRepository.add(userId, contentId);
     } catch (_error) {
-      // Most likely a primary key violation (already a favorite)
       throw new ConflictException("Content already in favorites");
     }
   }

   async removeFavorite(userId: string, contentId: string) {
     this.logger.log(`Removing favorite: user ${userId}, content ${contentId}`);
-    const result = await this.databaseService.db
-      .delete(favorites)
-      .where(and(eq(favorites.userId, userId), eq(favorites.contentId, contentId)))
-      .returning();
-
+    const result = await this.favoritesRepository.remove(userId, contentId);
     if (result.length === 0) {
       throw new NotFoundException("Favorite not found");
@@ -53,16 +39,6 @@ export class FavoritesService {
   }

   async getUserFavorites(userId: string, limit: number, offset: number) {
-    const data = await this.databaseService.db
-      .select({
-        content: contents,
-      })
-      .from(favorites)
-      .innerJoin(contents, eq(favorites.contentId, contents.id))
-      .where(eq(favorites.userId, userId))
-      .limit(limit)
-      .offset(offset);
-
-    return data.map((item) => item.content);
+    return await this.favoritesRepository.findByUserId(userId, limit, offset);
   }
 }
46 backend/src/favorites/repositories/favorites.repository.ts (new file)
@@ -0,0 +1,46 @@
+import { Injectable } from "@nestjs/common";
+import { and, eq } from "drizzle-orm";
+import { DatabaseService } from "../../database/database.service";
+import { contents, favorites } from "../../database/schemas";
+
+@Injectable()
+export class FavoritesRepository {
+  constructor(private readonly databaseService: DatabaseService) {}
+
+  async findContentById(contentId: string) {
+    const result = await this.databaseService.db
+      .select()
+      .from(contents)
+      .where(eq(contents.id, contentId))
+      .limit(1);
+    return result[0] || null;
+  }
+
+  async add(userId: string, contentId: string) {
+    return await this.databaseService.db
+      .insert(favorites)
+      .values({ userId, contentId })
+      .returning();
+  }
+
+  async remove(userId: string, contentId: string) {
+    return await this.databaseService.db
+      .delete(favorites)
+      .where(and(eq(favorites.userId, userId), eq(favorites.contentId, contentId)))
+      .returning();
+  }
+
+  async findByUserId(userId: string, limit: number, offset: number) {
+    const data = await this.databaseService.db
+      .select({
+        content: contents,
+      })
+      .from(favorites)
+      .innerJoin(contents, eq(favorites.contentId, contents.id))
+      .where(eq(favorites.userId, userId))
+      .limit(limit)
+      .offset(offset);
+
+    return data.map((item) => item.content);
+  }
+}
@@ -1,9 +1,10 @@
 import { Injectable, Logger } from "@nestjs/common";
 import { ConfigService } from "@nestjs/config";
 import { MailerService } from "@nestjs-modules/mailer";
+import type { IMailService } from "../common/interfaces/mail.interface";

 @Injectable()
-export class MailService {
+export class MailService implements IMailService {
   private readonly logger = new Logger(MailService.name);
   private readonly domain: string;

@@ -24,14 +24,31 @@ async function bootstrap() {
   }

   // Security
-  app.use(helmet());
-  app.enableCors({
-    origin:
-      configService.get("NODE_ENV") === "production"
-        ? [configService.get("DOMAIN_NAME") as string]
-        : true,
-    credentials: true,
-  });
+  app.use(
+    helmet({
+      crossOriginResourcePolicy: { policy: "cross-origin" },
+    }),
+  );
+  const corsEnabled = Boolean(configService.get<boolean>("ENABLE_CORS"));
+  if (corsEnabled) {
+    const domainName = configService.get<string>("CORS_DOMAIN_NAME");
+    app.enableCors({
+      origin: (origin, callback) => {
+        if (!origin || domainName === "*") {
+          callback(null, true);
+          return;
+        }
+
+        const allowedOrigins = domainName?.split(",").map((o) => o.trim()) || [];
+        if (allowedOrigins.includes(origin)) {
+          callback(null, true);
+        } else {
+          callback(null, false);
+        }
+      },
+      credentials: true,
+    });
+  }

   // Global validation
   app.useGlobalPipes(
@@ -49,4 +66,4 @@ async function bootstrap() {
   await app.listen(port);
   logger.log(`Application is running on: http://localhost:${port}`);
 }
-bootstrap();
+bootstrap().then();
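The origin callback above allows every origin when CORS_DOMAIN_NAME is "*" (or when the request carries no Origin header), and otherwise matches the origin against a comma-separated allow-list. The same decision, extracted as a standalone sketch with illustrative values:

// Mirrors the bootstrap() origin callback: "*" allows everything, otherwise the
// origin must appear in the comma-separated CORS_DOMAIN_NAME allow-list.
function isOriginAllowed(origin: string | undefined, domainName: string | undefined): boolean {
  if (!origin || domainName === "*") return true;
  const allowedOrigins = domainName?.split(",").map((o) => o.trim()) || [];
  return allowedOrigins.includes(origin);
}

isOriginAllowed("https://app.example.com", "https://app.example.com,https://admin.example.com"); // true
isOriginAllowed("https://evil.example", "https://app.example.com"); // false
isOriginAllowed(undefined, "https://app.example.com"); // true (no Origin header, e.g. curl)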
61 backend/src/media/media.controller.spec.ts (new file)
@@ -0,0 +1,61 @@
+import { Readable } from "node:stream";
+import { NotFoundException } from "@nestjs/common";
+import { Test, TestingModule } from "@nestjs/testing";
+import type { Response } from "express";
+import { S3Service } from "../s3/s3.service";
+import { MediaController } from "./media.controller";
+
+describe("MediaController", () => {
+  let controller: MediaController;
+
+  const mockS3Service = {
+    getFileInfo: jest.fn(),
+    getFile: jest.fn(),
+  };
+
+  beforeEach(async () => {
+    const module: TestingModule = await Test.createTestingModule({
+      controllers: [MediaController],
+      providers: [{ provide: S3Service, useValue: mockS3Service }],
+    }).compile();
+
+    controller = module.get<MediaController>(MediaController);
+  });
+
+  it("should be defined", () => {
+    expect(controller).toBeDefined();
+  });
+
+  describe("getFile", () => {
+    it("should stream the file and set headers with path containing slashes", async () => {
+      const res = {
+        setHeader: jest.fn(),
+      } as unknown as Response;
+      const stream = new Readable();
+      stream.pipe = jest.fn();
+      const key = "contents/user-id/test.webp";
+
+      mockS3Service.getFileInfo.mockResolvedValue({
+        size: 100,
+        metaData: { "content-type": "image/webp" },
+      });
+      mockS3Service.getFile.mockResolvedValue(stream);
+
+      await controller.getFile(key, res);
+
+      expect(mockS3Service.getFileInfo).toHaveBeenCalledWith(key);
+      expect(res.setHeader).toHaveBeenCalledWith("Content-Type", "image/webp");
+      expect(res.setHeader).toHaveBeenCalledWith("Content-Length", 100);
+      expect(stream.pipe).toHaveBeenCalledWith(res);
+    });
+
+    it("should throw NotFoundException if file is not found", async () => {
+      mockS3Service.getFileInfo.mockRejectedValue(new Error("Not found"));
+      const res = {} as unknown as Response;
+
+      await expect(controller.getFile("invalid", res)).rejects.toThrow(
+        NotFoundException,
+      );
+    });
+  });
+});
30 backend/src/media/media.controller.ts (new file)
@@ -0,0 +1,30 @@
+import { Controller, Get, NotFoundException, Param, Res } from "@nestjs/common";
+import type { Response } from "express";
+import type { BucketItemStat } from "minio";
+import { S3Service } from "../s3/s3.service";
+
+@Controller("media")
+export class MediaController {
+  constructor(private readonly s3Service: S3Service) {}
+
+  @Get("*key")
+  async getFile(@Param("key") key: string, @Res() res: Response) {
+    try {
+      const stats = (await this.s3Service.getFileInfo(key)) as BucketItemStat;
+      const stream = await this.s3Service.getFile(key);
+
+      const contentType =
+        stats.metaData?.["content-type"] ||
+        stats.metadata?.["content-type"] ||
+        "application/octet-stream";
+
+      res.setHeader("Content-Type", contentType);
+      res.setHeader("Content-Length", stats.size);
+      res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
+
+      stream.pipe(res);
+    } catch (_error) {
+      throw new NotFoundException("Fichier non trouvé");
+    }
+  }
+}
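Because the route is declared as @Get("*key"), keys containing slashes are passed through whole, so an object stored as contents/user-id/test.webp is served at /media/contents/user-id/test.webp with its S3 content type and an immutable Cache-Control header. A client-side sketch; the base URL is illustrative and ignores any global API prefix:

// Fetch a processed asset back through the media proxy.
async function fetchMedia(key: string): Promise<Blob> {
  const res = await fetch(`http://localhost:3001/media/${key}`);
  if (!res.ok) throw new Error(`Media not found: ${key}`); // the controller answers 404 for unknown keys
  return res.blob();
}

fetchMedia("contents/user-id/test.webp"); // same key shape as in the controller spec above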
@@ -1,8 +1,14 @@
 import { Module } from "@nestjs/common";
+import { S3Module } from "../s3/s3.module";
+import { MediaController } from "./media.controller";
 import { MediaService } from "./media.service";
+import { ImageProcessorStrategy } from "./strategies/image-processor.strategy";
+import { VideoProcessorStrategy } from "./strategies/video-processor.strategy";

 @Module({
-  providers: [MediaService],
+  imports: [S3Module],
+  controllers: [MediaController],
+  providers: [MediaService, ImageProcessorStrategy, VideoProcessorStrategy],
   exports: [MediaService],
 })
 export class MediaModule {}
@@ -6,6 +6,9 @@ import ffmpeg from "fluent-ffmpeg";
 import sharp from "sharp";
 import { MediaService } from "./media.service";
+
+import { ImageProcessorStrategy } from "./strategies/image-processor.strategy";
+import { VideoProcessorStrategy } from "./strategies/video-processor.strategy";

 jest.mock("sharp");
 jest.mock("fluent-ffmpeg");
 jest.mock("node:fs/promises");
@@ -29,6 +32,8 @@ describe("MediaService", () => {
     const module: TestingModule = await Test.createTestingModule({
       providers: [
         MediaService,
+        ImageProcessorStrategy,
+        VideoProcessorStrategy,
         {
           provide: ConfigService,
           useValue: {
@@ -1,22 +1,18 @@
-import { readFile, unlink, writeFile } from "node:fs/promises";
-import { tmpdir } from "node:os";
-import { join } from "node:path";
 import { Readable } from "node:stream";
 import {
-  BadRequestException,
   Injectable,
   InternalServerErrorException,
   Logger,
 } from "@nestjs/common";
 import { ConfigService } from "@nestjs/config";
-import * as NodeClam from "clamscan";
-import ffmpeg from "fluent-ffmpeg";
-import sharp from "sharp";
-import { v4 as uuidv4 } from "uuid";
+import NodeClam from "clamscan";
 import type {
+  IMediaService,
   MediaProcessingResult,
   ScanResult,
-} from "./interfaces/media.interface";
+} from "../common/interfaces/media.interface";
+import { ImageProcessorStrategy } from "./strategies/image-processor.strategy";
+import { VideoProcessorStrategy } from "./strategies/video-processor.strategy";

 interface ClamScanner {
   scanStream(
@@ -25,12 +21,16 @@ interface ClamScanner {
 }

 @Injectable()
-export class MediaService {
+export class MediaService implements IMediaService {
   private readonly logger = new Logger(MediaService.name);
   private clamscan: ClamScanner | null = null;
   private isClamAvInitialized = false;

-  constructor(private readonly configService: ConfigService) {
+  constructor(
+    private readonly configService: ConfigService,
+    private readonly imageProcessor: ImageProcessorStrategy,
+    private readonly videoProcessor: VideoProcessorStrategy,
+  ) {
     this.initClamScan();
   }

@@ -83,83 +83,15 @@ export class MediaService {
   async processImage(
     buffer: Buffer,
     format: "webp" | "avif" = "webp",
+    resize?: { width?: number; height?: number },
   ): Promise<MediaProcessingResult> {
-    try {
-      let pipeline = sharp(buffer);
-      const metadata = await pipeline.metadata();
-
-      if (format === "webp") {
-        pipeline = pipeline.webp({ quality: 80, effort: 6 });
-      } else {
-        pipeline = pipeline.avif({ quality: 65, effort: 6 });
-      }
-
-      const processedBuffer = await pipeline.toBuffer();
-
-      return {
-        buffer: processedBuffer,
-        mimeType: `image/${format}`,
-        extension: format,
-        width: metadata.width,
-        height: metadata.height,
-        size: processedBuffer.length,
-      };
-    } catch (error) {
-      this.logger.error(`Error processing image: ${error.message}`);
-      throw new BadRequestException("Failed to process image");
-    }
+    return this.imageProcessor.process(buffer, { format, resize });
   }

   async processVideo(
     buffer: Buffer,
     format: "webm" | "av1" = "webm",
   ): Promise<MediaProcessingResult> {
-    const tempInput = join(tmpdir(), `${uuidv4()}.tmp`);
-    const tempOutput = join(
-      tmpdir(),
-      `${uuidv4()}.${format === "av1" ? "mp4" : "webm"}`,
-    );
-
-    try {
-      await writeFile(tempInput, buffer);
-
-      await new Promise<void>((resolve, reject) => {
-        let command = ffmpeg(tempInput);
-
-        if (format === "webm") {
-          command = command
-            .toFormat("webm")
-            .videoCodec("libvpx-vp9")
-            .audioCodec("libopus")
-            .outputOptions("-crf 30", "-b:v 0");
-        } else {
-          command = command
-            .toFormat("mp4")
-            .videoCodec("libaom-av1")
-            .audioCodec("libopus")
-            .outputOptions("-crf 34", "-b:v 0", "-strict experimental");
-        }
-
-        command
-          .on("end", () => resolve())
-          .on("error", (err) => reject(err))
-          .save(tempOutput);
-      });
-
-      const processedBuffer = await readFile(tempOutput);
-
-      return {
-        buffer: processedBuffer,
-        mimeType: format === "av1" ? "video/mp4" : "video/webm",
-        extension: format === "av1" ? "mp4" : "webm",
-        size: processedBuffer.length,
-      };
-    } catch (error) {
-      this.logger.error(`Error processing video: ${error.message}`);
-      throw new BadRequestException("Failed to process video");
-    } finally {
-      await unlink(tempInput).catch(() => {});
-      await unlink(tempOutput).catch(() => {});
-    }
+    return this.videoProcessor.process(buffer, { format });
   }
 }
55 backend/src/media/strategies/image-processor.strategy.ts (new file)
@@ -0,0 +1,55 @@
+import { BadRequestException, Injectable, Logger } from "@nestjs/common";
+import sharp from "sharp";
+import type { MediaProcessingResult } from "../../common/interfaces/media.interface";
+import type { IMediaProcessorStrategy } from "./media-processor.strategy";
+
+@Injectable()
+export class ImageProcessorStrategy implements IMediaProcessorStrategy {
+  private readonly logger = new Logger(ImageProcessorStrategy.name);
+
+  canHandle(mimeType: string): boolean {
+    return mimeType.startsWith("image/");
+  }
+
+  async process(
+    buffer: Buffer,
+    options: {
+      format: "webp" | "avif";
+      resize?: { width?: number; height?: number };
+    } = { format: "webp" },
+  ): Promise<MediaProcessingResult> {
+    try {
+      const { format, resize } = options;
+      let pipeline = sharp(buffer);
+
+      if (resize) {
+        pipeline = pipeline.resize(resize.width, resize.height, {
+          fit: "cover",
+          position: "center",
+        });
+      }
+
+      const metadata = await pipeline.metadata();
+
+      if (format === "webp") {
+        pipeline = pipeline.webp({ quality: 80, effort: 6 });
+      } else {
+        pipeline = pipeline.avif({ quality: 65, effort: 6 });
+      }
+
+      const processedBuffer = await pipeline.toBuffer();
+
+      return {
+        buffer: processedBuffer,
+        mimeType: `image/${format}`,
+        extension: format,
+        width: metadata.width,
+        height: metadata.height,
+        size: processedBuffer.length,
+      };
+    } catch (error) {
+      this.logger.error(`Error processing image: ${error.message}`);
+      throw new BadRequestException("Failed to process image");
+    }
+  }
+}
9 backend/src/media/strategies/media-processor.strategy.ts (new file)
@@ -0,0 +1,9 @@
+import type { MediaProcessingResult } from "../../common/interfaces/media.interface";
+
+export interface IMediaProcessorStrategy {
+  canHandle(mimeType: string): boolean;
+  process(
+    buffer: Buffer,
+    options?: Record<string, unknown>,
+  ): Promise<MediaProcessingResult>;
+}
71 backend/src/media/strategies/video-processor.strategy.ts (new file)
@@ -0,0 +1,71 @@
+import { readFile, unlink, writeFile } from "node:fs/promises";
+import { tmpdir } from "node:os";
+import { join } from "node:path";
+import { BadRequestException, Injectable, Logger } from "@nestjs/common";
+import ffmpeg from "fluent-ffmpeg";
+import { v4 as uuidv4 } from "uuid";
+import type { MediaProcessingResult } from "../../common/interfaces/media.interface";
+import type { IMediaProcessorStrategy } from "./media-processor.strategy";
+
+@Injectable()
+export class VideoProcessorStrategy implements IMediaProcessorStrategy {
+  private readonly logger = new Logger(VideoProcessorStrategy.name);
+
+  canHandle(mimeType: string): boolean {
+    return mimeType.startsWith("video/");
+  }
+
+  async process(
+    buffer: Buffer,
+    options: { format: "webm" | "av1" } = { format: "webm" },
+  ): Promise<MediaProcessingResult> {
+    const { format } = options;
+    const tempInput = join(tmpdir(), `${uuidv4()}.tmp`);
+    const tempOutput = join(
+      tmpdir(),
+      `${uuidv4()}.${format === "av1" ? "mp4" : "webm"}`,
+    );
+
+    try {
+      await writeFile(tempInput, buffer);
+
+      await new Promise<void>((resolve, reject) => {
+        let command = ffmpeg(tempInput);
+
+        if (format === "webm") {
+          command = command
+            .toFormat("webm")
+            .videoCodec("libvpx-vp9")
+            .audioCodec("libopus")
+            .outputOptions("-crf 30", "-b:v 0");
+        } else {
+          command = command
+            .toFormat("mp4")
+            .videoCodec("libaom-av1")
+            .audioCodec("libopus")
+            .outputOptions("-crf 34", "-b:v 0", "-strict experimental");
+        }
+
+        command
+          .on("end", () => resolve())
+          .on("error", (err) => reject(err))
+          .save(tempOutput);
+      });
+
+      const processedBuffer = await readFile(tempOutput);
+
+      return {
+        buffer: processedBuffer,
+        mimeType: format === "av1" ? "video/mp4" : "video/webm",
+        extension: format === "av1" ? "mp4" : "webm",
+        size: processedBuffer.length,
+      };
+    } catch (error) {
+      this.logger.error(`Error processing video: ${error.message}`);
+      throw new BadRequestException("Failed to process video");
+    } finally {
+      await unlink(tempInput).catch(() => {});
+      await unlink(tempOutput).catch(() => {});
+    }
+  }
+}
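MediaService now hands image and video work to whichever strategy accepts the MIME type via canHandle. A hypothetical dispatcher over the two strategies above, showing how the shared IMediaProcessorStrategy interface supports that selection:

import type { MediaProcessingResult } from "../../common/interfaces/media.interface";
import type { IMediaProcessorStrategy } from "./media-processor.strategy";

// Hypothetical helper: use the first strategy whose canHandle() accepts the MIME type.
async function processWithFirstMatch(
  strategies: IMediaProcessorStrategy[],
  buffer: Buffer,
  mimeType: string,
): Promise<MediaProcessingResult> {
  const strategy = strategies.find((s) => s.canHandle(mimeType));
  if (!strategy) throw new Error(`No processor registered for ${mimeType}`);
  return strategy.process(buffer);
}

// e.g. processWithFirstMatch([imageProcessor, videoProcessor], buffer, "image/png")
// routes images to ImageProcessorStrategy and "video/mp4" to VideoProcessorStrategy.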
@@ -1,4 +1,10 @@
-import { IsEnum, IsOptional, IsString, IsUUID } from "class-validator";
+import {
+  IsEnum,
+  IsOptional,
+  IsString,
+  IsUUID,
+  MaxLength,
+} from "class-validator";

 export enum ReportReason {
   INAPPROPRIATE = "inappropriate",
@@ -21,5 +27,6 @@ export class CreateReportDto {

   @IsOptional()
   @IsString()
+  @MaxLength(1000)
   description?: string;
 }
@@ -1,13 +1,13 @@
-import { Module } from "@nestjs/common";
+import { forwardRef, Module } from "@nestjs/common";
 import { AuthModule } from "../auth/auth.module";
-import { CryptoModule } from "../crypto/crypto.module";
-import { DatabaseModule } from "../database/database.module";
 import { ReportsController } from "./reports.controller";
 import { ReportsService } from "./reports.service";
+import { ReportsRepository } from "./repositories/reports.repository";

 @Module({
-  imports: [DatabaseModule, AuthModule, CryptoModule],
+  imports: [forwardRef(() => AuthModule)],
   controllers: [ReportsController],
-  providers: [ReportsService],
+  providers: [ReportsService, ReportsRepository],
+  exports: [ReportsRepository, ReportsService],
 })
 export class ReportsModule {}
@@ -1,56 +1,29 @@
 import { Test, TestingModule } from "@nestjs/testing";
-import { DatabaseService } from "../database/database.service";
-import { CreateReportDto } from "./dto/create-report.dto";
 import { ReportsService } from "./reports.service";
+import { ReportsRepository } from "./repositories/reports.repository";
 
 describe("ReportsService", () => {
   let service: ReportsService;
+  let repository: ReportsRepository;
 
-  const mockDb = {
-    insert: jest.fn(),
-    values: jest.fn(),
-    returning: jest.fn(),
-    select: jest.fn(),
-    from: jest.fn(),
-    orderBy: jest.fn(),
-    limit: jest.fn(),
-    offset: jest.fn(),
-    update: jest.fn(),
-    set: jest.fn(),
-    where: jest.fn(),
+  const mockReportsRepository = {
+    create: jest.fn(),
+    findAll: jest.fn(),
+    updateStatus: jest.fn(),
   };
 
   beforeEach(async () => {
     jest.clearAllMocks();
 
-    const chain = {
-      insert: jest.fn().mockReturnThis(),
-      values: jest.fn().mockReturnThis(),
-      returning: jest.fn().mockReturnThis(),
-      select: jest.fn().mockReturnThis(),
-      from: jest.fn().mockReturnThis(),
-      orderBy: jest.fn().mockReturnThis(),
-      limit: jest.fn().mockReturnThis(),
-      offset: jest.fn().mockReturnThis(),
-      update: jest.fn().mockReturnThis(),
-      set: jest.fn().mockReturnThis(),
-      where: jest.fn().mockReturnThis(),
-    };
-
-    const mockImplementation = () => Object.assign(Promise.resolve([]), chain);
-    for (const mock of Object.values(chain)) {
-      mock.mockImplementation(mockImplementation);
-    }
-    Object.assign(mockDb, chain);
-
     const module: TestingModule = await Test.createTestingModule({
       providers: [
         ReportsService,
-        { provide: DatabaseService, useValue: { db: mockDb } },
+        { provide: ReportsRepository, useValue: mockReportsRepository },
       ],
     }).compile();
 
     service = module.get<ReportsService>(ReportsService);
+    repository = module.get<ReportsRepository>(ReportsRepository);
   });
 
   it("should be defined", () => {
@@ -60,29 +33,37 @@ describe("ReportsService", () => {
   describe("create", () => {
     it("should create a report", async () => {
       const reporterId = "u1";
-      const data: CreateReportDto = { contentId: "c1", reason: "spam" };
-      mockDb.returning.mockResolvedValue([{ id: "r1", ...data, reporterId }]);
+      const data = { contentId: "c1", reason: "spam" } as const;
+      mockReportsRepository.create.mockResolvedValue({
+        id: "r1",
+        ...data,
+        reporterId,
+      });
 
       const result = await service.create(reporterId, data);
 
       expect(result.id).toBe("r1");
-      expect(mockDb.insert).toHaveBeenCalled();
+      expect(repository.create).toHaveBeenCalled();
     });
   });
 
   describe("findAll", () => {
     it("should return reports", async () => {
-      mockDb.offset.mockResolvedValue([{ id: "r1" }]);
+      mockReportsRepository.findAll.mockResolvedValue([{ id: "r1" }]);
       const result = await service.findAll(10, 0);
       expect(result).toHaveLength(1);
+      expect(repository.findAll).toHaveBeenCalledWith(10, 0);
     });
   });
 
   describe("updateStatus", () => {
     it("should update report status", async () => {
-      mockDb.returning.mockResolvedValue([{ id: "r1", status: "resolved" }]);
+      mockReportsRepository.updateStatus.mockResolvedValue([
+        { id: "r1", status: "resolved" },
+      ]);
      const result = await service.updateStatus("r1", "resolved");
      expect(result[0].status).toBe("resolved");
+      expect(repository.updateStatus).toHaveBeenCalledWith("r1", "resolved");
    });
  });
});
@@ -1,37 +1,26 @@
 import { Injectable, Logger } from "@nestjs/common";
-import { desc, eq } from "drizzle-orm";
-import { DatabaseService } from "../database/database.service";
-import { reports } from "../database/schemas";
 import { CreateReportDto } from "./dto/create-report.dto";
+import { ReportsRepository } from "./repositories/reports.repository";
 
 @Injectable()
 export class ReportsService {
   private readonly logger = new Logger(ReportsService.name);
 
-  constructor(private readonly databaseService: DatabaseService) {}
+  constructor(private readonly reportsRepository: ReportsRepository) {}
 
   async create(reporterId: string, data: CreateReportDto) {
     this.logger.log(`Creating report from user ${reporterId}`);
-    const [newReport] = await this.databaseService.db
-      .insert(reports)
-      .values({
-        reporterId,
-        contentId: data.contentId,
-        tagId: data.tagId,
-        reason: data.reason,
-        description: data.description,
-      })
-      .returning();
-    return newReport;
+    return await this.reportsRepository.create({
+      reporterId,
+      contentId: data.contentId,
+      tagId: data.tagId,
+      reason: data.reason,
+      description: data.description,
+    });
   }
 
   async findAll(limit: number, offset: number) {
-    return await this.databaseService.db
-      .select()
-      .from(reports)
-      .orderBy(desc(reports.createdAt))
-      .limit(limit)
-      .offset(offset);
+    return await this.reportsRepository.findAll(limit, offset);
   }
 
   async updateStatus(
@@ -39,10 +28,6 @@ export class ReportsService {
     status: "pending" | "reviewed" | "resolved" | "dismissed",
   ) {
     this.logger.log(`Updating report ${id} status to ${status}`);
-    return await this.databaseService.db
-      .update(reports)
-      .set({ status, updatedAt: new Date() })
-      .where(eq(reports.id, id))
-      .returning();
+    return await this.reportsRepository.updateStatus(id, status);
   }
 }
backend/src/reports/repositories/reports.repository.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
+import { Injectable } from "@nestjs/common";
+import { desc, eq, lte } from "drizzle-orm";
+import { DatabaseService } from "../../database/database.service";
+import { reports } from "../../database/schemas";
+
+@Injectable()
+export class ReportsRepository {
+  constructor(private readonly databaseService: DatabaseService) {}
+
+  async create(data: {
+    reporterId: string;
+    contentId?: string;
+    tagId?: string;
+    reason: "inappropriate" | "spam" | "copyright" | "other";
+    description?: string;
+  }) {
+    const [newReport] = await this.databaseService.db
+      .insert(reports)
+      .values(data)
+      .returning();
+    return newReport;
+  }
+
+  async findAll(limit: number, offset: number) {
+    return await this.databaseService.db
+      .select()
+      .from(reports)
+      .orderBy(desc(reports.createdAt))
+      .limit(limit)
+      .offset(offset);
+  }
+
+  async updateStatus(
+    id: string,
+    status: "pending" | "reviewed" | "resolved" | "dismissed",
+  ) {
+    return await this.databaseService.db
+      .update(reports)
+      .set({ status, updatedAt: new Date() })
+      .where(eq(reports.id, id))
+      .returning();
+  }
+
+  async purgeObsolete(now: Date) {
+    return await this.databaseService.db
+      .delete(reports)
+      .where(lte(reports.expiresAt, now))
+      .returning();
+  }
+}
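With the Drizzle queries moved into ReportsRepository, callers only deal with its small async API; purgeObsolete also suggests reports carry an expiresAt column used for periodic cleanup. A sketch of a possible consumer (this provider is illustrative and not part of the diff):

import { Injectable } from "@nestjs/common";
import { ReportsRepository } from "./repositories/reports.repository";

// Hypothetical consumer: not in the diff, shown only to illustrate the API surface.
@Injectable()
export class ReportsMaintenance {
  constructor(private readonly reports: ReportsRepository) {}

  async run() {
    const purged = await this.reports.purgeObsolete(new Date()); // rows whose expiresAt <= now
    const latest = await this.reports.findAll(20, 0);            // newest 20 reports
    return { purged: purged.length, latest: latest.length };
  }
}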
@@ -7,7 +7,7 @@ jest.mock("minio");
 
 describe("S3Service", () => {
   let service: S3Service;
-  let _configService: ConfigService;
+  let configService: ConfigService;
   // biome-ignore lint/suspicious/noExplicitAny: Fine for testing purposes
   let minioClient: any;
 
@@ -42,7 +42,7 @@ describe("S3Service", () => {
     }).compile();
 
     service = module.get<S3Service>(S3Service);
-    _configService = module.get<ConfigService>(ConfigService);
+    configService = module.get<ConfigService>(ConfigService);
   });
 
   it("should be defined", () => {
@@ -185,35 +185,39 @@ describe("S3Service", () => {
     });
   });
 
-  describe("moveFile", () => {
-    it("should move file within default bucket", async () => {
-      const source = "source.txt";
-      const dest = "dest.txt";
-      await service.moveFile(source, dest);
-      expect(minioClient.copyObject).toHaveBeenCalledWith(
-        "memegoat",
-        dest,
-        "/memegoat/source.txt",
-        expect.any(Minio.CopyConditions),
-      );
-      expect(minioClient.removeObject).toHaveBeenCalledWith("memegoat", source);
+  describe("getPublicUrl", () => {
+    it("should use API_URL if provided", () => {
+      (configService.get as jest.Mock).mockImplementation((key: string) => {
+        if (key === "API_URL") return "https://api.test.com";
+        return null;
+      });
+      const url = service.getPublicUrl("test.webp");
+      expect(url).toBe("https://api.test.com/media/test.webp");
     });
 
-    it("should move file between different buckets", async () => {
-      const source = "source.txt";
-      const dest = "dest.txt";
-      const sBucket = "source-bucket";
-      const dBucket = "dest-bucket";
-      await service.moveFile(source, dest, sBucket, dBucket);
-      expect(minioClient.copyObject).toHaveBeenCalledWith(
-        dBucket,
-        dest,
-        `/${sBucket}/${source}`,
-        expect.any(Minio.CopyConditions),
+    it("should use DOMAIN_NAME and PORT for localhost", () => {
+      (configService.get as jest.Mock).mockImplementation(
+        (key: string, def: unknown) => {
+          if (key === "API_URL") return null;
+          if (key === "DOMAIN_NAME") return "localhost";
+          if (key === "PORT") return 3000;
+          return def;
+        },
       );
-      expect(minioClient.removeObject).toHaveBeenCalledWith(sBucket, source);
+      const url = service.getPublicUrl("test.webp");
+      expect(url).toBe("http://localhost:3000/media/test.webp");
+    });
+
+    it("should use api.DOMAIN_NAME for production", () => {
+      (configService.get as jest.Mock).mockImplementation(
+        (key: string, def: unknown) => {
+          if (key === "API_URL") return null;
+          if (key === "DOMAIN_NAME") return "memegoat.fr";
+          return def;
+        },
+      );
+      const url = service.getPublicUrl("test.webp");
+      expect(url).toBe("https://api.memegoat.fr/media/test.webp");
     });
   });
 });
@@ -1,9 +1,10 @@
 import { Injectable, Logger, OnModuleInit } from "@nestjs/common";
 import { ConfigService } from "@nestjs/config";
 import * as Minio from "minio";
+import type { IStorageService } from "../common/interfaces/storage.interface";
 
 @Injectable()
-export class S3Service implements OnModuleInit {
+export class S3Service implements OnModuleInit, IStorageService {
   private readonly logger = new Logger(S3Service.name);
   private minioClient: Minio.Client;
   private readonly bucketName: string;
@@ -53,6 +54,7 @@ export class S3Service implements OnModuleInit {
         ...metaData,
         "Content-Type": mimeType,
       });
+      this.logger.log(`File uploaded successfully: ${fileName} to ${bucketName}`);
       return fileName;
     } catch (error) {
       this.logger.error(`Error uploading file to ${bucketName}: ${error.message}`);
@@ -112,6 +114,7 @@ export class S3Service implements OnModuleInit {
   async deleteFile(fileName: string, bucketName: string = this.bucketName) {
     try {
       await this.minioClient.removeObject(bucketName, fileName);
+      this.logger.log(`File deleted successfully: ${fileName} from ${bucketName}`);
     } catch (error) {
       this.logger.error(
         `Error deleting file from ${bucketName}: ${error.message}`,
@@ -154,4 +157,22 @@ export class S3Service implements OnModuleInit {
       throw error;
     }
   }
+
+  getPublicUrl(storageKey: string): string {
+    const apiUrl = this.configService.get<string>("API_URL");
+    const domain = this.configService.get<string>("DOMAIN_NAME", "localhost");
+    const port = this.configService.get<number>("PORT", 3000);
+
+    let baseUrl: string;
+
+    if (apiUrl) {
+      baseUrl = apiUrl.replace(/\/$/, "");
+    } else if (domain === "localhost" || domain === "127.0.0.1") {
+      baseUrl = `http://${domain}:${port}`;
+    } else {
+      baseUrl = `https://api.${domain}`;
+    }
+
+    return `${baseUrl}/media/${storageKey}`;
+  }
 }
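getPublicUrl resolves the public media base URL in three steps: an explicit API_URL wins (trailing slash stripped), a localhost/127.0.0.1 DOMAIN_NAME yields http://<domain>:<port>, and any other domain is assumed to be fronted by an api. subdomain over HTTPS. Restated outside Nest for quick reference (a plain-function sketch, not the service itself):

// Mirrors the branching in S3Service.getPublicUrl above.
function publicMediaUrl(
  storageKey: string,
  apiUrl: string | undefined,
  domain = "localhost",
  port = 3000,
): string {
  let baseUrl: string;
  if (apiUrl) {
    baseUrl = apiUrl.replace(/\/$/, "");
  } else if (domain === "localhost" || domain === "127.0.0.1") {
    baseUrl = `http://${domain}:${port}`;
  } else {
    baseUrl = `https://api.${domain}`;
  }
  return `${baseUrl}/media/${storageKey}`;
}

// publicMediaUrl("test.webp", "https://api.test.com")       -> "https://api.test.com/media/test.webp"
// publicMediaUrl("test.webp", undefined, "localhost", 3000) -> "http://localhost:3000/media/test.webp"
// publicMediaUrl("test.webp", undefined, "memegoat.fr")     -> "https://api.memegoat.fr/media/test.webp"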
backend/src/sessions/repositories/sessions.repository.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
+import { Injectable } from "@nestjs/common";
+import { and, eq, lte } from "drizzle-orm";
+import { DatabaseService } from "../../database/database.service";
+import { sessions } from "../../database/schemas";
+
+@Injectable()
+export class SessionsRepository {
+  constructor(private readonly databaseService: DatabaseService) {}
+
+  async create(data: {
+    userId: string;
+    refreshToken: string;
+    userAgent?: string;
+    ipHash?: string | null;
+    expiresAt: Date;
+  }) {
+    const [session] = await this.databaseService.db
+      .insert(sessions)
+      .values(data)
+      .returning();
+    return session;
+  }
+
+  async findValidByRefreshToken(refreshToken: string) {
+    const result = await this.databaseService.db
+      .select()
+      .from(sessions)
+      .where(
+        and(eq(sessions.refreshToken, refreshToken), eq(sessions.isValid, true)),
+      )
+      .limit(1);
+    return result[0] || null;
+  }
+
+  async update(sessionId: string, data: Record<string, unknown>) {
+    const [updatedSession] = await this.databaseService.db
+      .update(sessions)
+      .set({ ...data, updatedAt: new Date() })
+      .where(eq(sessions.id, sessionId))
+      .returning();
+    return updatedSession;
+  }
+
+  async revoke(sessionId: string) {
+    await this.databaseService.db
+      .update(sessions)
+      .set({ isValid: false, updatedAt: new Date() })
+      .where(eq(sessions.id, sessionId));
+  }
+
+  async revokeAllByUserId(userId: string) {
+    await this.databaseService.db
+      .update(sessions)
+      .set({ isValid: false, updatedAt: new Date() })
+      .where(eq(sessions.userId, userId));
+  }
+
+  async purgeExpired(now: Date) {
+    return await this.databaseService.db
+      .delete(sessions)
+      .where(lte(sessions.expiresAt, now))
+      .returning();
+  }
+}
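SessionsRepository covers the whole refresh-token lifecycle: create a session, look a token up only while it is still valid, mutate it with update, revoke one or all sessions, and purge expired rows. A hedged sketch of a rotation flow that could be built on top of it (the token generation and 7-day lifetime are assumptions, not taken from this diff):

import { randomUUID } from "node:crypto";
import { SessionsRepository } from "./repositories/sessions.repository";

// Hypothetical rotation flow: validate the presented token, then rotate it.
async function rotateRefreshToken(repo: SessionsRepository, presentedToken: string) {
  const session = await repo.findValidByRefreshToken(presentedToken);
  if (!session) return null; // unknown or already-invalidated token

  const refreshToken = randomUUID(); // placeholder; the real app may store a derived/hashed value
  return await repo.update(session.id, {
    refreshToken,
    expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000), // assumed 7-day lifetime
  });
}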
@@ -1,11 +1,9 @@
 import { Module } from "@nestjs/common";
-import { CryptoModule } from "../crypto/crypto.module";
-import { DatabaseModule } from "../database/database.module";
+import { SessionsRepository } from "./repositories/sessions.repository";
 import { SessionsService } from "./sessions.service";
 
 @Module({
-  imports: [DatabaseModule, CryptoModule],
-  providers: [SessionsService],
-  exports: [SessionsService],
+  providers: [SessionsService, SessionsRepository],
+  exports: [SessionsService, SessionsRepository],
 })
 export class SessionsModule {}
Some files were not shown because too many files have changed in this diff.