Merge pull request 'Fix attempt on media url' (#13) from dev into prod
Some checks failed
Backend Tests / test (push) Successful in 1m11s
Deploy to Production / Validate Build & Lint (backend) (push) Successful in 1m14s
Deploy to Production / Validate Build & Lint (documentation) (push) Failing after 29s
Lint / lint (backend) (push) Successful in 1m8s
Lint / lint (documentation) (push) Successful in 1m8s
Deploy to Production / Validate Build & Lint (frontend) (push) Failing after 29s
Deploy to Production / Deploy to Production (push) Has been skipped
Lint / lint (frontend) (push) Successful in 1m7s
Reviewed-on: #13
This commit was merged in pull request #13.
@@ -31,6 +31,6 @@ jobs:
          restore-keys: |
            ${{ runner.os }}-pnpm-store-
      - name: Install dependencies
-        run: pnpm install --frozen-lockfile
+        run: pnpm install --frozen-lockfile --prefer-offline
      - name: Run Backend Tests
        run: pnpm -F @memegoat/backend test
@@ -40,8 +40,19 @@ jobs:
          restore-keys: |
            ${{ runner.os }}-pnpm-store-
 
+      - name: Cache Next.js build
+        if: matrix.component != 'backend'
+        uses: actions/cache@v4
+        with:
+          path: ${{ matrix.component }}/.next/cache
+          # Key based on the lockfile and the component's source files
+          key: ${{ runner.os }}-nextjs-${{ matrix.component }}-${{ hashFiles('**/pnpm-lock.yaml') }}-${{ hashFiles(concat(matrix.component, '/**/*.[jt]s'), concat(matrix.component, '/**/*.[jt]sx')) }}
+          restore-keys: |
+            ${{ runner.os }}-nextjs-${{ matrix.component }}-${{ hashFiles('**/pnpm-lock.yaml') }}-
+            ${{ runner.os }}-nextjs-${{ matrix.component }}-
+
      - name: Install dependencies
-        run: pnpm install --frozen-lockfile
+        run: pnpm install --frozen-lockfile --prefer-offline
 
      - name: Lint ${{ matrix.component }}
        run: pnpm -F @memegoat/${{ matrix.component }} lint
@@ -38,6 +38,6 @@ jobs:
          restore-keys: |
            ${{ runner.os }}-pnpm-store-
      - name: Install dependencies
-        run: pnpm install --frozen-lockfile
+        run: pnpm install --frozen-lockfile --prefer-offline
      - name: Lint ${{ matrix.component }}
        run: pnpm -F @memegoat/${{ matrix.component }} lint
@@ -1,4 +1,5 @@
-FROM node:22-slim AS base
+# syntax=docker/dockerfile:1
+FROM node:22-alpine AS base
 ENV PNPM_HOME="/pnpm"
 ENV PATH="$PNPM_HOME:$PATH"
 RUN corepack enable && corepack prepare pnpm@latest --activate
@@ -9,10 +10,17 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-RUN pnpm install --no-frozen-lockfile
+
+# Use the pnpm cache with a frozen (lockfile-pinned) install
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
 COPY . .
-# Reinstall after COPY to ensure all lifecycle scripts and links are correct
-RUN pnpm install --no-frozen-lockfile
+
+# Second pass with the cache for lifecycle scripts/links
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
 RUN pnpm run --filter @memegoat/backend build
 RUN pnpm deploy --filter=@memegoat/backend --prod --legacy /app
 RUN cp -r backend/dist /app/dist
@@ -100,6 +100,7 @@ export class ContentsService {
     // 3. Upload to S3
     const key = `contents/${userId}/${Date.now()}-${uuidv4()}.${processed.extension}`;
     await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
+    this.logger.log(`File uploaded successfully to S3: ${key}`);
 
     // 4. Create the record in the database
     return await this.create(userId, {
@@ -28,12 +28,13 @@ describe("MediaController", () => {
   });
 
   describe("getFile", () => {
-    it("should stream the file and set headers", async () => {
+    it("should stream the file and set headers with path containing slashes", async () => {
       const res = {
         setHeader: jest.fn(),
       } as any;
       const stream = new Readable();
       stream.pipe = jest.fn();
+      const key = "contents/user-id/test.webp";
 
       mockS3Service.getFileInfo.mockResolvedValue({
         size: 100,
@@ -41,8 +42,9 @@ describe("MediaController", () => {
       });
       mockS3Service.getFile.mockResolvedValue(stream);
 
-      await controller.getFile("test.webp", res);
+      await controller.getFile(key, res);
 
+      expect(mockS3Service.getFileInfo).toHaveBeenCalledWith(key);
       expect(res.setHeader).toHaveBeenCalledWith("Content-Type", "image/webp");
       expect(res.setHeader).toHaveBeenCalledWith("Content-Length", 100);
       expect(stream.pipe).toHaveBeenCalledWith(res);
@@ -9,13 +9,15 @@ export class MediaController {
   @Get("*key")
   async getFile(@Param("key") key: string, @Res() res: Response) {
     try {
-      const stats = await this.s3Service.getFileInfo(key);
+      const stats = (await this.s3Service.getFileInfo(key)) as any;
       const stream = await this.s3Service.getFile(key);
 
-      res.setHeader(
-        "Content-Type",
-        stats.metaData["content-type"] || "application/octet-stream",
-      );
+      const contentType =
+        stats.metaData?.["content-type"] ||
+        stats.metadata?.["content-type"] ||
+        "application/octet-stream";
+
+      res.setHeader("Content-Type", contentType);
       res.setHeader("Content-Length", stats.size);
       res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
 
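For context, the change above replaces a single metaData lookup (which could throw when no metadata was returned) with an optional-chained fallback across both metadata spellings. A minimal standalone sketch of that fallback, using a hypothetical resolveContentType helper and a MinIO-like stat shape; names are illustrative, not part of the repository:

// Illustrative sketch only: mirrors the fallback chain in the hunk above.
// Depending on the client/typings, user metadata may be exposed as `metaData`
// or `metadata`, so both spellings are checked before the generic default.
type StatLike = {
  size: number;
  metaData?: Record<string, string>;
  metadata?: Record<string, string>;
};

function resolveContentType(stats: StatLike): string {
  return (
    stats.metaData?.["content-type"] ||
    stats.metadata?.["content-type"] ||
    "application/octet-stream"
  );
}

// resolveContentType({ size: 100, metaData: { "content-type": "image/webp" } })
//   → "image/webp"
// resolveContentType({ size: 100 })
//   → "application/octet-stream"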
@@ -7,7 +7,7 @@ jest.mock("minio");
 
 describe("S3Service", () => {
   let service: S3Service;
-  let _configService: ConfigService;
+  let configService: ConfigService;
   // biome-ignore lint/suspicious/noExplicitAny: Fine for testing purposes
   let minioClient: any;
 
@@ -42,7 +42,7 @@ describe("S3Service", () => {
     }).compile();
 
     service = module.get<S3Service>(S3Service);
-    _configService = module.get<ConfigService>(ConfigService);
+    configService = module.get<ConfigService>(ConfigService);
   });
 
   it("should be defined", () => {
@@ -185,35 +185,39 @@ describe("S3Service", () => {
     });
   });
 
-  describe("moveFile", () => {
-    it("should move file within default bucket", async () => {
-      const source = "source.txt";
-      const dest = "dest.txt";
-      await service.moveFile(source, dest);
-
-      expect(minioClient.copyObject).toHaveBeenCalledWith(
-        "memegoat",
-        dest,
-        "/memegoat/source.txt",
-        expect.any(Minio.CopyConditions),
-      );
-      expect(minioClient.removeObject).toHaveBeenCalledWith("memegoat", source);
-    });
-
-    it("should move file between different buckets", async () => {
-      const source = "source.txt";
-      const dest = "dest.txt";
-      const sBucket = "source-bucket";
-      const dBucket = "dest-bucket";
-      await service.moveFile(source, dest, sBucket, dBucket);
-
-      expect(minioClient.copyObject).toHaveBeenCalledWith(
-        dBucket,
-        dest,
-        `/${sBucket}/${source}`,
-        expect.any(Minio.CopyConditions),
-      );
-      expect(minioClient.removeObject).toHaveBeenCalledWith(sBucket, source);
+  describe("getPublicUrl", () => {
+    it("should use API_URL if provided", () => {
+      (configService.get as jest.Mock).mockImplementation((key: string) => {
+        if (key === "API_URL") return "https://api.test.com";
+        return null;
+      });
+      const url = service.getPublicUrl("test.webp");
+      expect(url).toBe("https://api.test.com/media/test.webp");
+    });
+
+    it("should use DOMAIN_NAME and PORT for localhost", () => {
+      (configService.get as jest.Mock).mockImplementation(
+        (key: string, def: any) => {
+          if (key === "API_URL") return null;
+          if (key === "DOMAIN_NAME") return "localhost";
+          if (key === "PORT") return 3000;
+          return def;
+        },
+      );
+      const url = service.getPublicUrl("test.webp");
+      expect(url).toBe("http://localhost:3000/media/test.webp");
+    });
+
+    it("should use api.DOMAIN_NAME for production", () => {
+      (configService.get as jest.Mock).mockImplementation(
+        (key: string, def: any) => {
+          if (key === "API_URL") return null;
+          if (key === "DOMAIN_NAME") return "memegoat.fr";
+          return def;
+        },
+      );
+      const url = service.getPublicUrl("test.webp");
+      expect(url).toBe("https://api.memegoat.fr/media/test.webp");
     });
   });
 });
@@ -158,17 +158,19 @@ export class S3Service implements OnModuleInit, IStorageService {
 
   getPublicUrl(storageKey: string): string {
     const apiUrl = this.configService.get<string>("API_URL");
-    if (apiUrl) {
-      return `${apiUrl.replace(/\/$/, "")}/media/${storageKey}`;
-    }
 
     const domain = this.configService.get<string>("DOMAIN_NAME", "localhost");
     const port = this.configService.get<number>("PORT", 3000);
 
-    if (domain === "localhost" || domain === "127.0.0.1") {
-      return `http://${domain}:${port}/media/${storageKey}`;
+    let baseUrl: string;
+    if (apiUrl) {
+      baseUrl = apiUrl.replace(/\/$/, "");
+    } else if (domain === "localhost" || domain === "127.0.0.1") {
+      baseUrl = `http://${domain}:${port}`;
+    } else {
+      baseUrl = `https://api.${domain}`;
     }
 
-    return `https://api.${domain}/media/${storageKey}`;
+    return `${baseUrl}/media/${storageKey}`;
   }
 }
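The refactor above collapses the three return statements into a single precedence chain: an explicit API_URL wins, localhost/127.0.0.1 falls back to http with the configured port, and any other domain is assumed to be served from an api. subdomain over https, which is exactly what the new getPublicUrl tests assert. A minimal standalone sketch of that resolution order; the resolveMediaUrl helper and env shape are illustrative, not part of the repository:

// Illustrative only: mirrors the precedence implemented by getPublicUrl above.
interface UrlEnv {
  API_URL?: string;     // e.g. "https://api.test.com"
  DOMAIN_NAME?: string; // e.g. "localhost" or "memegoat.fr"
  PORT?: number;        // e.g. 3000
}

function resolveMediaUrl(env: UrlEnv, storageKey: string): string {
  const domain = env.DOMAIN_NAME ?? "localhost";
  const port = env.PORT ?? 3000;

  let baseUrl: string;
  if (env.API_URL) {
    baseUrl = env.API_URL.replace(/\/$/, ""); // explicit override wins
  } else if (domain === "localhost" || domain === "127.0.0.1") {
    baseUrl = `http://${domain}:${port}`;     // local development
  } else {
    baseUrl = `https://api.${domain}`;        // production convention
  }
  return `${baseUrl}/media/${storageKey}`;
}

// resolveMediaUrl({ API_URL: "https://api.test.com" }, "test.webp")
//   → "https://api.test.com/media/test.webp"
// resolveMediaUrl({ DOMAIN_NAME: "memegoat.fr" }, "test.webp")
//   → "https://api.memegoat.fr/media/test.webp"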
@@ -143,6 +143,7 @@ export class UsersService {
     // 3. Upload to S3
     const key = `avatars/${uuid}/${Date.now()}-${uuidv4()}.${processed.extension}`;
     await this.s3Service.uploadFile(key, processed.buffer, processed.mimeType);
+    this.logger.log(`Avatar uploaded successfully to S3: ${key}`);
 
     // 4. Update the database record
     const user = await this.update(uuid, { avatarUrl: key });
@@ -1,4 +1,4 @@
-# syntax=docker.io/docker/dockerfile:1
+# syntax=docker/dockerfile:1
 
 FROM node:22-alpine AS base
 ENV PNPM_HOME="/pnpm"
@@ -11,11 +11,20 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-RUN pnpm install --no-frozen-lockfile
+
+# Mount the pnpm cache
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
 COPY . .
-# Reinstall after COPY to ensure all lifecycle scripts and links are correct
-RUN pnpm install --no-frozen-lockfile
-RUN pnpm run --filter @memegoat/documentation build
+
+# Second pass with the cache for lifecycle scripts/links
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
+# Build with the Next.js cache
+RUN --mount=type=cache,id=next-docs-cache,target=/usr/src/app/documentation/.next/cache \
+    pnpm run --filter @memegoat/documentation build
 
 FROM node:22-alpine AS runner
 WORKDIR /app
@@ -1,4 +1,4 @@
-# syntax=docker.io/docker/dockerfile:1
+# syntax=docker/dockerfile:1
 
 FROM node:22-alpine AS base
 ENV PNPM_HOME="/pnpm"
@@ -11,11 +11,20 @@ COPY pnpm-lock.yaml pnpm-workspace.yaml package.json ./
 COPY backend/package.json ./backend/
 COPY frontend/package.json ./frontend/
 COPY documentation/package.json ./documentation/
-RUN pnpm install --no-frozen-lockfile
+
+# Mount the pnpm cache
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
 COPY . .
-# Reinstall after COPY to ensure all lifecycle scripts and links are correct
-RUN pnpm install --no-frozen-lockfile
-RUN pnpm run --filter @memegoat/frontend build
+
+# Second pass with the cache for lifecycle scripts/links
+RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
+    pnpm install --frozen-lockfile
+
+# Build with the Next.js cache
+RUN --mount=type=cache,id=next-cache,target=/usr/src/app/frontend/.next/cache \
+    pnpm run --filter @memegoat/frontend build
 
 FROM node:22-alpine AS runner
 WORKDIR /app