Refactor file saving logic and enhance logging
Revised the file saving process with better logging and error handling. Added groupId to the file saving parameters and strengthened the verification checks. Updated .gitignore to exclude the assets directory.
parent 711877bf60
commit a0f5c3dab6
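For context, the controller now hands the service a flat parameter map in which groupId is a plain UUID string rather than a wrapped array. Below is a minimal sketch of that contract; the values are placeholders and the commented-out call is illustrative, but the keys mirror the Params map built in FilesController and read by FilesService.save().

    // Sketch only: keys match what FilesService.save() reads from the map.
    const params = new Map<string, unknown>()
      .set("fileName", "manual.pdf")
      .set("groupId", "00000000-0000-0000-0000-000000000000") // must point at an existing FilesGroupTable row
      .set("uploadedBy", "00000000-0000-0000-0000-000000000001")
      .set("machineId", ["00000000-0000-0000-0000-000000000002"])
      .set("isDocumentation", false)
      .set("isRestricted", false);

    // const saved = await filesService.save(fileBuffer, params); // resolves to the inserted FilesTable row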
.gitignore (vendored)
@@ -2,6 +2,7 @@
 drizzle

 # compiled output
 dist
+assets/*
 tmp
 /out-tsc
@@ -73,7 +73,10 @@ export const FilesTable = pgTable("files", {
     })
     .notNull(),

-  groupId: p.uuid("group_id").references(() => FilesGroupTable.uuid),
+  groupId: p
+    .uuid("group_id")
+    .notNull()
+    .references(() => FilesGroupTable.uuid),

   fileSize: p.integer("file_size").notNull(),

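Because group_id is now NOT NULL and references FilesGroupTable.uuid, a FilesTable insert without a valid group is rejected. A minimal sketch of what an insert has to carry after this change: the values are placeholders, db stands for the drizzle instance returned by this.database.use() in FilesService, and existingGroup / mimeRow are assumed lookup results.

    // Sketch only: column names come from FilesService.save();
    // groupId must reference a row that already exists in FilesGroupTable.
    await db
      .insert(FilesTable)
      .values({
        fileName: "manual.pdf",
        groupId: existingGroup.uuid, // new requirement introduced by this commit
        checksum: "sha256-placeholder",
        extension: "pdf",
        fileSize: 1024,
        fileType: mimeRow.id,
        isRestricted: false,
        isDocumentation: false,
        uploadedBy: "00000000-0000-0000-0000-000000000001",
      })
      .returning();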
@@ -153,7 +156,7 @@ export const MachinesTable = pgTable("machines", {

 //TODO Many to Many table betwen File en Machine
 export const FilesForMachinesTable = pgTable("files_for_machines", {
-  id: p.uuid('id').primaryKey().notNull().defaultRandom(),
+  id: p.uuid("id").primaryKey().notNull().defaultRandom(),

   fileId: p
     .uuid("file_id")
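FilesForMachinesTable is the join table that FilesService.save() fills with one row per target machine. A minimal sketch of reading those links back; db again stands for this.database.use(), someMachineId is a placeholder, and eq comes from drizzle-orm as in the service.

    // Sketch only: list the file links recorded for one machine.
    const someMachineId = "00000000-0000-0000-0000-000000000002"; // placeholder
    const links = await db
      .select()
      .from(FilesForMachinesTable)
      .where(eq(FilesForMachinesTable.machineId, someMachineId));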
@@ -43,23 +43,32 @@ export class FilesController {
         const _isDocumentation = req.headers["is_documentation"] as string;
         const _isRestricted = req.headers["is_restricted"] as string;
         const _isAdmin = Boolean(req.headers["is_admin"] as string | boolean);
-        console.log(_fileName, _groupId, _uploadedBy, _machineId, _isDocumentation, _isRestricted, _isAdmin);
+        console.log(
+          _fileName,
+          _groupId,
+          _uploadedBy,
+          _machineId,
+          _isDocumentation,
+          _isRestricted,
+          _isAdmin,
+        );

         // Vérifier que les en-têtes nécessaires sont présents
         if (!_fileName || !_groupId || !_machineId) {
           throw new BadRequestException("Header(s) manquant(s)");
         }
+        console.log("Header found !");
         const machineId = Array(_machineId);

         const Params = new Map()
           .set("fileName", _fileName.toString())
-          .set("groupId", Array(JSON.parse(_groupId.toString())))
+          .set("groupId", _groupId.toString())
           .set("uploadedBy", _uploadedBy.toString())
           .set("machineId", Array(JSON.parse(machineId.toString())))
           .set("isDocumentation", false)
           .set("isRestricted", false);

-        console.log(Params);
+        console.log("Current params :\n", Params);

         //TODO Integrate a verification if the source is an admin, if that the case then it can define isDocumentation and isRestricted else throw in case of presence of those parameters.
         if (_isAdmin) {
@@ -67,16 +76,25 @@ export class FilesController {
           Params.set("isRestricted", Boolean(_isRestricted));
         }

-        return await this.filesService.save(fileBuffer, Params);
+        console.log("Executing save procedure...");
+        return res
+          // @ts-ignore
+          .status(HttpStatus.CREATED)
+          .send(await this.filesService.save(fileBuffer, Params));
       } catch (err) {
-        // @ts-ignore
-        return res.status(err.status || HttpStatus.INTERNAL_SERVER_ERROR).send(err)
+        console.error(err);
+        return res
+          // @ts-ignore
+          .status(err.status || HttpStatus.INTERNAL_SERVER_ERROR)
+          .send(err);
       }
     });

     req.on("error", (err) => {
-      // @ts-ignore
-      return res.status(err.status || HttpStatus.INTERNAL_SERVER_ERROR).send(err)
+      return res
+        // @ts-ignore
+        .status(err.status || HttpStatus.INTERNAL_SERVER_ERROR)
+        .send(err);
     });

     return;
@@ -9,9 +9,10 @@ import {
   FilesForMachinesTable,
   FilesGroupTable,
   FilesTable,
-  FilesTypeForMachine, FilesTypesTable,
-  MachinesTable
-} from 'apps/backend/src/app/db/schema';
+  FilesTypeForMachine,
+  FilesTypesTable,
+  MachinesTable,
+} from "apps/backend/src/app/db/schema";
 import { StorageService } from "apps/backend/src/app/storage/storage.service";
 import { data } from "autoprefixer";
 import { eq, ilike } from "drizzle-orm";
@@ -81,88 +82,91 @@ export class FilesService {

   //TODO save a file
   public async save(file: Buffer, data: Map<string, unknown>) {
-    try {
-      const _machineIds = data.get("machineId").toString().split(",");
-
-      const machinesIds = new Set<string>();
-      for (const machineId of _machineIds) {
-        console.log(
-          `Checking if machine with ID ${machineId} exist in the database...`,
-        );
-        const machineExists = await this.database
-          .use()
-          .select({
-            uuid: MachinesTable.id,
-          })
-          .from(MachinesTable)
-          .where(eq(MachinesTable.id, machineId))
-          .prepare("checkMachineExists")
-          .execute();
-
-        if (machineExists.length === 0) {
-          throw new NotFoundException(`Machine with ID "${machineId}" not found`);
-        }
-
-        machinesIds.add(machineExists[0].uuid);
-      }
-
-      const _group = data.get("groupId") as string;
-      // verify that the group exist in the database
-      const groupExists = await this.database
-        .use()
-        .select()
-        .from(FilesGroupTable)
-        .where(eq(FilesGroupTable.uuid, _group))
-        .prepare("checkGroupExists")
-        .execute();
-
-      if (groupExists.length === 0) {
-        throw new NotFoundException(`Group with ID "${_group}" not found`);
-      }
-
-      const saveResult = await this.storage.new(
-        data.get("fileName") as string,
-        file,
-        _machineIds,
-        Boolean(data.get("isDocumentation")),
-      );
-      console.log(saveResult);
-      const mimeId = await this.database.use()
-        .select()
-        .from(FilesTypesTable)
-        .where(eq(FilesTypesTable.mime, saveResult.fileType.mime))
-
-
-      const inserted = await this.database
-        .use()
-        .insert(FilesTable)
-        .values({
-          fileName: data.get("fileName") as string,
-          checksum: saveResult.fileChecksum,
-          extension: saveResult.fileType.extension,
-          fileSize: saveResult.fileSize,
-          fileType: mimeId[0].id,
-          isRestricted: Boolean(data.get("isRestricted")),
-          isDocumentation: Boolean(data.get("isDocumentation")),
-          uploadedBy: data.get("uploadedBy") as string,
-        })
-        .returning();
-      console.log(inserted);
-
-      for (const machineId of machinesIds) {
-        //TODO insert a link betwen fileId and MachineIds[]
-        const linkRow = await this.database
-          .use()
-          .insert(FilesForMachinesTable)
-          .values({
-            fileId: inserted[0].uuid,
-            machineId: machineId,
-          });
-      }
-
-      return inserted[0];
-    } catch (err) {
-      throw err;
-    }
+    const _machineIds = data.get("machineId").toString().split(",");
+
+    const machinesIds = new Set<string>();
+    for (const machineId of _machineIds) {
+      console.log(
+        `Checking if machine with ID ${machineId} exist in the database...`,
+      );
+      const machineExists = await this.database
+        .use()
+        .select({
+          uuid: MachinesTable.id,
+        })
+        .from(MachinesTable)
+        .where(eq(MachinesTable.id, machineId))
+        .prepare("checkMachineExists")
+        .execute();
+
+      if (machineExists.length === 0) {
+        throw new NotFoundException(`Machine with ID "${machineId}" not found`);
+      }
+
+      machinesIds.add(machineExists[0].uuid);
+    }
+
+    const _group = data.get("groupId") as string;
+    console.log("Linking to group :\n", _group);
+    if (!_group) {
+      throw new NotFoundException(`Group with ID "${_group}" not found`);
+    }
+    // verify that the group exist in the database
+    const groupExists = await this.database
+      .use()
+      .select()
+      .from(FilesGroupTable)
+      .where(eq(FilesGroupTable.uuid, _group))
+      .prepare("checkGroupExists")
+      .execute();
+
+    if (groupExists.length === 0) {
+      throw new NotFoundException(`Group with ID "${_group}" not found`);
+    }
+
+    const saveResult = await this.storage.new(
+      data.get("fileName") as string,
+      file,
+      _machineIds,
+      Boolean(data.get("isDocumentation")),
+    );
+    console.log(saveResult);
+    const mimeId = await this.database
+      .use()
+      .select()
+      .from(FilesTypesTable)
+      .where(eq(FilesTypesTable.mime, saveResult.fileType.mime));
+
+    const inserted = await this.database
+      .use()
+      .insert(FilesTable)
+      .values({
+        fileName: data.get("fileName") as string,
+        groupId: groupExists[0].uuid,
+        checksum: saveResult.fileChecksum,
+        extension: saveResult.fileType.extension,
+        fileSize: saveResult.fileSize,
+        fileType: mimeId[0].id,
+        isRestricted: Boolean(data.get("isRestricted")),
+        isDocumentation: Boolean(data.get("isDocumentation")),
+        uploadedBy: data.get("uploadedBy") as string,
+      })
+      .returning();
+    console.log(inserted);
+
+    for (const machineId of machinesIds) {
+      //TODO insert a link betwen fileId and MachineIds[]
+      console.log(
+        `Append file ${inserted[0].fileName} for machine : "${machineId}"`,
+      );
+      const linkRow = await this.database
+        .use()
+        .insert(FilesForMachinesTable)
+        .values({
+          fileId: inserted[0].uuid,
+          machineId: machineId,
+        });
+    }
+    return inserted[0];
   }
 }
@@ -35,6 +35,7 @@ export class StorageService {
   private async saveFile(fileName: string, file: Buffer): Promise<void> {
     try {
       await writeFile(join(process.cwd(), "assets/", fileName), file, "utf8");
+      console.log(`File "${fileName}" saved.`);
     } catch (err) {
       console.error(err);
       throw new InternalServerErrorException("File save failed !");