chore(server): Store generated files (thumbnails, encoded video) in subdirectories (#4112)

* save thumbnails in subdirectories

* migration job, migrate assets and face thumbnails

* fix tests

* directory depth of two instead of three

* cleanup empty dirs after migration

* clean up empty dirs after migration, migrate people without assetId

* add job card for new migration job

* fix removeEmptyDirs race condition because of missing await

* cleanup empty directories after asset deletion

* move ensurePath to storage core

* rename jobs

* remove unnecessary property of IEntityJob

* use updated person getById, minor refactoring

* ensure that directory cleanup doesn't interfere with migration

* better description for job in ui

* fix removal of directories when migration is done

* cleanup empty folders at start of migration

* fix: actually persist concurrency setting

* add comment explaining regex

* chore: cleanup

---------

Co-authored-by: Jason Rasmussen <jrasm91@gmail.com>
Author: Daniel Dietzler
Date: 2023-09-25 17:07:21 +02:00
Committed by: GitHub
Parent: 07069c3b1e
Commit: 3053cbd4c8
36 changed files with 310 additions and 102 deletions
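
The diffs below reflect the new on-disk layout: generated files are written into a two-level subdirectory derived from the file's base name, so person-1.jpeg now lands in .../pe/rs/person-1.jpeg instead of directly in the owner's thumbnail folder. A minimal sketch of that derivation, assuming the prefix is simply the first four characters of the file name split into two two-character folders (the helper name is illustrative, not the project's API):

// Illustrative sketch only: compute the nested path used by the new layout.
import { join } from 'path';

export function nestedPath(baseFolder: string, fileName: string): string {
  const first = fileName.slice(0, 2); // 'pe' for 'person-1.jpeg'
  const second = fileName.slice(2, 4); // 'rs' for 'person-1.jpeg'
  return join(baseFolder, first, second, fileName);
}

// nestedPath('upload/thumbs/user-id', 'person-1.jpeg')
//   => 'upload/thumbs/user-id/pe/rs/person-1.jpeg'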


@@ -307,14 +307,14 @@ describe(FacialRecognitionService.name, () => {
       await sut.handleGenerateFaceThumbnail(face.middle);

       expect(assetMock.getByIds).toHaveBeenCalledWith(['asset-1']);
-      expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id');
+      expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs/user-id/pe/rs');
       expect(mediaMock.crop).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg', {
         left: 95,
         top: 95,
         width: 110,
         height: 110,
       });
-      expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/person-1.jpeg', {
+      expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
         format: 'jpeg',
         size: 250,
         quality: 80,
@@ -323,7 +323,7 @@ describe(FacialRecognitionService.name, () => {
       expect(personMock.update).toHaveBeenCalledWith({
         faceAssetId: 'asset-1',
         id: 'person-1',
-        thumbnailPath: 'upload/thumbs/user-id/person-1.jpeg',
+        thumbnailPath: 'upload/thumbs/user-id/pe/rs/person-1.jpeg',
       });
     });

@@ -338,7 +338,7 @@ describe(FacialRecognitionService.name, () => {
         width: 510,
         height: 510,
       });
-      expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/person-1.jpeg', {
+      expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
         format: 'jpeg',
         size: 250,
         quality: 80,
@@ -357,7 +357,7 @@ describe(FacialRecognitionService.name, () => {
         width: 202,
         height: 202,
       });
-      expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/person-1.jpeg', {
+      expect(mediaMock.resize).toHaveBeenCalledWith(croppedFace, 'upload/thumbs/user-id/pe/rs/person-1.jpeg', {
         format: 'jpeg',
         size: 250,
         quality: 80,
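
The spec hunks above cover the new write paths; the commit message also mentions cleaning up empty directories after migration and a removeEmptyDirs race caused by a missing await, but that code is not part of this excerpt. A minimal sketch of the bug class only, with hypothetical names and no claim to match the project's implementation:

// Hypothetical sketch: recursively delete empty directories. The point from the
// commit message is that the recursive calls must be awaited before the parent's
// emptiness check; without the await the parent is inspected too early and left behind.
// (A real cleanup job would presumably keep the top-level storage folders.)
import { promises as fs } from 'fs';
import { join } from 'path';

export async function removeEmptyDirs(folder: string): Promise<void> {
  const entries = await fs.readdir(folder, { withFileTypes: true });

  await Promise.all(
    entries.filter((entry) => entry.isDirectory()).map((entry) => removeEmptyDirs(join(folder, entry.name))),
  );

  if ((await fs.readdir(folder)).length === 0) {
    await fs.rmdir(folder);
  }
}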


@@ -1,5 +1,4 @@
 import { Inject, Logger } from '@nestjs/common';
-import { join } from 'path';
 import { IAssetRepository, WithoutProperty } from '../asset';
 import { usePagination } from '../domain.util';
 import { IBaseJob, IEntityJob, IFaceThumbnailJob, IJobRepository, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
@@ -13,8 +12,8 @@ import { AssetFaceId, IFaceRepository } from './face.repository';
 export class FacialRecognitionService {
   private logger = new Logger(FacialRecognitionService.name);
-  private storageCore = new StorageCore();
   private configCore: SystemConfigCore;
+  private storageCore: StorageCore;

   constructor(
     @Inject(IAssetRepository) private assetRepository: IAssetRepository,
@@ -28,6 +27,7 @@ export class FacialRecognitionService {
     @Inject(IStorageRepository) private storageRepository: IStorageRepository,
   ) {
     this.configCore = new SystemConfigCore(configRepository);
+    this.storageCore = new StorageCore(storageRepository);
   }

   async handleQueueRecognizeFaces({ force }: IBaseJob) {
@@ -117,6 +117,21 @@
     return true;
   }

+  async handlePersonMigration({ id }: IEntityJob) {
+    const person = await this.personRepository.getById(id);
+    if (!person) {
+      return false;
+    }
+
+    const path = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, person.ownerId, `${id}.jpeg`);
+    if (person.thumbnailPath && person.thumbnailPath !== path) {
+      await this.storageRepository.moveFile(person.thumbnailPath, path);
+      await this.personRepository.update({ id, thumbnailPath: path });
+    }
+
+    return true;
+  }
+
   async handleGenerateFaceThumbnail(data: IFaceThumbnailJob) {
     const { machineLearning } = await this.configCore.getConfig();
     if (!machineLearning.enabled || !machineLearning.facialRecognition.enabled) {
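
handlePersonMigration above moves a single person's thumbnail into its nested location and persists the new path. How one job per person gets enqueued is not shown in this excerpt; a rough sketch of that fan-out, where the repository shapes and job name are assumptions rather than the project's actual API:

// Rough sketch: page through all people and enqueue one migration job per person.
// PersonRepoLike, JobRepoLike, and the job name are illustrative stand-ins.
interface PersonRepoLike {
  getAll(page: { take: number; skip: number }): Promise<{ id: string }[]>;
}

interface JobRepoLike {
  queue(job: { name: string; data: { id: string } }): Promise<void>;
}

export async function queuePersonMigration(people: PersonRepoLike, jobs: JobRepoLike, pageSize = 1000) {
  for (let skip = 0; ; skip += pageSize) {
    const page = await people.getAll({ take: pageSize, skip });
    if (page.length === 0) {
      break;
    }
    for (const { id } of page) {
      await jobs.queue({ name: 'migrate-person', data: { id } });
    }
  }
}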
@@ -132,9 +147,7 @@
     this.logger.verbose(`Cropping face for person: ${personId}`);

-    const outputFolder = this.storageCore.getFolderLocation(StorageFolder.THUMBNAILS, asset.ownerId);
-    const output = join(outputFolder, `${personId}.jpeg`);
-    this.storageRepository.mkdirSync(outputFolder);
+    const output = this.storageCore.ensurePath(StorageFolder.THUMBNAILS, asset.ownerId, `${personId}.jpeg`);

     const { x1, y1, x2, y2 } = boundingBox;
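
Both this call site and handlePersonMigration delegate to StorageCore.ensurePath, whose body is not included in the excerpt. Judging from the spec expectation that storageMock.mkdirSync is called with 'upload/thumbs/user-id/pe/rs', it presumably resolves the owner's folder, appends the two-character prefix folders, creates them via the injected storage repository, and returns the full file path. A plausible sketch, reusing the prefix rule sketched earlier and not claiming to match the actual implementation:

// Plausible sketch of ensurePath's behavior, inferred from the call sites and spec
// expectations above; folderLocation and mkdirSync stand in for the real
// StorageCore.getFolderLocation and IStorageRepository.mkdirSync members.
import { join } from 'path';

export function ensurePathSketch(
  folderLocation: string, // e.g. 'upload/thumbs/user-id'
  fileName: string, // e.g. 'person-1.jpeg'
  mkdirSync: (path: string) => void, // assumed to create intermediate directories
): string {
  const folder = join(folderLocation, fileName.slice(0, 2), fileName.slice(2, 4));
  mkdirSync(folder); // the spec expects 'upload/thumbs/user-id/pe/rs'
  return join(folder, fileName);
}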