chore(ml): updated dockerfile, added typing, packaging (#2642)

* updated dockerfile, added typing, packaging

apply env change

* added arm64 support

* added ml version bump, second try for arm64

* added linting config to pyproject.toml (see the sketch after this list)

* renamed ml input field

* fixed linter config

* fixed dev docker compose
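
The linting bullet above refers to configuration added under pyproject.toml. As a rough, hypothetical sketch of what such a section can look like (the tool choices, option names, and values here are illustrative assumptions, not necessarily what this commit ships):

# Hypothetical [tool.*] linting sections for pyproject.toml (illustrative only).
[tool.black]
line-length = 120

[tool.ruff]
line-length = 120
select = ["E", "F", "I"]

[tool.mypy]
python_version = "3.11"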
Author: Mert
Date: 2023-06-05 10:40:48 -04:00
Committed by: GitHub
Parent: c92c442356
Commit: 1e748864c5

13 changed files with 2647 additions and 67 deletions


@@ -175,7 +175,7 @@ describe(FacialRecognitionService.name, () => {
     assetMock.getByIds.mockResolvedValue([assetEntityStub.image]);
     await sut.handleRecognizeFaces({ id: assetEntityStub.image.id });
     expect(machineLearningMock.detectFaces).toHaveBeenCalledWith({
-      thumbnailPath: assetEntityStub.image.resizePath,
+      imagePath: assetEntityStub.image.resizePath,
     });
     expect(faceMock.create).not.toHaveBeenCalled();
     expect(jobMock.queue).not.toHaveBeenCalled();


@@ -54,7 +54,7 @@ export class FacialRecognitionService {
       return false;
     }
-    const faces = await this.machineLearning.detectFaces({ thumbnailPath: asset.resizePath });
+    const faces = await this.machineLearning.detectFaces({ imagePath: asset.resizePath });
     this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
     this.logger.verbose(faces.map((face) => ({ ...face, embedding: `float[${face.embedding.length}]` })));


@@ -1,7 +1,7 @@
 export const IMachineLearningRepository = 'IMachineLearningRepository';

 export interface MachineLearningInput {
-  thumbnailPath: string;
+  imagePath: string;
 }

 export interface BoundingBox {
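
To make the rename concrete, here is a hedged sketch of how the renamed field travels from this interface into the request sent to the machine-learning service. The class name, endpoint path, and axios usage below are assumptions for illustration; only the MachineLearningInput shape comes from the hunk above.

import axios from 'axios';

// Renamed input shape, as in the hunk above.
export interface MachineLearningInput {
  imagePath: string;
}

// Hypothetical repository sketch, not the actual implementation: the field
// name is serialized into the JSON body, so the Python ML service must
// accept `imagePath` as well; renaming it is effectively a wire-format change.
export class MachineLearningRepositorySketch {
  constructor(private readonly baseUrl: string) {}

  async classifyImage(input: MachineLearningInput): Promise<string[]> {
    // POSTs { imagePath: '...' } to an assumed endpoint path.
    const response = await axios.post<string[]>(`${this.baseUrl}/image-classifier/tag-image`, input);
    return response.data;
  }
}

A caller would then write machineLearning.classifyImage({ imagePath: asset.resizePath }), matching the updated call sites in the hunks below.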


@@ -84,7 +84,7 @@ describe(SmartInfoService.name, () => {
     await sut.handleClassifyImage({ id: asset.id });
-    expect(machineMock.classifyImage).toHaveBeenCalledWith({ thumbnailPath: 'path/to/resize.ext' });
+    expect(machineMock.classifyImage).toHaveBeenCalledWith({ imagePath: 'path/to/resize.ext' });
     expect(smartMock.upsert).toHaveBeenCalledWith({
       assetId: 'asset-1',
       tags: ['tag1', 'tag2', 'tag3'],
@@ -143,7 +143,7 @@ describe(SmartInfoService.name, () => {
     await sut.handleEncodeClip({ id: asset.id });
-    expect(machineMock.encodeImage).toHaveBeenCalledWith({ thumbnailPath: 'path/to/resize.ext' });
+    expect(machineMock.encodeImage).toHaveBeenCalledWith({ imagePath: 'path/to/resize.ext' });
     expect(smartMock.upsert).toHaveBeenCalledWith({
       assetId: 'asset-1',
       clipEmbedding: [0.01, 0.02, 0.03],


@@ -40,7 +40,7 @@ export class SmartInfoService {
       return false;
     }
-    const tags = await this.machineLearning.classifyImage({ thumbnailPath: asset.resizePath });
+    const tags = await this.machineLearning.classifyImage({ imagePath: asset.resizePath });
     if (tags.length === 0) {
       return false;
     }
@@ -73,7 +73,7 @@ export class SmartInfoService {
       return false;
     }
-    const clipEmbedding = await this.machineLearning.encodeImage({ thumbnailPath: asset.resizePath });
+    const clipEmbedding = await this.machineLearning.encodeImage({ imagePath: asset.resizePath });
     await this.repository.upsert({ assetId: asset.id, clipEmbedding: clipEmbedding });
     return true;