feat(ml)!: customizable ML settings (#3891)

* consolidated endpoints, added live configuration

* added ml settings to server

* added settings dashboard

* updated deps, fixed typos

* simplified modelconfig

updated tests

* added ML settings accordion for admin page

updated tests

* merge `clipText` and `clipVision`

* added face distance setting

clarified setting

* add clip mode in request, dropdown for face models

* polished ml settings

updated descriptions

* update clip field on error

* removed unused import

* add description for image classification threshold

* pin safetensors for arm wheel

updated poetry lock

* moved dto

* set model type only in ml repository

* revert form-data package install

use fetch instead of axios

* added slotted description with link

updated facial recognition description

clarified effect of disabling tasks

* validation before model load

* removed unnecessary getConfig call

* added migration

* updated api

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
Author: Mert
Date: 2023-08-29 09:58:00 -04:00
Committed by: GitHub
Parent: 22f5e05060
Commit: bcc36d14a1

56 changed files with 2324 additions and 655 deletions
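Taken together, the bullets above add a per-feature `machineLearning` section to the system config: enable flags, model names, and thresholds such as the face distance and the image classification score. A rough sketch of the shape this implies is below; the field names, groupings, and comments are illustrative assumptions rather than values taken from this PR (the diffs further down only confirm `machineLearning.enabled`, `machineLearning.url`, and `machineLearning.clip.enabled`).

```ts
// Sketch only: field names and groupings are assumptions inferred from the
// commit bullets, not copied from the PR.
interface MachineLearningConfig {
  enabled: boolean;
  url: string; // base URL of the machine-learning service
  classification: {
    enabled: boolean;
    modelName: string;
    minScore: number; // "image classification threshold" from the bullets above
  };
  clip: {
    enabled: boolean; // merged replacement for the old clipText/clipVision flags
    modelName: string;
  };
  facialRecognition: {
    enabled: boolean;
    modelName: string; // "dropdown for face models"
    minScore: number;
    maxDistance: number; // "face distance setting" from the bullets above
  };
}
```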

@@ -86,6 +86,7 @@ export interface ISearchRepository {
   deleteAssets(ids: string[]): Promise<void>;
   deleteFaces(ids: string[]): Promise<void>;
   deleteAllFaces(): Promise<number>;
+  updateCLIPField(num_dim: number): Promise<void>;
   searchAlbums(query: string, filters: SearchFilter): Promise<SearchResult<AlbumEntity>>;
   searchAssets(query: string, filters: SearchFilter): Promise<SearchResult<AssetEntity>>;
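
The added `updateCLIPField(num_dim)` method gives the server a way to resize the index's CLIP embedding field when a differently sized CLIP model is configured (e.g. 512 vs. 768 dimensions). A minimal sketch of how a Typesense-backed implementation of this interface method could look, assuming a collection named `assets` and an embedding field named `smartInfo.clipEmbedding` (both names are assumptions, not taken from this diff):

```ts
import { Client } from 'typesense';

// Sketch only: collection and field names are assumptions.
// Typesense changes a field definition by dropping and re-adding it
// in a single schema update, which lets num_dim follow the configured model.
async function updateCLIPField(client: Client, num_dim: number): Promise<void> {
  await client.collections('assets').update({
    fields: [
      { name: 'smartInfo.clipEmbedding', drop: true },
      { name: 'smartInfo.clipEmbedding', type: 'float[]', num_dim },
    ],
  });
}
```

Dropping and re-adding the field in one update call is the documented way to alter a field in Typesense; the embeddings themselves still need to be regenerated and re-indexed afterwards.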

@@ -121,15 +121,18 @@ export class SearchService {
     await this.configCore.requireFeature(FeatureFlag.SEARCH);
     const query = dto.q || dto.query || '*';
-    const hasClip = machineLearning.enabled && machineLearning.clipEncodeEnabled;
+    const hasClip = machineLearning.enabled && machineLearning.clip.enabled;
     const strategy = dto.clip && hasClip ? SearchStrategy.CLIP : SearchStrategy.TEXT;
     const filters = { userId: authUser.id, ...dto };
     let assets: SearchResult<AssetEntity>;
     switch (strategy) {
       case SearchStrategy.CLIP:
-        const clip = await this.machineLearning.encodeText(machineLearning.url, query);
-        assets = await this.searchRepository.vectorSearch(clip, filters);
+        const {
+          machineLearning: { clip },
+        } = await this.configCore.getConfig();
+        const embedding = await this.machineLearning.encodeText(machineLearning.url, { text: query }, clip);
+        assets = await this.searchRepository.vectorSearch(embedding, filters);
         break;
       case SearchStrategy.TEXT:
       default:
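
The new call site wraps the query text in an input object and passes along the `clip` section of the freshly loaded config, so every request carries the currently configured model instead of relying on a server-side default. The shapes this implies are sketched below; only `encodeText(url, { text }, clip)` itself is visible in the diff, so the interface name, the extra `CLIPConfig` fields, and the `number[]` return type are assumptions.

```ts
// Sketch of the shapes implied by the call site above; names beyond the
// visible call are assumptions.
interface CLIPConfig {
  enabled: boolean;
  modelName: string;
}

interface TextModelInput {
  text: string;
}

interface IMachineLearningRepository {
  // The text to embed travels as a structured input, and the CLIP config rides
  // along with each request, enabling live reconfiguration without a restart.
  encodeText(url: string, input: TextModelInput, config: CLIPConfig): Promise<number[]>;
}
```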