feat(server)!: search via typesense (#1778)

* build: add typesense to docker
* feat(server): typesense search
* feat(web): search
* fix(web): show api error response message
* chore: search tests
* chore: regenerate open api
* fix: disable typesense on e2e
* fix: number properties for open api (dart)
* fix: e2e test
* fix: change lat/lng from floats to typesense geopoint
* dev: Add smartInfo relation to findAssetById to be able to query against it

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
@@ -16,6 +16,11 @@ export const immichAppConfig: ConfigModuleOptions = {
    DB_PASSWORD: WHEN_DB_URL_SET,
    DB_DATABASE_NAME: WHEN_DB_URL_SET,
    DB_URL: Joi.string().optional(),
    TYPESENSE_API_KEY: Joi.when('TYPESENSE_ENABLED', {
      is: 'false',
      then: Joi.string().optional(),
      otherwise: Joi.string().required(),
    }),
    DISABLE_REVERSE_GEOCODING: Joi.boolean().optional().valid(true, false).default(false),
    REVERSE_GEOCODING_PRECISION: Joi.number().optional().valid(0, 1, 2, 3).default(3),
    LOG_LEVEL: Joi.string().optional().valid('simple', 'verbose', 'debug', 'log', 'warn', 'error').default('log'),
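The new TYPESENSE_API_KEY rule only relaxes the requirement when typesense is explicitly switched off. A minimal, hedged sketch of that behaviour with standalone Joi (the TYPESENSE_ENABLED rule here is assumed for illustration; the real schema lives in the Immich config module):

import Joi from 'joi';

const schema = Joi.object({
  // Assumed rule, for illustration only.
  TYPESENSE_ENABLED: Joi.string().optional().valid('true', 'false').default('true'),
  TYPESENSE_API_KEY: Joi.when('TYPESENSE_ENABLED', {
    is: 'false',
    then: Joi.string().optional(),
    otherwise: Joi.string().required(),
  }),
}).unknown(true);

console.log(schema.validate({ TYPESENSE_ENABLED: 'false' }).error); // undefined: no key needed when disabled
console.log(schema.validate({}).error?.message); // '"TYPESENSE_API_KEY" is required'
console.log(schema.validate({ TYPESENSE_API_KEY: 'example-key' }).error); // undefined: enabled and key present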
@@ -1,5 +1,9 @@
import { AlbumEntity } from '@app/infra/db/entities';

export const IAlbumRepository = 'IAlbumRepository';

export interface IAlbumRepository {
  deleteAll(userId: string): Promise<void>;
  getAll(): Promise<AlbumEntity[]>;
  save(album: Partial<AlbumEntity>): Promise<AlbumEntity>;
}
server/libs/domain/src/asset/asset.core.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { AssetEntity, AssetType } from '@app/infra/db/entities';
import { ISearchRepository, SearchCollection } from '../search/search.repository';
import { AssetSearchOptions, IAssetRepository } from './asset.repository';

export class AssetCore {
  constructor(private repository: IAssetRepository, private searchRepository: ISearchRepository) {}

  getAll(options: AssetSearchOptions) {
    return this.repository.getAll(options);
  }

  async save(asset: Partial<AssetEntity>) {
    const _asset = await this.repository.save(asset);
    await this.searchRepository.index(SearchCollection.ASSETS, _asset);
    return _asset;
  }

  findLivePhotoMatch(livePhotoCID: string, otherAssetId: string, type: AssetType): Promise<AssetEntity | null> {
    return this.repository.findLivePhotoMatch(livePhotoCID, otherAssetId, type);
  }
}
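AssetCore.save is the new write path: persist to Postgres first, then index the saved entity into the assets collection, so the search index never references an id that is not in the database. A hedged sketch of that ordering using throw-away in-memory fakes (not the repositories from this PR):

// Toy fakes that cover just enough of the two interfaces to show the ordering.
const calls: string[] = [];
const repository = {
  save: async (asset: Partial<AssetEntity>) => { calls.push(`db:${asset.id}`); return asset as AssetEntity; },
} as unknown as IAssetRepository;
const searchRepository = {
  index: async (collection: string, asset: AssetEntity) => { calls.push(`${collection}:${asset.id}`); },
} as unknown as ISearchRepository;

const core = new AssetCore(repository, searchRepository);
await core.save({ id: 'asset-1' });
console.log(calls); // ['db:asset-1', 'assets:asset-1'] (database write first, then the index call)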
@@ -1,10 +1,14 @@
import { AssetEntity, AssetType } from '@app/infra/db/entities';

export interface AssetSearchOptions {
  isVisible?: boolean;
}

export const IAssetRepository = 'IAssetRepository';

export interface IAssetRepository {
  deleteAll(ownerId: string): Promise<void>;
  getAll(): Promise<AssetEntity[]>;
  getAll(options?: AssetSearchOptions): Promise<AssetEntity[]>;
  save(asset: Partial<AssetEntity>): Promise<AssetEntity>;
  findLivePhotoMatch(livePhotoCID: string, otherAssetId: string, type: AssetType): Promise<AssetEntity | null>;
}
@@ -1,19 +1,25 @@
import { AssetEntity, AssetType } from '@app/infra/db/entities';
import { newJobRepositoryMock } from '../../test';
import { AssetService } from '../asset';
import { assetEntityStub, newAssetRepositoryMock, newJobRepositoryMock } from '../../test';
import { newSearchRepositoryMock } from '../../test/search.repository.mock';
import { AssetService, IAssetRepository } from '../asset';
import { IJobRepository, JobName } from '../job';
import { ISearchRepository } from '../search';

describe(AssetService.name, () => {
  let sut: AssetService;
  let assetMock: jest.Mocked<IAssetRepository>;
  let jobMock: jest.Mocked<IJobRepository>;
  let searchMock: jest.Mocked<ISearchRepository>;

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  beforeEach(async () => {
    assetMock = newAssetRepositoryMock();
    jobMock = newJobRepositoryMock();
    sut = new AssetService(jobMock);
    searchMock = newSearchRepositoryMock();
    sut = new AssetService(assetMock, jobMock, searchMock);
  });

  describe(`handle asset upload`, () => {

@@ -42,4 +48,15 @@ describe(AssetService.name, () => {
      ]);
    });
  });

  describe('save', () => {
    it('should save an asset', async () => {
      assetMock.save.mockResolvedValue(assetEntityStub.image);

      await sut.save(assetEntityStub.image);

      expect(assetMock.save).toHaveBeenCalledWith(assetEntityStub.image);
      expect(searchMock.index).toHaveBeenCalledWith('assets', assetEntityStub.image);
    });
  });
});
@@ -1,9 +1,20 @@
import { AssetType } from '@app/infra/db/entities';
import { AssetEntity, AssetType } from '@app/infra/db/entities';
import { Inject } from '@nestjs/common';
import { IAssetUploadedJob, IJobRepository, JobName } from '../job';
import { ISearchRepository } from '../search';
import { AssetCore } from './asset.core';
import { IAssetRepository } from './asset.repository';

export class AssetService {
  constructor(@Inject(IJobRepository) private jobRepository: IJobRepository) {}
  private assetCore: AssetCore;

  constructor(
    @Inject(IAssetRepository) assetRepository: IAssetRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(ISearchRepository) searchRepository: ISearchRepository,
  ) {
    this.assetCore = new AssetCore(assetRepository, searchRepository);
  }

  async handleAssetUpload(data: IAssetUploadedJob) {
    await this.jobRepository.queue({ name: JobName.GENERATE_JPEG_THUMBNAIL, data });

@@ -15,4 +26,8 @@ export class AssetService {
    await this.jobRepository.queue({ name: JobName.EXIF_EXTRACTION, data });
  }
}

  save(asset: Partial<AssetEntity>) {
    return this.assetCore.save(asset);
  }
}
@@ -1,3 +1,4 @@
export * from './asset.core';
export * from './asset.repository';
export * from './asset.service';
export * from './response-dto';
@@ -5,6 +5,7 @@ import { AuthService } from './auth';
import { DeviceInfoService } from './device-info';
import { MediaService } from './media';
import { OAuthService } from './oauth';
import { SearchService } from './search';
import { ShareService } from './share';
import { SmartInfoService } from './smart-info';
import { StorageService } from './storage';

@@ -25,6 +26,7 @@ const providers: Provider[] = [
  SystemConfigService,
  UserService,
  ShareService,
  SearchService,
  {
    provide: INITIAL_SYSTEM_CONFIG,
    inject: [SystemConfigService],
@@ -9,6 +9,7 @@ export * from './domain.module';
export * from './job';
export * from './media';
export * from './oauth';
export * from './search';
export * from './share';
export * from './smart-info';
export * from './storage';
@@ -5,6 +5,7 @@ export enum QueueName {
  MACHINE_LEARNING = 'machine-learning-queue',
  BACKGROUND_TASK = 'background-task',
  STORAGE_TEMPLATE_MIGRATION = 'storage-template-migration-queue',
  SEARCH = 'search-queue',
}

export enum JobName {

@@ -22,4 +23,10 @@ export enum JobName {
  OBJECT_DETECTION = 'detect-object',
  IMAGE_TAGGING = 'tag-image',
  DELETE_FILES = 'delete-files',
  SEARCH_INDEX_ASSETS = 'search-index-assets',
  SEARCH_INDEX_ASSET = 'search-index-asset',
  SEARCH_INDEX_ALBUMS = 'search-index-albums',
  SEARCH_INDEX_ALBUM = 'search-index-album',
  SEARCH_REMOVE_ALBUM = 'search-remove-album',
  SEARCH_REMOVE_ASSET = 'search-remove-asset',
}
@@ -1,4 +1,8 @@
import { AssetEntity, UserEntity } from '@app/infra/db/entities';
import { AlbumEntity, AssetEntity, UserEntity } from '@app/infra/db/entities';

export interface IAlbumJob {
  album: AlbumEntity;
}

export interface IAssetJob {
  asset: AssetEntity;

@@ -9,6 +13,10 @@ export interface IAssetUploadedJob {
  fileName: string;
}

export interface IDeleteJob {
  id: string;
}

export interface IDeleteFilesJob {
  files: Array<string | null | undefined>;
}
@@ -1,5 +1,13 @@
import { JobName, QueueName } from './job.constants';
import { IAssetJob, IAssetUploadedJob, IDeleteFilesJob, IReverseGeocodingJob, IUserDeletionJob } from './job.interface';
import {
  IAlbumJob,
  IAssetJob,
  IAssetUploadedJob,
  IDeleteFilesJob,
  IDeleteJob,
  IReverseGeocodingJob,
  IUserDeletionJob,
} from './job.interface';

export interface JobCounts {
  active: number;

@@ -23,7 +31,13 @@ export type JobItem =
  | { name: JobName.EXTRACT_VIDEO_METADATA; data: IAssetUploadedJob }
  | { name: JobName.OBJECT_DETECTION; data: IAssetJob }
  | { name: JobName.IMAGE_TAGGING; data: IAssetJob }
  | { name: JobName.DELETE_FILES; data: IDeleteFilesJob };
  | { name: JobName.DELETE_FILES; data: IDeleteFilesJob }
  | { name: JobName.SEARCH_INDEX_ASSETS }
  | { name: JobName.SEARCH_INDEX_ASSET; data: IAssetJob }
  | { name: JobName.SEARCH_INDEX_ALBUMS }
  | { name: JobName.SEARCH_INDEX_ALBUM; data: IAlbumJob }
  | { name: JobName.SEARCH_REMOVE_ASSET; data: IDeleteJob }
  | { name: JobName.SEARCH_REMOVE_ALBUM; data: IDeleteJob };

export const IJobRepository = 'IJobRepository';
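Because JobItem is a discriminated union keyed on name, queue() type-checks each payload against its job. A simplified sketch of the pattern (toy enum and interfaces, not the real Immich types):

enum JobName {
  SEARCH_INDEX_ASSET = 'search-index-asset',
  SEARCH_REMOVE_ASSET = 'search-remove-asset',
}
interface IAssetJob { asset: { id: string } }
interface IDeleteJob { id: string }

type JobItem =
  | { name: JobName.SEARCH_INDEX_ASSET; data: IAssetJob }
  | { name: JobName.SEARCH_REMOVE_ASSET; data: IDeleteJob };

function queue(item: JobItem) {
  switch (item.name) {
    case JobName.SEARCH_INDEX_ASSET:
      return console.log('index', item.data.asset.id); // data is narrowed to IAssetJob here
    case JobName.SEARCH_REMOVE_ASSET:
      return console.log('remove', item.data.id); // data is narrowed to IDeleteJob here
  }
}

queue({ name: JobName.SEARCH_INDEX_ASSET, data: { asset: { id: 'asset-1' } } });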
server/libs/domain/src/search/dto/index.ts (new file, 1 line)
@@ -0,0 +1 @@
export * from './search.dto';

server/libs/domain/src/search/dto/search.dto.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
import { AssetType } from '@app/infra/db/entities';
import { Transform } from 'class-transformer';
import { IsArray, IsBoolean, IsEnum, IsNotEmpty, IsOptional, IsString } from 'class-validator';
import { toBoolean } from '../../../../../apps/immich/src/utils/transform.util';

export class SearchDto {
  @IsString()
  @IsNotEmpty()
  @IsOptional()
  query?: string;

  @IsEnum(AssetType)
  @IsOptional()
  type?: AssetType;

  @IsBoolean()
  @IsOptional()
  @Transform(toBoolean)
  isFavorite?: boolean;

  @IsString()
  @IsNotEmpty()
  @IsOptional()
  'exifInfo.city'?: string;

  @IsString()
  @IsNotEmpty()
  @IsOptional()
  'exifInfo.state'?: string;

  @IsString()
  @IsNotEmpty()
  @IsOptional()
  'exifInfo.country'?: string;

  @IsString()
  @IsNotEmpty()
  @IsOptional()
  'exifInfo.make'?: string;

  @IsString()
  @IsNotEmpty()
  @IsOptional()
  'exifInfo.model'?: string;

  @IsString({ each: true })
  @IsArray()
  @IsOptional()
  @Transform(({ value }) => value.split(','))
  'smartInfo.objects'?: string[];

  @IsString({ each: true })
  @IsArray()
  @IsOptional()
  @Transform(({ value }) => value.split(','))
  'smartInfo.tags'?: string[];
}
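The DTO uses flat, quoted property names ('exifInfo.city', 'smartInfo.tags') so query-string keys map straight onto Typesense field names. A hedged sketch of how a raw query string is coerced by the transforms above (class-transformer only; validation is left out):

import 'reflect-metadata';
import { plainToInstance } from 'class-transformer';
import { SearchDto } from './search.dto';

// Roughly what Nest produces for: GET /search?query=beach&smartInfo.tags=sky,water&isFavorite=true
const dto = plainToInstance(SearchDto, {
  query: 'beach',
  'smartInfo.tags': 'sky,water', // split into ['sky', 'water'] by the @Transform above
  isFavorite: 'true', // coerced to a boolean by toBoolean
});
console.log(dto['smartInfo.tags'], dto.isFavorite); // ['sky', 'water'] true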
server/libs/domain/src/search/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
export * from './dto';
export * from './response-dto';
export * from './search.repository';
export * from './search.service';

server/libs/domain/src/search/response-dto/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export * from './search-config-response.dto';
export * from './search-response.dto';
@@ -0,0 +1,3 @@
export class SearchConfigResponseDto {
  enabled!: boolean;
}
@@ -0,0 +1,37 @@
import { ApiProperty } from '@nestjs/swagger';
import { AlbumResponseDto } from '../../album';
import { AssetResponseDto } from '../../asset';

class SearchFacetCountResponseDto {
  @ApiProperty({ type: 'integer' })
  count!: number;
  value!: string;
}

class SearchFacetResponseDto {
  fieldName!: string;
  counts!: SearchFacetCountResponseDto[];
}

class SearchAlbumResponseDto {
  @ApiProperty({ type: 'integer' })
  total!: number;
  @ApiProperty({ type: 'integer' })
  count!: number;
  items!: AlbumResponseDto[];
  facets!: SearchFacetResponseDto[];
}

class SearchAssetResponseDto {
  @ApiProperty({ type: 'integer' })
  total!: number;
  @ApiProperty({ type: 'integer' })
  count!: number;
  items!: AssetResponseDto[];
  facets!: SearchFacetResponseDto[];
}

export class SearchResponseDto {
  albums!: SearchAlbumResponseDto;
  assets!: SearchAssetResponseDto;
}
server/libs/domain/src/search/search.repository.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import { AlbumEntity, AssetEntity, AssetType } from '@app/infra/db/entities';

export enum SearchCollection {
  ASSETS = 'assets',
  ALBUMS = 'albums',
}

export interface SearchFilter {
  id?: string;
  userId: string;
  type?: AssetType;
  isFavorite?: boolean;
  city?: string;
  state?: string;
  country?: string;
  make?: string;
  model?: string;
  objects?: string[];
  tags?: string[];
}

export interface SearchResult<T> {
  /** total matches */
  total: number;
  /** collection size */
  count: number;
  /** current page */
  page: number;
  /** items for page */
  items: T[];
  facets: SearchFacet[];
}

export interface SearchFacet {
  fieldName: string;
  counts: Array<{
    count: number;
    value: string;
  }>;
}

export type SearchCollectionIndexStatus = Record<SearchCollection, boolean>;

export const ISearchRepository = 'ISearchRepository';

export interface ISearchRepository {
  setup(): Promise<void>;
  checkMigrationStatus(): Promise<SearchCollectionIndexStatus>;

  index(collection: SearchCollection.ASSETS, item: AssetEntity): Promise<void>;
  index(collection: SearchCollection.ALBUMS, item: AlbumEntity): Promise<void>;

  delete(collection: SearchCollection, id: string): Promise<void>;

  import(collection: SearchCollection.ASSETS, items: AssetEntity[], done: boolean): Promise<void>;
  import(collection: SearchCollection.ALBUMS, items: AlbumEntity[], done: boolean): Promise<void>;

  search(collection: SearchCollection.ASSETS, query: string, filters: SearchFilter): Promise<SearchResult<AssetEntity>>;
  search(collection: SearchCollection.ALBUMS, query: string, filters: SearchFilter): Promise<SearchResult<AlbumEntity>>;
}
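The index/import/search overloads tie each SearchCollection member to its entity type, so callers get typed results without casts. A short sketch of what that buys a consumer (assumes an injected ISearchRepository instance):

import { ISearchRepository, SearchCollection } from './search.repository';

declare const repo: ISearchRepository;

async function example(userId: string) {
  const assets = await repo.search(SearchCollection.ASSETS, 'beach', { userId }); // items: AssetEntity[]
  const albums = await repo.search(SearchCollection.ALBUMS, 'holiday', { userId }); // items: AlbumEntity[]
  return { assetMatches: assets.total, albumMatches: albums.total };
}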
server/libs/domain/src/search/search.service.spec.ts (new file, 317 lines)
@@ -0,0 +1,317 @@
|
||||
import { BadRequestException } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { plainToInstance } from 'class-transformer';
|
||||
import {
|
||||
albumStub,
|
||||
assetEntityStub,
|
||||
authStub,
|
||||
newAlbumRepositoryMock,
|
||||
newAssetRepositoryMock,
|
||||
newJobRepositoryMock,
|
||||
newSearchRepositoryMock,
|
||||
} from '../../test';
|
||||
import { IAlbumRepository } from '../album/album.repository';
|
||||
import { IAssetRepository } from '../asset/asset.repository';
|
||||
import { JobName } from '../job';
|
||||
import { IJobRepository } from '../job/job.repository';
|
||||
import { SearchDto } from './dto';
|
||||
import { ISearchRepository } from './search.repository';
|
||||
import { SearchService } from './search.service';
|
||||
|
||||
describe(SearchService.name, () => {
|
||||
let sut: SearchService;
|
||||
let albumMock: jest.Mocked<IAlbumRepository>;
|
||||
let assetMock: jest.Mocked<IAssetRepository>;
|
||||
let jobMock: jest.Mocked<IJobRepository>;
|
||||
let searchMock: jest.Mocked<ISearchRepository>;
|
||||
let configMock: jest.Mocked<ConfigService>;
|
||||
|
||||
beforeEach(() => {
|
||||
albumMock = newAlbumRepositoryMock();
|
||||
assetMock = newAssetRepositoryMock();
|
||||
jobMock = newJobRepositoryMock();
|
||||
searchMock = newSearchRepositoryMock();
|
||||
configMock = { get: jest.fn() } as unknown as jest.Mocked<ConfigService>;
|
||||
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
});
|
||||
|
||||
it('should work', () => {
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
describe('request dto', () => {
|
||||
it('should convert smartInfo.tags to a string list', () => {
|
||||
const instance = plainToInstance(SearchDto, { 'smartInfo.tags': 'a,b,c' });
|
||||
expect(instance['smartInfo.tags']).toEqual(['a', 'b', 'c']);
|
||||
});
|
||||
|
||||
it('should handle empty smartInfo.tags', () => {
|
||||
const instance = plainToInstance(SearchDto, {});
|
||||
expect(instance['smartInfo.tags']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should convert smartInfo.objects to a string list', () => {
|
||||
const instance = plainToInstance(SearchDto, { 'smartInfo.objects': 'a,b,c' });
|
||||
expect(instance['smartInfo.objects']).toEqual(['a', 'b', 'c']);
|
||||
});
|
||||
|
||||
it('should handle empty smartInfo.objects', () => {
|
||||
const instance = plainToInstance(SearchDto, {});
|
||||
expect(instance['smartInfo.objects']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isEnabled', () => {
|
||||
it('should be enabled by default', () => {
|
||||
expect(sut.isEnabled()).toBe(true);
|
||||
});
|
||||
|
||||
it('should be disabled via an env variable', () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
expect(sut.isEnabled()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getConfig', () => {
|
||||
it('should return the config', () => {
|
||||
expect(sut.getConfig()).toEqual({ enabled: true });
|
||||
});
|
||||
|
||||
it('should return the config when search is disabled', () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
expect(sut.getConfig()).toEqual({ enabled: false });
|
||||
});
|
||||
});
|
||||
|
||||
describe(`bootstrap`, () => {
|
||||
it('should skip when search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await sut.bootstrap();
|
||||
|
||||
expect(searchMock.setup).not.toHaveBeenCalled();
|
||||
expect(searchMock.checkMigrationStatus).not.toHaveBeenCalled();
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should skip schema migration if not needed', async () => {
|
||||
searchMock.checkMigrationStatus.mockResolvedValue({ assets: false, albums: false });
|
||||
await sut.bootstrap();
|
||||
|
||||
expect(searchMock.setup).toHaveBeenCalled();
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should do schema migration if needed', async () => {
|
||||
searchMock.checkMigrationStatus.mockResolvedValue({ assets: true, albums: true });
|
||||
await sut.bootstrap();
|
||||
|
||||
expect(searchMock.setup).toHaveBeenCalled();
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[{ name: JobName.SEARCH_INDEX_ASSETS }],
|
||||
[{ name: JobName.SEARCH_INDEX_ALBUMS }],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('search', () => {
|
||||
it('should throw an error is search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await expect(sut.search(authStub.admin, {})).rejects.toBeInstanceOf(BadRequestException);
|
||||
|
||||
expect(searchMock.search).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should search assets and albums', async () => {
|
||||
searchMock.search.mockResolvedValue({
|
||||
total: 0,
|
||||
count: 0,
|
||||
page: 1,
|
||||
items: [],
|
||||
facets: [],
|
||||
});
|
||||
|
||||
await expect(sut.search(authStub.admin, {})).resolves.toEqual({
|
||||
albums: {
|
||||
total: 0,
|
||||
count: 0,
|
||||
page: 1,
|
||||
items: [],
|
||||
facets: [],
|
||||
},
|
||||
assets: {
|
||||
total: 0,
|
||||
count: 0,
|
||||
page: 1,
|
||||
items: [],
|
||||
facets: [],
|
||||
},
|
||||
});
|
||||
|
||||
expect(searchMock.search.mock.calls).toEqual([
|
||||
['assets', '*', { userId: authStub.admin.id }],
|
||||
['albums', '*', { userId: authStub.admin.id }],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAssets', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await sut.handleIndexAssets();
|
||||
|
||||
expect(searchMock.import).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should index all the assets', async () => {
|
||||
assetMock.getAll.mockResolvedValue([]);
|
||||
|
||||
await sut.handleIndexAssets();
|
||||
|
||||
expect(searchMock.import).toHaveBeenCalledWith('assets', [], true);
|
||||
});
|
||||
|
||||
it('should log an error', async () => {
|
||||
assetMock.getAll.mockResolvedValue([]);
|
||||
searchMock.import.mockRejectedValue(new Error('import failed'));
|
||||
|
||||
await sut.handleIndexAssets();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAsset', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await sut.handleIndexAsset({ asset: assetEntityStub.image });
|
||||
|
||||
expect(searchMock.index).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should index the asset', async () => {
|
||||
await sut.handleIndexAsset({ asset: assetEntityStub.image });
|
||||
|
||||
expect(searchMock.index).toHaveBeenCalledWith('assets', assetEntityStub.image);
|
||||
});
|
||||
|
||||
it('should log an error', async () => {
|
||||
searchMock.index.mockRejectedValue(new Error('index failed'));
|
||||
|
||||
await sut.handleIndexAsset({ asset: assetEntityStub.image });
|
||||
|
||||
expect(searchMock.index).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAlbums', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await sut.handleIndexAlbums();
|
||||
|
||||
expect(searchMock.import).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should index all the albums', async () => {
|
||||
albumMock.getAll.mockResolvedValue([]);
|
||||
|
||||
await sut.handleIndexAlbums();
|
||||
|
||||
expect(searchMock.import).toHaveBeenCalledWith('albums', [], true);
|
||||
});
|
||||
|
||||
it('should log an error', async () => {
|
||||
albumMock.getAll.mockResolvedValue([]);
|
||||
searchMock.import.mockRejectedValue(new Error('import failed'));
|
||||
|
||||
await sut.handleIndexAlbums();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAlbum', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await sut.handleIndexAlbum({ album: albumStub.empty });
|
||||
|
||||
expect(searchMock.index).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should index the album', async () => {
|
||||
await sut.handleIndexAlbum({ album: albumStub.empty });
|
||||
|
||||
expect(searchMock.index).toHaveBeenCalledWith('albums', albumStub.empty);
|
||||
});
|
||||
|
||||
it('should log an error', async () => {
|
||||
searchMock.index.mockRejectedValue(new Error('index failed'));
|
||||
|
||||
await sut.handleIndexAlbum({ album: albumStub.empty });
|
||||
|
||||
expect(searchMock.index).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleRemoveAlbum', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await sut.handleRemoveAlbum({ id: 'album1' });
|
||||
|
||||
expect(searchMock.delete).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should remove the album', async () => {
|
||||
await sut.handleRemoveAlbum({ id: 'album1' });
|
||||
|
||||
expect(searchMock.delete).toHaveBeenCalledWith('albums', 'album1');
|
||||
});
|
||||
|
||||
it('should log an error', async () => {
|
||||
searchMock.delete.mockRejectedValue(new Error('remove failed'));
|
||||
|
||||
await sut.handleRemoveAlbum({ id: 'album1' });
|
||||
|
||||
expect(searchMock.delete).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleRemoveAsset', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
configMock.get.mockReturnValue('false');
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, searchMock, configMock);
|
||||
|
||||
await sut.handleRemoveAsset({ id: 'asset1`' });
|
||||
|
||||
expect(searchMock.delete).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should remove the asset', async () => {
|
||||
await sut.handleRemoveAsset({ id: 'asset1' });
|
||||
|
||||
expect(searchMock.delete).toHaveBeenCalledWith('assets', 'asset1');
|
||||
});
|
||||
|
||||
it('should log an error', async () => {
|
||||
searchMock.delete.mockRejectedValue(new Error('remove failed'));
|
||||
|
||||
await sut.handleRemoveAsset({ id: 'asset1' });
|
||||
|
||||
expect(searchMock.delete).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
server/libs/domain/src/search/search.service.ts (new file, 154 lines)
@@ -0,0 +1,154 @@
|
||||
import { BadRequestException, Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { IAlbumRepository } from '../album/album.repository';
|
||||
import { IAssetRepository } from '../asset/asset.repository';
|
||||
import { AuthUserDto } from '../auth';
|
||||
import { IAlbumJob, IAssetJob, IDeleteJob, IJobRepository, JobName } from '../job';
|
||||
import { SearchDto } from './dto';
|
||||
import { SearchConfigResponseDto, SearchResponseDto } from './response-dto';
|
||||
import { ISearchRepository, SearchCollection } from './search.repository';
|
||||
|
||||
@Injectable()
|
||||
export class SearchService {
|
||||
private logger = new Logger(SearchService.name);
|
||||
private enabled: boolean;
|
||||
|
||||
constructor(
|
||||
@Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
|
||||
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
|
||||
@Inject(IJobRepository) private jobRepository: IJobRepository,
|
||||
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
|
||||
configService: ConfigService,
|
||||
) {
|
||||
this.enabled = configService.get('TYPESENSE_ENABLED') !== 'false';
|
||||
}
|
||||
|
||||
isEnabled() {
|
||||
return this.enabled;
|
||||
}
|
||||
|
||||
getConfig(): SearchConfigResponseDto {
|
||||
return {
|
||||
enabled: this.enabled,
|
||||
};
|
||||
}
|
||||
|
||||
async bootstrap() {
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.log('Running bootstrap');
|
||||
await this.searchRepository.setup();
|
||||
|
||||
const migrationStatus = await this.searchRepository.checkMigrationStatus();
|
||||
if (migrationStatus[SearchCollection.ASSETS]) {
|
||||
this.logger.debug('Queueing job to re-index all assets');
|
||||
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSETS });
|
||||
}
|
||||
if (migrationStatus[SearchCollection.ALBUMS]) {
|
||||
this.logger.debug('Queueing job to re-index all albums');
|
||||
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ALBUMS });
|
||||
}
|
||||
}
|
||||
|
||||
async search(authUser: AuthUserDto, dto: SearchDto): Promise<SearchResponseDto> {
|
||||
if (!this.enabled) {
|
||||
throw new BadRequestException('Search is disabled');
|
||||
}
|
||||
|
||||
const query = dto.query || '*';
|
||||
|
||||
return {
|
||||
assets: (await this.searchRepository.search(SearchCollection.ASSETS, query, {
|
||||
userId: authUser.id,
|
||||
...dto,
|
||||
})) as any,
|
||||
albums: (await this.searchRepository.search(SearchCollection.ALBUMS, query, {
|
||||
userId: authUser.id,
|
||||
...dto,
|
||||
})) as any,
|
||||
};
|
||||
}
|
||||
|
||||
async handleIndexAssets() {
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
this.logger.debug(`Running indexAssets`);
|
||||
// TODO: do this in batches based on searchIndexVersion
|
||||
const assets = await this.assetRepository.getAll({ isVisible: true });
|
||||
|
||||
this.logger.log(`Indexing ${assets.length} assets`);
|
||||
await this.searchRepository.import(SearchCollection.ASSETS, assets, true);
|
||||
} catch (error: any) {
|
||||
this.logger.error(`Unable to index all assets`, error?.stack);
|
||||
}
|
||||
}
|
||||
|
||||
async handleIndexAsset(data: IAssetJob) {
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { asset } = data;
|
||||
|
||||
try {
|
||||
await this.searchRepository.index(SearchCollection.ASSETS, asset);
|
||||
} catch (error: any) {
|
||||
this.logger.error(`Unable to index asset: ${asset.id}`, error?.stack);
|
||||
}
|
||||
}
|
||||
|
||||
async handleIndexAlbums() {
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const albums = await this.albumRepository.getAll();
|
||||
this.logger.log(`Indexing ${albums.length} albums`);
|
||||
await this.searchRepository.import(SearchCollection.ALBUMS, albums, true);
|
||||
} catch (error: any) {
|
||||
this.logger.error(`Unable to index all albums`, error?.stack);
|
||||
}
|
||||
}
|
||||
|
||||
async handleIndexAlbum(data: IAlbumJob) {
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { album } = data;
|
||||
|
||||
try {
|
||||
await this.searchRepository.index(SearchCollection.ALBUMS, album);
|
||||
} catch (error: any) {
|
||||
this.logger.error(`Unable to index album: ${album.id}`, error?.stack);
|
||||
}
|
||||
}
|
||||
|
||||
async handleRemoveAlbum(data: IDeleteJob) {
|
||||
await this.handleRemove(SearchCollection.ALBUMS, data);
|
||||
}
|
||||
|
||||
async handleRemoveAsset(data: IDeleteJob) {
|
||||
await this.handleRemove(SearchCollection.ASSETS, data);
|
||||
}
|
||||
|
||||
private async handleRemove(collection: SearchCollection, data: IDeleteJob) {
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { id } = data;
|
||||
|
||||
try {
|
||||
await this.searchRepository.delete(collection, id);
|
||||
} catch (error: any) {
|
||||
this.logger.error(`Unable to remove ${collection}: ${id}`, error?.stack);
|
||||
}
|
||||
}
|
||||
}
|
||||
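The service's bootstrap ties the pieces together: set up the versioned collections, ask the repository which aliases still point at an old schema, and queue full re-index jobs only for those. A condensed, hedged restatement of that flow (same calls as SearchService.bootstrap above, error handling and logging omitted):

import { IJobRepository, ISearchRepository, JobName, SearchCollection } from '@app/domain';

async function bootstrapSearch(search: ISearchRepository, jobs: IJobRepository) {
  await search.setup(); // upsert the versioned typesense collections
  const status = await search.checkMigrationStatus(); // which aliases lag behind the current schema version?
  if (status[SearchCollection.ASSETS]) {
    await jobs.queue({ name: JobName.SEARCH_INDEX_ASSETS }); // full re-index runs as a background job
  }
  if (status[SearchCollection.ALBUMS]) {
    await jobs.queue({ name: JobName.SEARCH_INDEX_ALBUMS });
  }
}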
@@ -3,5 +3,7 @@ import { IAlbumRepository } from '../src';
export const newAlbumRepositoryMock = (): jest.Mocked<IAlbumRepository> => {
  return {
    deleteAll: jest.fn(),
    getAll: jest.fn(),
    save: jest.fn(),
  };
};
@@ -1,4 +1,5 @@
import {
  AlbumEntity,
  APIKeyEntity,
  AssetEntity,
  AssetType,

@@ -155,6 +156,21 @@ export const assetEntityStub = {
  } as AssetEntity),
};

export const albumStub = {
  empty: Object.freeze<AlbumEntity>({
    id: 'album-1',
    albumName: 'Empty album',
    ownerId: authStub.admin.id,
    owner: userEntityStub.admin,
    assets: [],
    albumThumbnailAssetId: null,
    createdAt: new Date().toISOString(),
    updatedAt: new Date().toISOString(),
    sharedLinks: [],
    sharedUsers: [],
  }),
};

const assetInfo: ExifResponseDto = {
  make: 'camera-make',
  model: 'camera-model',
@@ -6,6 +6,7 @@ export * from './device-info.repository.mock';
export * from './fixtures';
export * from './job.repository.mock';
export * from './machine-learning.repository.mock';
export * from './search.repository.mock';
export * from './shared-link.repository.mock';
export * from './smart-info.repository.mock';
export * from './storage.repository.mock';
server/libs/domain/test/search.repository.mock.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { ISearchRepository } from '../src';

export const newSearchRepositoryMock = (): jest.Mocked<ISearchRepository> => {
  return {
    setup: jest.fn(),
    checkMigrationStatus: jest.fn(),
    index: jest.fn(),
    import: jest.fn(),
    search: jest.fn(),
    delete: jest.fn(),
  };
};
@@ -11,4 +11,13 @@ export class AlbumRepository implements IAlbumRepository {
  async deleteAll(userId: string): Promise<void> {
    await this.repository.delete({ ownerId: userId });
  }

  getAll(): Promise<AlbumEntity[]> {
    return this.repository.find();
  }

  async save(album: Partial<AlbumEntity>) {
    const { id } = await this.repository.save(album);
    return this.repository.findOneOrFail({ where: { id } });
  }
}
@@ -1,4 +1,4 @@
import { IAssetRepository } from '@app/domain';
import { AssetSearchOptions, IAssetRepository } from '@app/domain';
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Not, Repository } from 'typeorm';

@@ -12,13 +12,32 @@ export class AssetRepository implements IAssetRepository {
    await this.repository.delete({ ownerId });
  }

  async getAll(): Promise<AssetEntity[]> {
    return this.repository.find({ relations: { exifInfo: true } });
  getAll(options?: AssetSearchOptions | undefined): Promise<AssetEntity[]> {
    options = options || {};

    return this.repository.find({
      where: {
        isVisible: options.isVisible,
      },
      relations: {
        exifInfo: true,
        smartInfo: true,
        tags: true,
      },
    });
  }

  async save(asset: Partial<AssetEntity>): Promise<AssetEntity> {
    const { id } = await this.repository.save(asset);
    return this.repository.findOneOrFail({ where: { id } });
    return this.repository.findOneOrFail({
      where: { id },
      relations: {
        exifInfo: true,
        owner: true,
        smartInfo: true,
        tags: true,
      },
    });
  }

  findLivePhotoMatch(livePhotoCID: string, otherAssetId: string, type: AssetType): Promise<AssetEntity | null> {
@@ -8,6 +8,7 @@ import {
  IKeyRepository,
  IMachineLearningRepository,
  IMediaRepository,
  ISearchRepository,
  ISharedLinkRepository,
  ISmartInfoRepository,
  IStorageRepository,

@@ -45,6 +46,7 @@ import {
import { JobRepository } from './job';
import { MachineLearningRepository } from './machine-learning';
import { MediaRepository } from './media';
import { TypesenseRepository } from './search';
import { FilesystemProvider } from './storage';

const providers: Provider[] = [

@@ -52,12 +54,12 @@ const providers: Provider[] = [
  { provide: IAssetRepository, useClass: AssetRepository },
  { provide: ICommunicationRepository, useClass: CommunicationRepository },
  { provide: ICryptoRepository, useClass: CryptoRepository },
  { provide: ICryptoRepository, useClass: CryptoRepository },
  { provide: IDeviceInfoRepository, useClass: DeviceInfoRepository },
  { provide: IKeyRepository, useClass: APIKeyRepository },
  { provide: IJobRepository, useClass: JobRepository },
  { provide: IMachineLearningRepository, useClass: MachineLearningRepository },
  { provide: IMediaRepository, useClass: MediaRepository },
  { provide: ISearchRepository, useClass: TypesenseRepository },
  { provide: ISharedLinkRepository, useClass: SharedLinkRepository },
  { provide: ISmartInfoRepository, useClass: SmartInfoRepository },
  { provide: IStorageRepository, useClass: FilesystemProvider },
@@ -13,6 +13,7 @@ export class JobRepository implements IJobRepository {
    @InjectQueue(QueueName.STORAGE_TEMPLATE_MIGRATION) private storageTemplateMigration: Queue,
    @InjectQueue(QueueName.THUMBNAIL_GENERATION) private thumbnail: Queue,
    @InjectQueue(QueueName.VIDEO_CONVERSION) private videoTranscode: Queue<IAssetJob>,
    @InjectQueue(QueueName.SEARCH) private searchIndex: Queue,
  ) {}

  async isActive(name: QueueName): Promise<boolean> {

@@ -70,6 +71,18 @@ export class JobRepository implements IJobRepository {
        await this.videoTranscode.add(item.name, item.data);
        break;

      case JobName.SEARCH_INDEX_ASSETS:
      case JobName.SEARCH_INDEX_ALBUMS:
        await this.searchIndex.add(item.name);
        break;

      case JobName.SEARCH_INDEX_ASSET:
      case JobName.SEARCH_INDEX_ALBUM:
      case JobName.SEARCH_REMOVE_ALBUM:
      case JobName.SEARCH_REMOVE_ASSET:
        await this.searchIndex.add(item.name, item.data);
        break;

      default:
        // TODO inject remaining queues and map job to queue
        this.logger.error('Invalid job', item);
server/libs/infra/src/search/index.ts (new file, 1 line)
@@ -0,0 +1 @@
export * from './typesense.repository';
server/libs/infra/src/search/schemas/album.schema.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections';

export const albumSchemaVersion = 1;
export const albumSchema: CollectionCreateSchema = {
  name: `albums-v${albumSchemaVersion}`,
  fields: [
    { name: 'ownerId', type: 'string', facet: false },
    { name: 'albumName', type: 'string', facet: false, sort: true },
    { name: 'createdAt', type: 'string', facet: false, sort: true },
    { name: 'updatedAt', type: 'string', facet: false, sort: true },
  ],
  default_sorting_field: 'createdAt',
};
server/libs/infra/src/search/schemas/asset.schema.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections';

export const assetSchemaVersion = 1;
export const assetSchema: CollectionCreateSchema = {
  name: `assets-v${assetSchemaVersion}`,
  fields: [
    // asset
    { name: 'ownerId', type: 'string', facet: false },
    { name: 'type', type: 'string', facet: true },
    { name: 'originalPath', type: 'string', facet: false },
    { name: 'createdAt', type: 'string', facet: false, sort: true },
    { name: 'updatedAt', type: 'string', facet: false, sort: true },
    { name: 'fileCreatedAt', type: 'string', facet: false, sort: true },
    { name: 'fileModifiedAt', type: 'string', facet: false, sort: true },
    { name: 'isFavorite', type: 'bool', facet: true },
    // { name: 'checksum', type: 'string', facet: true },
    // { name: 'tags', type: 'string[]', facet: true, optional: true },

    // exif
    { name: 'exifInfo.city', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.country', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.state', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.description', type: 'string', facet: false, optional: true },
    { name: 'exifInfo.imageName', type: 'string', facet: false, optional: true },
    { name: 'geo', type: 'geopoint', facet: false, optional: true },
    { name: 'exifInfo.make', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.model', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.orientation', type: 'string', optional: true },

    // smart info
    { name: 'smartInfo.objects', type: 'string[]', facet: true, optional: true },
    { name: 'smartInfo.tags', type: 'string[]', facet: true, optional: true },
  ],
  token_separators: ['.'],
  enable_nested_fields: true,
  default_sorting_field: 'fileCreatedAt',
};
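Asset documents are the flattened entity plus a computed geo point; the collection name carries a schema version and the app searches through a stable alias. A hedged sketch of creating this collection and upserting one conforming document with typesense-js (connection details and field values are made up; the real document shape comes from AssetEntity via patchAsset/import):

import { Client } from 'typesense';
import { assetSchema } from './asset.schema';

// Assumed connection details, for illustration only.
const client = new Client({ nodes: [{ host: 'typesense', port: 8108, protocol: 'http' }], apiKey: 'example-key' });

await client.collections().create(assetSchema); // creates 'assets-v1'
await client.collections(assetSchema.name).documents().upsert({
  id: 'asset-1',
  ownerId: 'user-1',
  type: 'IMAGE',
  originalPath: 'upload/user-1/asset-1.jpg',
  createdAt: '2023-02-20T00:00:00.000Z',
  updatedAt: '2023-02-20T00:00:00.000Z',
  fileCreatedAt: '2023-02-20T00:00:00.000Z',
  fileModifiedAt: '2023-02-20T00:00:00.000Z',
  isFavorite: false,
  exifInfo: { city: 'Oslo', country: 'Norway' }, // nested object; the schema addresses it as exifInfo.city etc.
  smartInfo: { tags: ['beach'], objects: ['person'] },
  geo: [59.91, 10.75], // [lat, lng] geopoint assembled by patchAsset()
});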
server/libs/infra/src/search/typesense.repository.ts (new file, 325 lines)
@@ -0,0 +1,325 @@
|
||||
import {
|
||||
ISearchRepository,
|
||||
SearchCollection,
|
||||
SearchCollectionIndexStatus,
|
||||
SearchFilter,
|
||||
SearchResult,
|
||||
} from '@app/domain';
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import _, { Dictionary } from 'lodash';
|
||||
import { Client } from 'typesense';
|
||||
import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections';
|
||||
import { DocumentSchema, SearchResponse } from 'typesense/lib/Typesense/Documents';
|
||||
import { AlbumEntity, AssetEntity } from '../db';
|
||||
import { albumSchema } from './schemas/album.schema';
|
||||
import { assetSchema } from './schemas/asset.schema';
|
||||
|
||||
interface GeoAssetEntity extends AssetEntity {
|
||||
geo?: [number, number];
|
||||
}
|
||||
|
||||
function removeNil<T extends Dictionary<any>>(item: T): Partial<T> {
|
||||
_.forOwn(item, (value, key) => {
|
||||
if (_.isNil(value) || (_.isObject(value) && !_.isDate(value) && _.isEmpty(removeNil(value)))) {
|
||||
delete item[key];
|
||||
}
|
||||
});
|
||||
|
||||
return item;
|
||||
}
|
||||
|
||||
const schemaMap: Record<SearchCollection, CollectionCreateSchema> = {
|
||||
[SearchCollection.ASSETS]: assetSchema,
|
||||
[SearchCollection.ALBUMS]: albumSchema,
|
||||
};
|
||||
|
||||
const schemas = Object.entries(schemaMap) as [SearchCollection, CollectionCreateSchema][];
|
||||
|
||||
interface SearchUpdateQueue<T = any> {
|
||||
upsert: T[];
|
||||
delete: string[];
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class TypesenseRepository implements ISearchRepository {
|
||||
private logger = new Logger(TypesenseRepository.name);
|
||||
private queue: Record<SearchCollection, SearchUpdateQueue> = {
|
||||
[SearchCollection.ASSETS]: {
|
||||
upsert: [],
|
||||
delete: [],
|
||||
},
|
||||
[SearchCollection.ALBUMS]: {
|
||||
upsert: [],
|
||||
delete: [],
|
||||
},
|
||||
};
|
||||
|
||||
private _client: Client | null = null;
|
||||
private get client(): Client {
|
||||
if (!this._client) {
|
||||
throw new Error('Typesense client not available (no apiKey was provided)');
|
||||
}
|
||||
return this._client;
|
||||
}
|
||||
|
||||
constructor() {
|
||||
const apiKey = process.env.TYPESENSE_API_KEY;
|
||||
if (!apiKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._client = new Client({
|
||||
nodes: [
|
||||
{
|
||||
host: process.env.TYPESENSE_HOST || 'typesense',
|
||||
port: Number(process.env.TYPESENSE_PORT) || 8108,
|
||||
protocol: process.env.TYPESENSE_PROTOCOL || 'http',
|
||||
},
|
||||
],
|
||||
apiKey,
|
||||
numRetries: 3,
|
||||
connectionTimeoutSeconds: 10,
|
||||
});
|
||||
|
||||
setInterval(() => this.flush(), 5_000);
|
||||
}
|
||||
|
||||
async setup(): Promise<void> {
|
||||
// upsert collections
|
||||
for (const [collectionName, schema] of schemas) {
|
||||
const collection = await this.client
|
||||
.collections(schema.name)
|
||||
.retrieve()
|
||||
.catch(() => null);
|
||||
if (!collection) {
|
||||
this.logger.log(`Creating schema: ${collectionName}/${schema.name}`);
|
||||
await this.client.collections().create(schema);
|
||||
} else {
|
||||
this.logger.log(`Schema up to date: ${collectionName}/${schema.name}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async checkMigrationStatus(): Promise<SearchCollectionIndexStatus> {
|
||||
const migrationMap: SearchCollectionIndexStatus = {
|
||||
[SearchCollection.ASSETS]: false,
|
||||
[SearchCollection.ALBUMS]: false,
|
||||
};
|
||||
|
||||
// check if alias is using the current schema
|
||||
const { aliases } = await this.client.aliases().retrieve();
|
||||
this.logger.log(`Alias mapping: ${JSON.stringify(aliases)}`);
|
||||
|
||||
for (const [aliasName, schema] of schemas) {
|
||||
const match = aliases.find((alias) => alias.name === aliasName);
|
||||
if (!match || match.collection_name !== schema.name) {
|
||||
migrationMap[aliasName] = true;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.log(`Collections needing migration: ${JSON.stringify(migrationMap)}`);
|
||||
|
||||
return migrationMap;
|
||||
}
|
||||
|
||||
async index(collection: SearchCollection, item: AssetEntity | AlbumEntity, immediate?: boolean): Promise<void> {
|
||||
const schema = schemaMap[collection];
|
||||
|
||||
if (collection === SearchCollection.ASSETS) {
|
||||
item = this.patchAsset(item as AssetEntity);
|
||||
}
|
||||
|
||||
if (immediate) {
|
||||
await this.client.collections(schema.name).documents().upsert(item);
|
||||
return;
|
||||
}
|
||||
|
||||
this.queue[collection].upsert.push(item);
|
||||
}
|
||||
|
||||
async delete(collection: SearchCollection, id: string, immediate?: boolean): Promise<void> {
|
||||
const schema = schemaMap[collection];
|
||||
|
||||
if (immediate) {
|
||||
await this.client.collections(schema.name).documents().delete(id);
|
||||
return;
|
||||
}
|
||||
|
||||
this.queue[collection].delete.push(id);
|
||||
}
|
||||
|
||||
async import(collection: SearchCollection, items: AssetEntity[] | AlbumEntity[], done: boolean): Promise<void> {
|
||||
try {
|
||||
const schema = schemaMap[collection];
|
||||
const _items = items.map((item) => {
|
||||
if (collection === SearchCollection.ASSETS) {
|
||||
item = this.patchAsset(item as AssetEntity);
|
||||
}
|
||||
// null values are invalid for typesense documents
|
||||
return removeNil(item);
|
||||
});
|
||||
if (_items.length > 0) {
|
||||
await this.client
|
||||
.collections(schema.name)
|
||||
.documents()
|
||||
.import(_items, { action: 'upsert', dirty_values: 'coerce_or_drop' });
|
||||
}
|
||||
if (done) {
|
||||
await this.updateAlias(collection);
|
||||
}
|
||||
} catch (error: any) {
|
||||
this.handleError(error);
|
||||
}
|
||||
}
|
||||
|
||||
search(collection: SearchCollection.ASSETS, query: string, filter: SearchFilter): Promise<SearchResult<AssetEntity>>;
|
||||
search(collection: SearchCollection.ALBUMS, query: string, filter: SearchFilter): Promise<SearchResult<AlbumEntity>>;
|
||||
async search(collection: SearchCollection, query: string, filters: SearchFilter) {
|
||||
const alias = await this.client.aliases(collection).retrieve();
|
||||
|
||||
const { userId } = filters;
|
||||
|
||||
const _filters = [`ownerId:${userId}`];
|
||||
|
||||
if (filters.id) {
|
||||
_filters.push(`id:=${filters.id}`);
|
||||
}
|
||||
if (collection === SearchCollection.ASSETS) {
|
||||
for (const item of schemaMap[collection].fields || []) {
|
||||
let value = filters[item.name as keyof SearchFilter];
|
||||
if (Array.isArray(value)) {
|
||||
value = `[${value.join(',')}]`;
|
||||
}
|
||||
if (item.facet && value !== undefined) {
|
||||
_filters.push(`${item.name}:${value}`);
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug(`Searching query='${query}', filters='${JSON.stringify(_filters)}'`);
|
||||
|
||||
const results = await this.client
|
||||
.collections<AssetEntity>(alias.collection_name)
|
||||
.documents()
|
||||
.search({
|
||||
q: query,
|
||||
query_by: [
|
||||
'exifInfo.imageName',
|
||||
'exifInfo.country',
|
||||
'exifInfo.state',
|
||||
'exifInfo.city',
|
||||
'exifInfo.description',
|
||||
'smartInfo.tags',
|
||||
'smartInfo.objects',
|
||||
].join(','),
|
||||
filter_by: _filters.join(' && '),
|
||||
per_page: 250,
|
||||
facet_by: (assetSchema.fields || [])
|
||||
.filter((field) => field.facet)
|
||||
.map((field) => field.name)
|
||||
.join(','),
|
||||
});
|
||||
|
||||
return this.asResponse(results);
|
||||
}
|
||||
|
||||
if (collection === SearchCollection.ALBUMS) {
|
||||
const results = await this.client
|
||||
.collections<AlbumEntity>(alias.collection_name)
|
||||
.documents()
|
||||
.search({
|
||||
q: query,
|
||||
query_by: 'albumName',
|
||||
filter_by: _filters.join(','),
|
||||
});
|
||||
|
||||
return this.asResponse(results);
|
||||
}
|
||||
|
||||
throw new Error(`Invalid collection: ${collection}`);
|
||||
}
|
||||
|
||||
private asResponse<T extends DocumentSchema>(results: SearchResponse<T>): SearchResult<T> {
|
||||
return {
|
||||
page: results.page,
|
||||
total: results.found,
|
||||
count: results.out_of,
|
||||
items: (results.hits || []).map((hit) => hit.document),
|
||||
facets: (results.facet_counts || []).map((facet) => ({
|
||||
counts: facet.counts.map((item) => ({ count: item.count, value: item.value })),
|
||||
fieldName: facet.field_name as string,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
private async flush() {
|
||||
for (const [collection, schema] of schemas) {
|
||||
if (this.queue[collection].upsert.length > 0) {
|
||||
try {
|
||||
const items = this.queue[collection].upsert.map((item) => removeNil(item));
|
||||
this.logger.debug(`Flushing ${items.length} ${collection} upserts to typesense`);
|
||||
await this.client
|
||||
.collections(schema.name)
|
||||
.documents()
|
||||
.import(items, { action: 'upsert', dirty_values: 'coerce_or_drop' });
|
||||
this.queue[collection].upsert = [];
|
||||
} catch (error) {
|
||||
this.handleError(error);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.queue[collection].delete.length > 0) {
|
||||
try {
|
||||
const items = this.queue[collection].delete;
|
||||
this.logger.debug(`Flushing ${items.length} ${collection} deletes to typesense`);
|
||||
await this.client
|
||||
.collections(schema.name)
|
||||
.documents()
|
||||
.delete({ filter_by: `id: [${items.join(',')}]` });
|
||||
this.queue[collection].delete = [];
|
||||
} catch (error) {
|
||||
this.handleError(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private handleError(error: any): never {
|
||||
this.logger.error('Unable to index documents');
|
||||
const results = error.importResults || [];
|
||||
for (const result of results) {
|
||||
try {
|
||||
result.document = JSON.parse(result.document);
|
||||
} catch {}
|
||||
}
|
||||
this.logger.verbose(JSON.stringify(results, null, 2));
|
||||
throw error;
|
||||
}
|
||||
|
||||
private async updateAlias(collection: SearchCollection) {
|
||||
const schema = schemaMap[collection];
|
||||
const alias = await this.client
|
||||
.aliases(collection)
|
||||
.retrieve()
|
||||
.catch(() => null);
|
||||
|
||||
// update alias to current collection
|
||||
this.logger.log(`Using new schema: ${alias?.collection_name || '(unset)'} => ${schema.name}`);
|
||||
await this.client.aliases().upsert(collection, { collection_name: schema.name });
|
||||
|
||||
// delete previous collection
|
||||
if (alias && alias.collection_name !== schema.name) {
|
||||
this.logger.log(`Deleting old schema: ${alias.collection_name}`);
|
||||
await this.client.collections(alias.collection_name).delete();
|
||||
}
|
||||
}
|
||||
|
||||
private patchAsset(asset: AssetEntity): GeoAssetEntity {
|
||||
const lat = asset.exifInfo?.latitude;
|
||||
const lng = asset.exifInfo?.longitude;
|
||||
if (lat && lng && lat !== 0 && lng !== 0) {
|
||||
return { ...asset, geo: [lat, lng] };
|
||||
}
|
||||
|
||||
return asset;
|
||||
}
|
||||
}
|
||||
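Two implementation details in the repository above are worth calling out: writes are buffered and flushed to Typesense every five seconds, and a schema migration imports into a new versioned collection before repointing the stable alias and dropping the old one, so reads never hit a half-built index. A condensed, hedged sketch of that alias swap (mirrors updateAlias above):

import { Client } from 'typesense';

async function swapAlias(client: Client, aliasName: string, newCollectionName: string) {
  const previous = await client.aliases(aliasName).retrieve().catch(() => null);

  // Point the stable alias ('assets' / 'albums') at the freshly imported versioned collection.
  await client.aliases().upsert(aliasName, { collection_name: newCollectionName });

  // Drop the superseded collection so old schema versions do not accumulate.
  if (previous && previous.collection_name !== newCollectionName) {
    await client.collections(previous.collection_name).delete();
  }
}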