From 8ecde3b277e4de5c2a285efcca240d7da6e56d7b Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Mon, 2 Dec 2024 22:35:51 +0100 Subject: [PATCH 01/10] feat: run all offline checks in a single job --- server/src/interfaces/job.interface.ts | 14 +++--- server/src/services/library.service.spec.ts | 56 ++++++++++----------- server/src/services/library.service.ts | 44 +++++++++------- 3 files changed, 61 insertions(+), 53 deletions(-) diff --git a/server/src/interfaces/job.interface.ts b/server/src/interfaces/job.interface.ts index 7976f813022ff..56f09a92b05ca 100644 --- a/server/src/interfaces/job.interface.ts +++ b/server/src/interfaces/job.interface.ts @@ -85,7 +85,7 @@ export enum JobName { LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files', LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets', LIBRARY_SYNC_FILE = 'library-sync-file', - LIBRARY_SYNC_ASSET = 'library-sync-asset', + LIBRARY_SYNC_ASSETS = 'library-sync-assets', LIBRARY_DELETE = 'library-delete', LIBRARY_QUEUE_SYNC_ALL = 'library-queue-sync-all', LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup', @@ -148,15 +148,15 @@ export interface ILibraryFileJob extends IEntityJob { assetPath: string; } -export interface ILibraryAssetJob extends IEntityJob { - importPaths: string[]; - exclusionPatterns: string[]; -} - export interface IBulkEntityJob extends IBaseJob { ids: string[]; } +export interface ILibraryAssetsJob extends IBulkEntityJob { + importPaths: string[]; + exclusionPatterns: string[]; +} + export interface IDeleteFilesJob extends IBaseJob { files: Array; } @@ -287,7 +287,7 @@ export type JobItem = | { name: JobName.LIBRARY_SYNC_FILE; data: ILibraryFileJob } | { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob } - | { name: JobName.LIBRARY_SYNC_ASSET; data: ILibraryAssetJob } + | { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryAssetsJob } | { name: JobName.LIBRARY_DELETE; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ALL; data?: IBaseJob } | { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob } diff --git a/server/src/services/library.service.spec.ts b/server/src/services/library.service.spec.ts index 43d6662d659e5..13507f64759b6 100644 --- a/server/src/services/library.service.spec.ts +++ b/server/src/services/library.service.spec.ts @@ -10,7 +10,7 @@ import { ICronRepository } from 'src/interfaces/cron.interface'; import { IDatabaseRepository } from 'src/interfaces/database.interface'; import { IJobRepository, - ILibraryAssetJob, + ILibraryAssetsJob, ILibraryFileJob, JobName, JOBS_LIBRARY_PAGINATION_SIZE, @@ -231,7 +231,7 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_ASSET, + name: JobName.LIBRARY_SYNC_ASSETS, data: { id: assetStub.external.id, importPaths: libraryStub.externalLibrary1.importPaths, @@ -250,22 +250,22 @@ describe(LibraryService.name, () => { describe('handleSyncAsset', () => { it('should skip missing assets', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; assetMock.getById.mockResolvedValue(null); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED); expect(assetMock.remove).not.toHaveBeenCalled(); }); it('should offline assets no longer on disk', async () => { - const 
mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -273,7 +273,7 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.stat.mockRejectedValue(new Error('ENOENT, no such file or directory')); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, @@ -282,15 +282,15 @@ describe(LibraryService.name, () => { }); it('should offline assets matching an exclusion pattern', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: ['**/user1/**'], }; assetMock.getById.mockResolvedValue(assetStub.external); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, deletedAt: expect.any(Date), @@ -298,8 +298,8 @@ describe(LibraryService.name, () => { }); it('should set assets outside of import paths as offline', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/data/user2'], exclusionPatterns: [], }; @@ -307,7 +307,7 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.checkFileExists.mockResolvedValue(true); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, @@ -316,8 +316,8 @@ describe(LibraryService.name, () => { }); it('should do nothing with online assets', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -325,14 +325,14 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).not.toHaveBeenCalled(); }); it('should un-trash an asset previously marked as offline', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -340,7 +340,7 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.trashedOffline); storageMock.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); 
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.trashedOffline.id], { deletedAt: null, @@ -353,8 +353,8 @@ describe(LibraryService.name, () => { }); it('should update file when mtime has changed', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -363,7 +363,7 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.stat.mockResolvedValue({ mtime: newMTime } as Stats); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { fileModifiedAt: newMTime, @@ -969,7 +969,7 @@ describe(LibraryService.name, () => { }, ]); expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ ids: [assetStub.image.id] }) }, ]); }); @@ -994,7 +994,7 @@ describe(LibraryService.name, () => { }, ]); expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ id: [assetStub.image.id] }) }, ]); }); @@ -1009,7 +1009,7 @@ describe(LibraryService.name, () => { await sut.watchAll(); expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ ids: [assetStub.image.id] }) }, ]); }); @@ -1166,9 +1166,9 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_ASSET, + name: JobName.LIBRARY_SYNC_ASSETS, data: { - id: assetStub.image1.id, + ids: [assetStub.image1.id], importPaths: libraryStub.externalLibrary1.importPaths, exclusionPatterns: libraryStub.externalLibrary1.exclusionPatterns, }, diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index c0d24fea9e19d..0be2dcc4fae63 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -242,12 +242,10 @@ export class LibraryService extends BaseService { } private async syncAssets({ importPaths, exclusionPatterns }: LibraryEntity, assetIds: string[]) { - await this.jobRepository.queueAll( - assetIds.map((assetId) => ({ - name: JobName.LIBRARY_SYNC_ASSET, - data: { id: assetId, importPaths, exclusionPatterns }, - })), - ); + await this.jobRepository.queue({ + name: JobName.LIBRARY_SYNC_ASSETS, + data: { ids: assetIds, importPaths, exclusionPatterns }, + }); } private async validateImportPath(importPath: string): Promise { @@ -472,27 +470,35 @@ export class LibraryService extends BaseService { return JobStatus.SUCCESS; } - @OnJob({ name: JobName.LIBRARY_SYNC_ASSET, queue: QueueName.LIBRARY }) - async handleSyncAsset(job: JobOf): Promise { - const asset = await this.assetRepository.getById(job.id); + @OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY }) + async handleSyncAssets(job: JobOf): Promise { + for (const id of job.ids) { + await this.handleSyncAsset(id, job.importPaths, job.exclusionPatterns); + } + + return JobStatus.SUCCESS; + } + + 
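+  // Reconciles a single asset with the filesystem: it is trashed when it is missing from disk,
+  // outside every import path, or matched by an exclusion pattern, and restored/refreshed otherwise.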
private async handleSyncAsset(id: string, importPaths: string[], exclusionPatterns: string[]): Promise { + const asset = await this.assetRepository.getById(id); if (!asset) { return JobStatus.SKIPPED; } const markOffline = async (explanation: string) => { if (!asset.isOffline) { - this.logger.debug(`${explanation}, removing: ${asset.originalPath}`); + this.logger.debug(`${explanation}, moving to trash: ${asset.originalPath}`); await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() }); } }; - const isInPath = job.importPaths.find((path) => asset.originalPath.startsWith(path)); + const isInPath = importPaths.find((path) => asset.originalPath.startsWith(path)); if (!isInPath) { await markOffline('Asset is no longer in an import path'); return JobStatus.SUCCESS; } - const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern)); + const isExcluded = exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern)); if (isExcluded) { await markOffline('Asset is covered by an exclusion pattern'); return JobStatus.SUCCESS; @@ -597,12 +603,14 @@ export class LibraryService extends BaseService { for await (const assets of onlineAssets) { assetCount += assets.length; this.logger.debug(`Discovered ${assetCount} asset(s) in library ${library.id}...`); - await this.jobRepository.queueAll( - assets.map((asset) => ({ - name: JobName.LIBRARY_SYNC_ASSET, - data: { id: asset.id, importPaths: library.importPaths, exclusionPatterns: library.exclusionPatterns }, - })), - ); + await this.jobRepository.queue({ + name: JobName.LIBRARY_SYNC_ASSETS, + data: { + ids: assets.map((asset) => asset.id), + importPaths: library.importPaths, + exclusionPatterns: library.exclusionPatterns, + }, + }); this.logger.debug(`Queued check of ${assets.length} asset(s) in library ${library.id}...`); } From 96f2f6535e4d258b33a03a7f4618707175e8ebdc Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Tue, 10 Dec 2024 15:39:30 +0100 Subject: [PATCH 02/10] do it in sql, baby --- e2e/src/api/specs/library.e2e-spec.ts | 195 +++++++++- e2e/src/utils.ts | 15 +- server/src/interfaces/asset.interface.ts | 4 +- server/src/interfaces/job.interface.ts | 12 +- server/src/interfaces/library.interface.ts | 7 + server/src/repositories/asset.repository.ts | 79 ++-- server/src/services/library.service.ts | 362 ++++++++++-------- server/src/utils/asset.util.ts | 1 - server/src/utils/misc.spec.ts | 2 +- server/src/utils/misc.ts | 70 ++-- .../repositories/asset.repository.mock.ts | 2 + 11 files changed, 524 insertions(+), 225 deletions(-) diff --git a/e2e/src/api/specs/library.e2e-spec.ts b/e2e/src/api/specs/library.e2e-spec.ts index 9c7796d1584c1..1959a230ad231 100644 --- a/e2e/src/api/specs/library.e2e-spec.ts +++ b/e2e/src/api/specs/library.e2e-spec.ts @@ -421,7 +421,7 @@ describe('/libraries', () => { const { status } = await request(app) .post(`/libraries/${library.id}/scan`) .set('Authorization', `Bearer ${admin.accessToken}`) - .send({ refreshModifiedFiles: true }); + .send(); expect(status).toBe(204); await utils.waitForQueueFinish(admin.accessToken, 'library'); @@ -453,7 +453,7 @@ describe('/libraries', () => { const { status } = await request(app) .post(`/libraries/${library.id}/scan`) .set('Authorization', `Bearer ${admin.accessToken}`) - .send({ refreshModifiedFiles: true }); + .send(); expect(status).toBe(204); await utils.waitForQueueFinish(admin.accessToken, 'library'); @@ -577,7 +577,7 @@ describe('/libraries', () => { ]); }); - it('should 
not trash an online asset', async () => { + it('should not set an asset offline if its file exists, is in an import path, and not covered by an exclusion pattern', async () => { const library = await utils.createLibrary(admin.accessToken, { ownerId: admin.userId, importPaths: [`${testAssetDirInternal}/temp`], @@ -601,6 +601,195 @@ describe('/libraries', () => { expect(assets).toEqual(assetsBefore); }); + + it('should set an offline asset to online if its file exists, is in an import path, and not covered by an exclusion pattern', async () => { + utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); + + const library = await utils.createLibrary(admin.accessToken, { + ownerId: admin.userId, + importPaths: [`${testAssetDirInternal}/temp/offline`], + }); + + await scan(admin.accessToken, library.id); + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + + utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + expect(offlineAsset.isTrashed).toBe(true); + expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(offlineAsset.isOffline).toBe(true); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const backOnlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(backOnlineAsset.isTrashed).toBe(false); + expect(backOnlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(backOnlineAsset.isOffline).toBe(false); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + expect(assets.count).toBe(1); + } + }); + + it('should not set an offline asset to online if its file exists, is not covered by an exclusion pattern, but is outside of all import paths', async () => { + utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); + + const library = await utils.createLibrary(admin.accessToken, { + ownerId: admin.userId, + importPaths: [`${testAssetDirInternal}/temp/offline`], + }); + + await scan(admin.accessToken, library.id); + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + + utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + { + const { assets } = await 
utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(offlineAsset.isTrashed).toBe(true); + expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(offlineAsset.isOffline).toBe(true); + + utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`); + + utils.createDirectory(`${testAssetDir}/temp/another-path/`); + + await utils.updateLibrary(admin.accessToken, library.id, { + importPaths: [`${testAssetDirInternal}/temp/another-path`], + }); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const stillOfflineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(stillOfflineAsset.isTrashed).toBe(true); + expect(stillOfflineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(stillOfflineAsset.isOffline).toBe(true); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + utils.removeDirectory(`${testAssetDir}/temp/another-path/`); + }); + + it('should not set an offline asset to online if its file exists, is in an import path, but is covered by an exclusion pattern', async () => { + utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); + + const library = await utils.createLibrary(admin.accessToken, { + ownerId: admin.userId, + importPaths: [`${testAssetDirInternal}/temp/offline`], + }); + + await scan(admin.accessToken, library.id); + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + + utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(offlineAsset.isTrashed).toBe(true); + expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(offlineAsset.isOffline).toBe(true); + + utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`); + + await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] }); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const stillOfflineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(stillOfflineAsset.isTrashed).toBe(true); + expect(stillOfflineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + 
expect(stillOfflineAsset.isOffline).toBe(true); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + }); }); describe('POST /libraries/:id/validate', () => { diff --git a/e2e/src/utils.ts b/e2e/src/utils.ts index 14225ff063038..b00c3c0b6d30d 100644 --- a/e2e/src/utils.ts +++ b/e2e/src/utils.ts @@ -10,6 +10,7 @@ import { Permission, PersonCreateDto, SharedLinkCreateDto, + UpdateLibraryDto, UserAdminCreateDto, UserPreferencesUpdateDto, ValidateLibraryDto, @@ -35,6 +36,7 @@ import { updateAlbumUser, updateAssets, updateConfig, + updateLibrary, updateMyPreferences, upsertTags, validate, @@ -42,7 +44,7 @@ import { import { BrowserContext } from '@playwright/test'; import { exec, spawn } from 'node:child_process'; import { createHash } from 'node:crypto'; -import { existsSync, mkdirSync, rmSync, writeFileSync } from 'node:fs'; +import { existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs'; import { tmpdir } from 'node:os'; import path, { dirname } from 'node:path'; import { setTimeout as setAsyncTimeout } from 'node:timers/promises'; @@ -392,6 +394,14 @@ export const utils = { rmSync(path); }, + renameImageFile: (oldPath: string, newPath: string) => { + if (!existsSync(oldPath)) { + return; + } + + renameSync(oldPath, newPath); + }, + removeDirectory: (path: string) => { if (!existsSync(path)) { return; @@ -444,6 +454,9 @@ export const utils = { createLibrary: (accessToken: string, dto: CreateLibraryDto) => createLibrary({ createLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), + updateLibrary: (accessToken: string, id: string, dto: UpdateLibraryDto) => + updateLibrary({ id, updateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), + validateLibrary: (accessToken: string, id: string, dto: ValidateLibraryDto) => validate({ id, validateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), diff --git a/server/src/interfaces/asset.interface.ts b/server/src/interfaces/asset.interface.ts index add4a18ae3631..b77f41a9e45b9 100644 --- a/server/src/interfaces/asset.interface.ts +++ b/server/src/interfaces/asset.interface.ts @@ -151,6 +151,7 @@ export const IAssetRepository = 'IAssetRepository'; export interface IAssetRepository { create(asset: AssetCreate): Promise; + createAll(assets: AssetCreate[]): Promise; getByIds( ids: string[], relations?: FindOptionsRelations, @@ -193,5 +194,6 @@ export interface IAssetRepository { getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise; upsertFile(file: UpsertFileOptions): Promise; upsertFiles(files: UpsertFileOptions[]): Promise; - updateOffline(pagination: PaginationOptions, library: LibraryEntity): Paginated; + updateOffline(library: LibraryEntity): Promise; + getNewPaths(libraryId: string, paths: string[]): Promise; } diff --git a/server/src/interfaces/job.interface.ts b/server/src/interfaces/job.interface.ts index 1822a609c2ff8..c55b2aefe0bea 100644 --- a/server/src/interfaces/job.interface.ts +++ b/server/src/interfaces/job.interface.ts @@ -143,12 +143,18 @@ export interface IAssetDeleteJob extends IEntityJob { deleteOnDisk: boolean; } -export interface ILibraryFileJob extends IEntityJob { +export interface ILibraryFileJob { + libraryId: string; ownerId: string; assetPath: string; } -export interface IBulkEntityJob extends IBaseJob { +export interface ILibraryBulkIdsJob { + libraryId: string; + assetIds: string[]; +} + +export interface IBulkEntityJob { ids: string[]; } @@ -287,7 +293,7 @@ export type 
JobItem = | { name: JobName.LIBRARY_SYNC_FILE; data: ILibraryFileJob } | { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob } - | { name: JobName.LIBRARY_SYNC_ASSETS; data: IBulkEntityJob } + | { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryBulkIdsJob } | { name: JobName.LIBRARY_DELETE; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ALL; data?: IBaseJob } | { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob } diff --git a/server/src/interfaces/library.interface.ts b/server/src/interfaces/library.interface.ts index d8f1a1303116e..803cf1bc4ed31 100644 --- a/server/src/interfaces/library.interface.ts +++ b/server/src/interfaces/library.interface.ts @@ -1,8 +1,15 @@ +import { ADDED_IN_PREFIX } from 'src/constants'; import { LibraryStatsResponseDto } from 'src/dtos/library.dto'; import { LibraryEntity } from 'src/entities/library.entity'; export const ILibraryRepository = 'ILibraryRepository'; +export enum AssetSyncResult { + DO_NOTHING, + UPDATE, + OFFLINE, +} + export interface ILibraryRepository { getAll(withDeleted?: boolean): Promise; getAllDeleted(): Promise; diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index e9c54b90b843f..24ccb3ff17c0c 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -1,6 +1,5 @@ import { Injectable } from '@nestjs/common'; import { InjectRepository } from '@nestjs/typeorm'; -import picomatch from 'picomatch'; import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators'; import { AssetFileEntity } from 'src/entities/asset-files.entity'; import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity'; @@ -79,6 +78,10 @@ export class AssetRepository implements IAssetRepository { return this.repository.save(asset); } + createAll(assets: AssetCreate[]): Promise { + return this.repository.save(assets); + } + @GenerateSql({ params: [[DummyValue.UUID], { day: 1, month: 1 }] }) getByDayOfYear(ownerIds: string[], { day, month }: MonthDay): Promise { return this.repository @@ -716,39 +719,47 @@ export class AssetRepository implements IAssetRepository { await this.fileRepository.upsert(files, { conflictPaths: ['assetId', 'type'] }); } - updateOffline(pagination: PaginationOptions, library: LibraryEntity): Paginated { - return this.dataSource.manager.transaction(async (transactionalEntityManager) => - transactionalEntityManager.query( - ` - WITH updated_rows AS ( - UPDATE assets - SET "isOffline" = $1, "deletedAt" = $2 - WHERE "isOffline" = $3 - AND ( - "originalPath" NOT SIMILAR TO $4 - OR "originalPath" SIMILAR TO $5 - ) - RETURNING id - ) - SELECT * - FROM assets - WHERE id NOT IN (SELECT id FROM updated_rows) - AND "libraryId" = $6 - AND ($7 OR "deletedAt" IS NULL) - LIMIT $8 OFFSET $9; - `, - [ - true, // $1 - is_offline = true - new Date(), // $2 - deleted_at = current timestamp - false, // $3 - is_offline = false - library.importPaths.map((importPath) => `${importPath}%`).join('|'), // $4 - importPartMatcher pattern - library.exclusionPatterns.map(globToSqlPattern).join('|'), // $5 - exclusionPatternMatcher pattern - library.id, // $6 - libraryId matches job.id - true, // $7 - withDeleted flag - pagination.take, // $8 - LIMIT - pagination.skip, // $9 - OFFSET - ], - ), + updateOffline(library: LibraryEntity): Promise { + const paths = library.importPaths.map((importPath) => `${importPath}%`).join('|'); + const exclusions = 
library.exclusionPatterns.map((pattern) => globToSqlPattern(pattern)).join('|'); + return this.repository + .createQueryBuilder() + .update() + .set({ + isOffline: true, + deletedAt: new Date(), + }) + .where({ isOffline: false }) + .andWhere({ libraryId: library.id }) + .andWhere( + new Brackets((qb) => { + qb.where('originalPath NOT SIMILAR TO :paths', { + paths, + }).orWhere('originalPath SIMILAR TO :exclusions', { + exclusions, + }); + }), + ) + .execute(); + } + + async getNewPaths(libraryId: string, paths: string[]): Promise { + const rawSql = ` + WITH unnested_paths AS ( + SELECT unnest($1::text[]) AS path + ) + SELECT unnested_paths.path AS path + FROM unnested_paths + WHERE not exists( + SELECT 1 + FROM assets + WHERE "originalPath" = unnested_paths.path AND + "libraryId" = $2 ); + `; + + return this.repository + .query(rawSql, [paths, libraryId]) + .then((result) => result.map((row: { path: string }) => row.path)); } } diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index febcf2418643a..2c4d17ab84774 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -2,7 +2,6 @@ import { BadRequestException, Injectable } from '@nestjs/common'; import { R_OK } from 'node:constants'; import path, { basename, isAbsolute, parse } from 'node:path'; import picomatch from 'picomatch'; -import parseLib from 'picomatch/lib/parse'; import { StorageCore } from 'src/cores/storage.core'; import { OnEvent, OnJob } from 'src/decorators'; import { @@ -17,10 +16,11 @@ import { } from 'src/dtos/library.dto'; import { AssetEntity } from 'src/entities/asset.entity'; import { LibraryEntity } from 'src/entities/library.entity'; -import { AssetType, ImmichWorker } from 'src/enum'; +import { AssetStatus, AssetType, ImmichWorker } from 'src/enum'; import { DatabaseLock } from 'src/interfaces/database.interface'; import { ArgOf } from 'src/interfaces/event.interface'; import { JobName, JobOf, JOBS_LIBRARY_PAGINATION_SIZE, JobStatus, QueueName } from 'src/interfaces/job.interface'; +import { AssetSyncResult } from 'src/interfaces/library.interface'; import { BaseService } from 'src/services/base.service'; import { mimeTypes } from 'src/utils/mime-types'; import { handlePromiseError } from 'src/utils/misc'; @@ -99,6 +99,15 @@ export class LibraryService extends BaseService { let _resolve: () => void; const ready$ = new Promise((resolve) => (_resolve = resolve)); + const handler = async (event: string, path: string) => { + if (matcher(path)) { + this.logger.debug(`File ${event} event received for ${path} in library ${library.id}}`); + await this.syncFiles(library, [path]); + } else { + this.logger.verbose(`Ignoring file ${event} event for ${path} in library ${library.id}`); + } + }; + this.watchers[id] = this.storageRepository.watch( library.importPaths, { @@ -108,58 +117,13 @@ export class LibraryService extends BaseService { { onReady: () => _resolve(), onAdd: (path) => { - const handler = async () => { - this.logger.debug(`File add event received for ${path} in library ${library.id}}`); - if (matcher(path)) { - const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path); - if (asset) { - await this.jobRepository.queue({ - name: JobName.LIBRARY_SYNC_ASSETS, - data: { - ids: [asset.id], - }, - }); - } - if (matcher(path)) { - await this.syncFiles(library, [path]); - } - } - }; - return handlePromiseError(handler(), this.logger); + return handlePromiseError(handler('add', path), this.logger); }, 
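        // the change and unlink events below are routed through the same shared handler as the add event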
onChange: (path) => { - const handler = async () => { - this.logger.debug(`Detected file change for ${path} in library ${library.id}`); - const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path); - if (asset) { - await this.jobRepository.queue({ - name: JobName.LIBRARY_SYNC_ASSETS, - data: { - ids: [asset.id], - }, - }); - } - if (matcher(path)) { - // Note: if the changed file was not previously imported, it will be imported now. - await this.syncFiles(library, [path]); - } - }; - return handlePromiseError(handler(), this.logger); + return handlePromiseError(handler('change', path), this.logger); }, onUnlink: (path) => { - const handler = async () => { - this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`); - const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path); - if (asset) { - await this.jobRepository.queue({ - name: JobName.LIBRARY_SYNC_ASSETS, - data: { - ids: [asset.id], - }, - }); - } - }; - return handlePromiseError(handler(), this.logger); + return handlePromiseError(handler('delete', path), this.logger); }, onError: (error) => { this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`); @@ -249,7 +213,7 @@ export class LibraryService extends BaseService { assetPaths.map((assetPath) => ({ name: JobName.LIBRARY_SYNC_FILE, data: { - id, + libraryId: id, assetPath, ownerId, }, @@ -370,92 +334,80 @@ export class LibraryService extends BaseService { @OnJob({ name: JobName.LIBRARY_SYNC_FILE, queue: QueueName.LIBRARY }) async handleSyncFile(job: JobOf): Promise { - // Only needs to handle new assets + /* For performance reasons, we don't check if the asset is already imported. + This is instead handled by a previous step in the scan process. + In the edge case of an asset being imported between that check + and this function call, the database constraint will prevent duplicates. + */ + const assetPath = path.normalize(job.assetPath); - let asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath); + // TODO: we can replace this get call with an exists call + /* let asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.libraryId, assetPath); if (asset) { - return JobStatus.SKIPPED; - } + return await this.handleSyncAssets({ libraryId: job.libraryId, assetIds: [asset.id] }); + } */ - let stat; - try { - stat = await this.storageRepository.stat(assetPath); - } catch (error: any) { - if (error.code === 'ENOENT') { - this.logger.error(`File not found: ${assetPath}`); - return JobStatus.SKIPPED; - } - this.logger.error(`Error reading file: ${assetPath}. Error: ${error}`); - return JobStatus.FAILED; - } - - this.logger.log(`Importing new library asset: ${assetPath}`); - - const library = await this.libraryRepository.get(job.id, true); - if (!library || library.deletedAt) { - this.logger.error('Cannot import asset into deleted library'); - return JobStatus.FAILED; - } + this.logger.log(`Importing new asset ${assetPath} into library ${job.libraryId}`); // TODO: device asset id is deprecated, remove it const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, ''); const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`); - // TODO: doesn't xmp replace the file extension? Will need investigation - let sidecarPath: string | null = null; - if (await this.storageRepository.checkFileExists(`${assetPath}.xmp`, R_OK)) { - sidecarPath = `${assetPath}.xmp`; - } - const assetType = mimeTypes.isVideo(assetPath) ? 
AssetType.VIDEO : AssetType.IMAGE; - const mtime = stat.mtime; + const now = new Date(); - asset = await this.assetRepository.create({ + const asset = await this.assetRepository.create({ ownerId: job.ownerId, - libraryId: job.id, + libraryId: job.libraryId, checksum: pathHash, originalPath: assetPath, deviceAssetId, deviceId: 'Library Import', - fileCreatedAt: mtime, - fileModifiedAt: mtime, - localDateTime: mtime, + fileCreatedAt: now, + fileModifiedAt: now, + localDateTime: now, type: assetType, originalFileName: parse(assetPath).base, - - sidecarPath, isExternal: true, }); - await this.queuePostSyncJobs(asset); + this.logger.debug(`Queueing metadata extraction for: ${asset.originalPath}`); + + await this.queuePostSyncJobs([asset.id]); return JobStatus.SUCCESS; } - async queuePostSyncJobs(asset: AssetEntity) { - this.logger.debug(`Queueing metadata extraction for: ${asset.originalPath}`); - - await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id, source: 'upload' } }); + async queuePostSyncJobs(assetIds: string[]) { + await this.jobRepository.queueAll( + assetIds.map((assetId) => ({ + name: JobName.METADATA_EXTRACTION, + data: { id: assetId, source: 'upload' }, + })), + ); } async queueScan(id: string) { await this.findOrFail(id); + this.logger.log(`Starting to scan library ${id}`); + await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, data: { id, }, }); + await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } }); } @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_ALL, queue: QueueName.LIBRARY }) async handleQueueSyncAll(): Promise { - this.logger.debug(`Refreshing all external libraries`); + this.logger.log(`Initiating scan of all external libraries`); await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} }); @@ -481,52 +433,110 @@ export class LibraryService extends BaseService { @OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY }) async handleSyncAssets(job: JobOf): Promise { - for (const id of job.ids) { - await this.handleSyncAsset(id); + const assets = await this.assetRepository.getByIds(job.assetIds); + + const assetIdsToOffline: string[] = []; + const assetIdsToUpdate: string[] = []; + + for (const asset of assets) { + const action = await this.handleSyncAsset(asset); + switch (action) { + case AssetSyncResult.OFFLINE: + assetIdsToOffline.push(asset.id); + break; + case AssetSyncResult.UPDATE: + assetIdsToUpdate.push(asset.id); + break; + } + } + + if (assetIdsToOffline.length) { + await this.assetRepository.updateAll(assetIdsToOffline, { isOffline: true, deletedAt: new Date() }); + this.logger.log( + `Originals are missing for ${assetIdsToOffline.length} asset(s) in library ${job.libraryId}, marked offline`, + ); + } + + if (assetIdsToUpdate.length) { + //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed + await this.assetRepository.updateAll(assetIdsToUpdate, { + isOffline: false, + deletedAt: null, + }); + + this.logger.log( + `Found ${assetIdsToOffline.length} asset(s) with modified files for library ${job.libraryId}, queuing refresh...`, + ); + + await this.queuePostSyncJobs(assetIdsToUpdate); + } + + const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length; + if (remainingCount > 0) { + this.logger.log(`${remainingCount} asset(s) are unchanged in library ${job.libraryId}, no action required`); } return JobStatus.SUCCESS; } - private async handleSyncAsset(id: string): Promise { - 
const asset = await this.assetRepository.getById(id); + private async checkOfflineAsset(asset: AssetEntity) { + if (!asset.libraryId) { + return false; + } + + const library = await this.libraryRepository.get(asset.libraryId); + if (!library) { + return false; + } + + const isInImportPath = library.importPaths.find((path) => asset.originalPath.startsWith(path)); + if (!isInImportPath) { + return false; + } + + const isExcluded = library.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern)); + if (isExcluded) { + return false; + } + + return true; + } + + private async handleSyncAsset(asset: AssetEntity): Promise { if (!asset) { - return JobStatus.SKIPPED; + return AssetSyncResult.DO_NOTHING; } let stat; try { stat = await this.storageRepository.stat(asset.originalPath); } catch { - await (async (explanation: string) => { - if (!asset.isOffline) { - this.logger.debug(`${explanation}, moving to trash: ${asset.originalPath}`); - await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() }); - } - })('Asset is no longer on disk or is inaccessible because of permissions'); - return JobStatus.SUCCESS; + if (asset.isOffline) { + return AssetSyncResult.DO_NOTHING; + } + + this.logger.debug( + `Asset is no longer on disk or is inaccessible because of permissions, moving to trash: ${asset.originalPath}`, + ); + return AssetSyncResult.OFFLINE; } const mtime = stat.mtime; const isAssetModified = mtime.toISOString() !== asset.fileModifiedAt.toISOString(); + let shouldAssetGoOnline = false; - if (asset.isOffline || isAssetModified) { - this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`); - //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed - await this.assetRepository.updateAll([asset.id], { - isOffline: false, - deletedAt: null, - fileCreatedAt: mtime, - fileModifiedAt: mtime, - originalFileName: parse(asset.originalPath).base, - }); + if (asset.isOffline && asset.status != AssetStatus.DELETED) { + // Only perform the expensive check if the asset is offline + shouldAssetGoOnline = await this.checkOfflineAsset(asset); } - if (isAssetModified) { - this.logger.debug(`Asset was modified, queuing metadata extraction for: ${asset.originalPath}`); - await this.queuePostSyncJobs(asset); + if (shouldAssetGoOnline || isAssetModified) { + this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`); + + return AssetSyncResult.UPDATE; } - return JobStatus.SUCCESS; + + return AssetSyncResult.DO_NOTHING; } @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, queue: QueueName.LIBRARY }) @@ -537,7 +547,7 @@ export class LibraryService extends BaseService { return JobStatus.SKIPPED; } - this.logger.log(`Refreshing library ${library.id} for new assets`); + this.logger.log(`Crawling import paths for library ${library.id}...`); const validImportPaths: string[] = []; @@ -552,28 +562,53 @@ export class LibraryService extends BaseService { if (validImportPaths.length === 0) { this.logger.warn(`No valid import paths found for library ${library.id}`); + + return JobStatus.SKIPPED; } - const assetsOnDisk = this.storageRepository.walk({ + const pathsOnDisk = this.storageRepository.walk({ pathsToCrawl: validImportPaths, includeHidden: false, exclusionPatterns: library.exclusionPatterns, take: JOBS_LIBRARY_PAGINATION_SIZE, }); - let count = 0; + let importCount = 0; + let crawlCount = 0; - for await (const assetBatch of assetsOnDisk) { - count += 
assetBatch.length; - this.logger.debug(`Discovered ${count} asset(s) on disk for library ${library.id}...`); - await this.syncFiles(library, assetBatch); - this.logger.verbose(`Queued scan of ${assetBatch.length} crawled asset(s) in library ${library.id}...`); + this.logger.log(`Starting crawl of filesystem for ${library.id}...`); + + for await (const pathBatch of pathsOnDisk) { + crawlCount += pathBatch.length; + this.logger.log( + `Crawled ${pathBatch.length} file(s) for library ${library.id}, in total ${crawlCount} file(s) crawled so far`, + ); + const newPaths = await this.assetRepository.getNewPaths(library.id, pathBatch); + if (newPaths.length > 0) { + importCount += newPaths.length; + await this.syncFiles(library, newPaths); + if (newPaths.length < pathBatch.length) { + this.logger.debug( + `Current crawl batch: ${newPaths.length} of ${pathBatch.length} file(s) are new, queued import for library ${library.id}...`, + ); + } else { + this.logger.debug( + `Current crawl batch: ${newPaths.length} new file(s), queued import for library ${library.id}...`, + ); + } + } else { + this.logger.debug(`Current crawl batch: ${pathBatch.length} asset(s) already in library ${library.id}`); + } } - if (count > 0) { - this.logger.debug(`Finished queueing scan of ${count} assets on disk for library ${library.id}`); - } else if (validImportPaths.length > 0) { - this.logger.debug(`No non-excluded assets found in any import path for library ${library.id}`); + if (importCount > 0 && importCount === crawlCount) { + this.logger.log(`Finished crawling and queueing ${crawlCount} file(s) for import for library ${library.id}`); + } else if (importCount > 0) { + this.logger.log( + `Finished crawling ${crawlCount} file(s) of which ${importCount} are queued for import for library ${library.id}`, + ); + } else { + this.logger.debug(`Finished crawling, no files found for library ${library.id}`); } await this.libraryRepository.update({ id: job.id, refreshedAt: new Date() }); @@ -588,33 +623,58 @@ export class LibraryService extends BaseService { return JobStatus.SKIPPED; } - this.logger.log(`Checking assets in library ${library.id} against import path and exclusion patterns`); + const assetCount = (await this.getStatistics(library.id)).total; - const onlineAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) => - this.assetRepository.updateOffline(pagination, library), + this.logger.log( + `Scanning library ${library.id} for assets outside of import paths and/or matching an exclusion pattern...`, ); + const offlineResult = await this.assetRepository.updateOffline(library); - this.logger.log(`Scanning library ${library.id} for removed assets`); + const affectedAssetCount = offlineResult.affected; + if (affectedAssetCount === undefined) { + this.logger.error(`Unknown error occurred when updating offline status in ${library.id}`); + return JobStatus.FAILED; + } - let assetCount = 0; - for await (const assets of onlineAssets) { - if (!assets) { - console.log('No assets found'); - } else { - console.log(assets[0]); - assetCount += assets.length; - this.logger.debug(`Discovered ${assetCount} asset(s) in library ${library.id}...`); + if (affectedAssetCount === assetCount) { + this.logger.log( + `All ${assetCount} asset(s) in ${library.id} are outside of import paths and/or match an exclusion pattern, marked as offline`, + ); + } else if (affectedAssetCount !== assetCount && affectedAssetCount > 0) { + this.logger.log( + `${offlineResult.affected} asset(s) out of ${assetCount} were marked offline due to import 
paths and/or exclusion patterns for library ${library.id}`, + ); + } else { + this.logger.log( + `All ${assetCount} asset(s) in library ${library.id} were in an import path and none matched an exclusion pattern`, + ); + } - for (const asset of assets) { - await this.handleSyncAsset(asset.id); - } + this.logger.log(`Scanning library ${library.id} for assets missing from disk...`); - this.logger.debug(`Checked ${assets.length} asset(s) in library ${library.id}...`); - } + const existingAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) => + this.assetRepository.getAll(pagination, { libraryId: job.id, withDeleted: true }), + ); + + let currentAssetCount = 0; + for await (const assets of existingAssets) { + currentAssetCount += assets.length; + + await this.jobRepository.queue({ + name: JobName.LIBRARY_SYNC_ASSETS, + data: { + libraryId: library.id, + assetIds: assets.map((asset) => asset.id), + }, + }); + + this.logger.log( + `Queued check of ${assets.length} existing asset(s) in library ${library.id}, ${currentAssetCount} of ${assetCount} queued in total`, + ); } - if (assetCount) { - this.logger.log(`Finished check of ${assetCount} assets for library ${library.id}`); + if (currentAssetCount) { + this.logger.log(`Finished queuing ${currentAssetCount} file checks for library ${library.id}`); } return JobStatus.SUCCESS; diff --git a/server/src/utils/asset.util.ts b/server/src/utils/asset.util.ts index 02e1ced7bae7e..f8bed5485f8b1 100644 --- a/server/src/utils/asset.util.ts +++ b/server/src/utils/asset.util.ts @@ -1,5 +1,4 @@ import { BadRequestException } from '@nestjs/common'; -import picomatch from 'picomatch'; import { StorageCore } from 'src/cores/storage.core'; import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto'; import { UploadFieldName } from 'src/dtos/asset-media.dto'; diff --git a/server/src/utils/misc.spec.ts b/server/src/utils/misc.spec.ts index 8ede66df4059e..87ab6d4399bbf 100644 --- a/server/src/utils/misc.spec.ts +++ b/server/src/utils/misc.spec.ts @@ -59,7 +59,7 @@ describe('globToSqlPattern', () => { ['**/*.tif', '%/%.tif'], ['**/*.jp?', '%/%.jp_'], ['**/@eaDir/**', '%/@eaDir/%'], - ['**/._*', '%/.\\_%'], + ['**/._*', `%/.\\_%`], ]; it.each(testCases)('should convert %s to %s', (input, expected) => { diff --git a/server/src/utils/misc.ts b/server/src/utils/misc.ts index f2ce76b8760df..3543cf20b02f5 100644 --- a/server/src/utils/misc.ts +++ b/server/src/utils/misc.ts @@ -266,39 +266,49 @@ export const useSwagger = (app: INestApplication, { write }: { write: boolean }) } }; -export const globToSqlPattern = (glob: string) => { - const tokens = picomatch.parse(glob).tokens; +const convertTokenToSqlPattern = (token: any): string => { + if (typeof token === 'string') { + return token; + } - const convertTokenToSqlPattern = (token: any): string => { - if (typeof token === 'string') { - return token; + switch (token.type) { + case 'slash': { + return '/'; } - - switch (token.type) { - case 'slash': - return '/'; - case 'text': - return token.value; - case 'globstar': - case 'star': - return '%'; - case 'underscore': - return '\\_'; - case 'qmark': - return '_'; - case 'dot': - return '.'; - case 'bracket': - return `[${token.value}]`; - case 'negate': - return `[^${token.value}]`; - case 'brace': - const options = token.value.split(','); - return `(${options.join('|')})`; - default: - return ''; + case 'text': { + return token.value; } - }; + case 'globstar': + case 'star': { + return '%'; + } + case 'underscore': { + return 
String.raw`\_`; + } + case 'qmark': { + return '_'; + } + case 'dot': { + return '.'; + } + case 'bracket': { + return `[${token.value}]`; + } + case 'negate': { + return `[^${token.value}]`; + } + case 'brace': { + const options = token.value.split(','); + return `(${options.join('|')})`; + } + default: { + return ''; + } + } +}; + +export const globToSqlPattern = (glob: string) => { + const tokens = picomatch.parse(glob).tokens; let result = ''; for (const token of tokens) { diff --git a/server/test/repositories/asset.repository.mock.ts b/server/test/repositories/asset.repository.mock.ts index 928a7956c5f0c..07020c48c2c7e 100644 --- a/server/test/repositories/asset.repository.mock.ts +++ b/server/test/repositories/asset.repository.mock.ts @@ -38,5 +38,7 @@ export const newAssetRepositoryMock = (): Mocked => { getDuplicates: vitest.fn(), upsertFile: vitest.fn(), upsertFiles: vitest.fn(), + updateOffline: vitest.fn(), + getNewPaths: vitest.fn(), }; }; From 3d7b9248d69d1c732e36028383ebc4e07d65780f Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Fri, 13 Dec 2024 01:47:34 +0100 Subject: [PATCH 03/10] wip batch imports --- e2e/test-assets | 2 +- server/src/interfaces/job.interface.ts | 8 +- server/src/services/library.service.spec.ts | 6 +- server/src/services/library.service.ts | 91 +++++++++------------ server/src/services/metadata.service.ts | 6 +- 5 files changed, 53 insertions(+), 60 deletions(-) diff --git a/e2e/test-assets b/e2e/test-assets index 99544a200412d..c4a0575c3e89a 160000 --- a/e2e/test-assets +++ b/e2e/test-assets @@ -1 +1 @@ -Subproject commit 99544a200412d553103cc7b8f1a28f339c7cffd9 +Subproject commit c4a0575c3e89a755b951ae6d91e7307cd34c606f diff --git a/server/src/interfaces/job.interface.ts b/server/src/interfaces/job.interface.ts index c55b2aefe0bea..424691f087e09 100644 --- a/server/src/interfaces/job.interface.ts +++ b/server/src/interfaces/job.interface.ts @@ -84,7 +84,7 @@ export enum JobName { // library management LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files', LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets', - LIBRARY_SYNC_FILE = 'library-sync-file', + LIBRARY_SYNC_FILES = 'library-sync-files', LIBRARY_SYNC_ASSETS = 'library-sync-assets', LIBRARY_DELETE = 'library-delete', LIBRARY_QUEUE_SYNC_ALL = 'library-queue-sync-all', @@ -135,7 +135,7 @@ export interface IDelayedJob extends IBaseJob { export interface IEntityJob extends IBaseJob { id: string; - source?: 'upload' | 'sidecar-write' | 'copy'; + source?: 'upload' | 'library-import' | 'sidecar-write' | 'copy'; notify?: boolean; } @@ -146,7 +146,7 @@ export interface IAssetDeleteJob extends IEntityJob { export interface ILibraryFileJob { libraryId: string; ownerId: string; - assetPath: string; + assetPaths: string[]; } export interface ILibraryBulkIdsJob { @@ -290,7 +290,7 @@ export type JobItem = | { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob } // Library Management - | { name: JobName.LIBRARY_SYNC_FILE; data: ILibraryFileJob } + | { name: JobName.LIBRARY_SYNC_FILES; data: ILibraryFileJob } | { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob } | { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryBulkIdsJob } diff --git a/server/src/services/library.service.spec.ts b/server/src/services/library.service.spec.ts index 13507f64759b6..6441634d43b06 100644 --- a/server/src/services/library.service.spec.ts +++ b/server/src/services/library.service.spec.ts @@ -179,7 +179,7 @@ describe(LibraryService.name, () => { 
expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_FILE, + name: JobName.LIBRARY_SYNC_FILES, data: { id: libraryStub.externalLibrary1.id, ownerId: libraryStub.externalLibrary1.owner.id, @@ -960,7 +960,7 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_FILE, + name: JobName.LIBRARY_SYNC_FILES, data: { id: libraryStub.externalLibraryWithImportPaths1.id, assetPath: '/foo/photo.jpg', @@ -985,7 +985,7 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_FILE, + name: JobName.LIBRARY_SYNC_FILES, data: { id: libraryStub.externalLibraryWithImportPaths1.id, assetPath: '/foo/photo.jpg', diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 2c4d17ab84774..7bbf02540f883 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -17,6 +17,7 @@ import { import { AssetEntity } from 'src/entities/asset.entity'; import { LibraryEntity } from 'src/entities/library.entity'; import { AssetStatus, AssetType, ImmichWorker } from 'src/enum'; +import { AssetCreate } from 'src/interfaces/asset.interface'; import { DatabaseLock } from 'src/interfaces/database.interface'; import { ArgOf } from 'src/interfaces/event.interface'; import { JobName, JobOf, JOBS_LIBRARY_PAGINATION_SIZE, JobStatus, QueueName } from 'src/interfaces/job.interface'; @@ -102,7 +103,10 @@ export class LibraryService extends BaseService { const handler = async (event: string, path: string) => { if (matcher(path)) { this.logger.debug(`File ${event} event received for ${path} in library ${library.id}}`); - await this.syncFiles(library, [path]); + await this.jobRepository.queue({ + name: JobName.LIBRARY_SYNC_FILES, + data: { libraryId: library.id, ownerId: library.ownerId, assetPaths: [path] }, + }); } else { this.logger.verbose(`Ignoring file ${event} event for ${path} in library ${library.id}`); } @@ -208,17 +212,23 @@ export class LibraryService extends BaseService { return mapLibrary(library); } - private async syncFiles({ id, ownerId }: LibraryEntity, assetPaths: string[]) { - await this.jobRepository.queueAll( - assetPaths.map((assetPath) => ({ - name: JobName.LIBRARY_SYNC_FILE, - data: { - libraryId: id, - assetPath, - ownerId, - }, - })), - ); + @OnJob({ name: JobName.LIBRARY_SYNC_FILES, queue: QueueName.LIBRARY }) + async handleSyncFiles(job: JobOf): Promise { + const assetImports = job.assetPaths.map((assetPath) => this.processEntity(assetPath, job.ownerId, job.libraryId)); + + const assetIds: string[] = []; + const batchSize = 1000; // Adjust the batch size as needed + for (let i = 0; i < assetImports.length; i += batchSize) { + const batch = assetImports.slice(i, i + batchSize); + const batchIds = await this.assetRepository.createAll(batch).then((assets) => assets.map((asset) => asset.id)); + assetIds.push(...batchIds); + } + + this.logger.log(`Imported ${assetIds.length} asset(s) for library ${job.libraryId}`); + + await this.queuePostSyncJobs(assetIds); + + return JobStatus.SUCCESS; } private async validateImportPath(importPath: string): Promise { @@ -332,60 +342,34 @@ export class LibraryService extends BaseService { return JobStatus.SUCCESS; } - @OnJob({ name: JobName.LIBRARY_SYNC_FILE, queue: QueueName.LIBRARY }) - async handleSyncFile(job: JobOf): Promise { - /* For performance reasons, we don't check if the asset is already imported. 
- This is instead handled by a previous step in the scan process. - In the edge case of an asset being imported between that check - and this function call, the database constraint will prevent duplicates. - */ - - const assetPath = path.normalize(job.assetPath); - - // TODO: we can replace this get call with an exists call - /* let asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.libraryId, assetPath); - if (asset) { - return await this.handleSyncAssets({ libraryId: job.libraryId, assetIds: [asset.id] }); - } */ - - this.logger.log(`Importing new asset ${assetPath} into library ${job.libraryId}`); - - // TODO: device asset id is deprecated, remove it - const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, ''); - - const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`); - - const assetType = mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE; + private processEntity(filePath: string, ownerId: string, libraryId: string): AssetCreate { + const assetPath = path.normalize(filePath); const now = new Date(); - const asset = await this.assetRepository.create({ - ownerId: job.ownerId, - libraryId: job.libraryId, - checksum: pathHash, + return { + ownerId: ownerId, + libraryId: libraryId, + checksum: this.cryptoRepository.hashSha1(`path:${assetPath}`), originalPath: assetPath, - deviceAssetId, + + // TODO: device asset id is deprecated, remove it + deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''), deviceId: 'Library Import', fileCreatedAt: now, fileModifiedAt: now, localDateTime: now, - type: assetType, + type: mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE, originalFileName: parse(assetPath).base, isExternal: true, - }); - - this.logger.debug(`Queueing metadata extraction for: ${asset.originalPath}`); - - await this.queuePostSyncJobs([asset.id]); - - return JobStatus.SUCCESS; + }; } async queuePostSyncJobs(assetIds: string[]) { await this.jobRepository.queueAll( assetIds.map((assetId) => ({ name: JobName.METADATA_EXTRACTION, - data: { id: assetId, source: 'upload' }, + data: { id: assetId, source: 'library-import' }, })), ); } @@ -586,7 +570,12 @@ export class LibraryService extends BaseService { const newPaths = await this.assetRepository.getNewPaths(library.id, pathBatch); if (newPaths.length > 0) { importCount += newPaths.length; - await this.syncFiles(library, newPaths); + + await this.jobRepository.queue({ + name: JobName.LIBRARY_SYNC_FILES, + data: { libraryId: library.id, ownerId: library.ownerId, assetPaths: newPaths }, + }); + if (newPaths.length < pathBatch.length) { this.logger.debug( `Current crawl batch: ${newPaths.length} of ${pathBatch.length} file(s) are new, queued import for library ${library.id}...`, diff --git a/server/src/services/metadata.service.ts b/server/src/services/metadata.service.ts index 79a7d519d601e..14dae28da0ccc 100644 --- a/server/src/services/metadata.service.ts +++ b/server/src/services/metadata.service.ts @@ -148,13 +148,17 @@ export class MetadataService extends BaseService { } @OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION }) - async handleMetadataExtraction({ id }: JobOf): Promise { + async handleMetadataExtraction({ id, source }: JobOf): Promise { const { metadata, reverseGeocoding } = await this.getConfig({ withCache: true }); const [asset] = await this.assetRepository.getByIds([id], { faces: { person: false } }); if (!asset) { return JobStatus.FAILED; } + if (source === 'library-import') { + await this.processSidecar(id, false); 
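+      // sidecar discovery for library-imported assets happens here rather than at import time,
+      // since the batched import in handleSyncFiles no longer sets sidecarPath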
+ } + const stats = await this.storageRepository.stat(asset.originalPath); const exifTags = await this.getExifTags(asset); From 1df1b85aa8e23a54f1d93514611f66bd0f8d169e Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Wed, 18 Dec 2024 11:36:15 +0100 Subject: [PATCH 04/10] asset count instead of statistics --- mobile/openapi/README.md | 3 +- mobile/openapi/lib/api.dart | 1 - mobile/openapi/lib/api/libraries_api.dart | 24 ++-- mobile/openapi/lib/api_client.dart | 2 - .../lib/model/library_stats_response_dto.dart | 123 ------------------ open-api/immich-openapi-specs.json | 58 +++------ open-api/typescript-sdk/src/fetch-client.ts | 26 ++-- server/src/controllers/library.controller.ts | 6 +- server/src/interfaces/asset.interface.ts | 2 +- server/src/repositories/asset.repository.ts | 2 +- server/src/services/asset.service.ts | 7 +- server/src/services/job.service.ts | 2 +- server/src/services/library.service.ts | 23 ++-- server/src/services/trash.service.ts | 2 +- .../admin/library-management/+page.svelte | 40 ++---- 15 files changed, 73 insertions(+), 248 deletions(-) delete mode 100644 mobile/openapi/lib/model/library_stats_response_dto.dart diff --git a/mobile/openapi/README.md b/mobile/openapi/README.md index b336b1bfb6f40..e03f4dac77564 100644 --- a/mobile/openapi/README.md +++ b/mobile/openapi/README.md @@ -130,8 +130,8 @@ Class | Method | HTTP request | Description *LibrariesApi* | [**createLibrary**](doc//LibrariesApi.md#createlibrary) | **POST** /libraries | *LibrariesApi* | [**deleteLibrary**](doc//LibrariesApi.md#deletelibrary) | **DELETE** /libraries/{id} | *LibrariesApi* | [**getAllLibraries**](doc//LibrariesApi.md#getalllibraries) | **GET** /libraries | +*LibrariesApi* | [**getAssetCount**](doc//LibrariesApi.md#getassetcount) | **GET** /libraries/{id}/count | *LibrariesApi* | [**getLibrary**](doc//LibrariesApi.md#getlibrary) | **GET** /libraries/{id} | -*LibrariesApi* | [**getLibraryStatistics**](doc//LibrariesApi.md#getlibrarystatistics) | **GET** /libraries/{id}/statistics | *LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan | *LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} | *LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate | @@ -337,7 +337,6 @@ Class | Method | HTTP request | Description - [JobSettingsDto](doc//JobSettingsDto.md) - [JobStatusDto](doc//JobStatusDto.md) - [LibraryResponseDto](doc//LibraryResponseDto.md) - - [LibraryStatsResponseDto](doc//LibraryStatsResponseDto.md) - [LicenseKeyDto](doc//LicenseKeyDto.md) - [LicenseResponseDto](doc//LicenseResponseDto.md) - [LogLevel](doc//LogLevel.md) diff --git a/mobile/openapi/lib/api.dart b/mobile/openapi/lib/api.dart index 73eb02d89ed7a..3fccede06eb50 100644 --- a/mobile/openapi/lib/api.dart +++ b/mobile/openapi/lib/api.dart @@ -150,7 +150,6 @@ part 'model/job_name.dart'; part 'model/job_settings_dto.dart'; part 'model/job_status_dto.dart'; part 'model/library_response_dto.dart'; -part 'model/library_stats_response_dto.dart'; part 'model/license_key_dto.dart'; part 'model/license_response_dto.dart'; part 'model/log_level.dart'; diff --git a/mobile/openapi/lib/api/libraries_api.dart b/mobile/openapi/lib/api/libraries_api.dart index 36d98d9a88a78..6010b7a9fcf56 100644 --- a/mobile/openapi/lib/api/libraries_api.dart +++ b/mobile/openapi/lib/api/libraries_api.dart @@ -147,13 +147,13 @@ class LibrariesApi { return null; } - /// Performs an HTTP 'GET /libraries/{id}' 
operation and returns the [Response]. + /// Performs an HTTP 'GET /libraries/{id}/count' operation and returns the [Response]. /// Parameters: /// /// * [String] id (required): - Future getLibraryWithHttpInfo(String id,) async { + Future getAssetCountWithHttpInfo(String id,) async { // ignore: prefer_const_declarations - final path = r'/libraries/{id}' + final path = r'/libraries/{id}/count' .replaceAll('{id}', id); // ignore: prefer_final_locals @@ -180,8 +180,8 @@ class LibrariesApi { /// Parameters: /// /// * [String] id (required): - Future getLibrary(String id,) async { - final response = await getLibraryWithHttpInfo(id,); + Future getAssetCount(String id,) async { + final response = await getAssetCountWithHttpInfo(id,); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -189,19 +189,19 @@ class LibrariesApi { // At the time of writing this, `dart:convert` will throw an "Unexpected end of input" // FormatException when trying to decode an empty string. if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) { - return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryResponseDto',) as LibraryResponseDto; + return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'num',) as num; } return null; } - /// Performs an HTTP 'GET /libraries/{id}/statistics' operation and returns the [Response]. + /// Performs an HTTP 'GET /libraries/{id}' operation and returns the [Response]. /// Parameters: /// /// * [String] id (required): - Future getLibraryStatisticsWithHttpInfo(String id,) async { + Future getLibraryWithHttpInfo(String id,) async { // ignore: prefer_const_declarations - final path = r'/libraries/{id}/statistics' + final path = r'/libraries/{id}' .replaceAll('{id}', id); // ignore: prefer_final_locals @@ -228,8 +228,8 @@ class LibrariesApi { /// Parameters: /// /// * [String] id (required): - Future getLibraryStatistics(String id,) async { - final response = await getLibraryStatisticsWithHttpInfo(id,); + Future getLibrary(String id,) async { + final response = await getLibraryWithHttpInfo(id,); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -237,7 +237,7 @@ class LibrariesApi { // At the time of writing this, `dart:convert` will throw an "Unexpected end of input" // FormatException when trying to decode an empty string. 
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) { - return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryStatsResponseDto',) as LibraryStatsResponseDto; + return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryResponseDto',) as LibraryResponseDto; } return null; diff --git a/mobile/openapi/lib/api_client.dart b/mobile/openapi/lib/api_client.dart index a6f8d551da81c..aa5db6589b462 100644 --- a/mobile/openapi/lib/api_client.dart +++ b/mobile/openapi/lib/api_client.dart @@ -354,8 +354,6 @@ class ApiClient { return JobStatusDto.fromJson(value); case 'LibraryResponseDto': return LibraryResponseDto.fromJson(value); - case 'LibraryStatsResponseDto': - return LibraryStatsResponseDto.fromJson(value); case 'LicenseKeyDto': return LicenseKeyDto.fromJson(value); case 'LicenseResponseDto': diff --git a/mobile/openapi/lib/model/library_stats_response_dto.dart b/mobile/openapi/lib/model/library_stats_response_dto.dart deleted file mode 100644 index afe67da31a251..0000000000000 --- a/mobile/openapi/lib/model/library_stats_response_dto.dart +++ /dev/null @@ -1,123 +0,0 @@ -// -// AUTO-GENERATED FILE, DO NOT MODIFY! -// -// @dart=2.18 - -// ignore_for_file: unused_element, unused_import -// ignore_for_file: always_put_required_named_parameters_first -// ignore_for_file: constant_identifier_names -// ignore_for_file: lines_longer_than_80_chars - -part of openapi.api; - -class LibraryStatsResponseDto { - /// Returns a new [LibraryStatsResponseDto] instance. - LibraryStatsResponseDto({ - this.photos = 0, - this.total = 0, - this.usage = 0, - this.videos = 0, - }); - - int photos; - - int total; - - int usage; - - int videos; - - @override - bool operator ==(Object other) => identical(this, other) || other is LibraryStatsResponseDto && - other.photos == photos && - other.total == total && - other.usage == usage && - other.videos == videos; - - @override - int get hashCode => - // ignore: unnecessary_parenthesis - (photos.hashCode) + - (total.hashCode) + - (usage.hashCode) + - (videos.hashCode); - - @override - String toString() => 'LibraryStatsResponseDto[photos=$photos, total=$total, usage=$usage, videos=$videos]'; - - Map toJson() { - final json = {}; - json[r'photos'] = this.photos; - json[r'total'] = this.total; - json[r'usage'] = this.usage; - json[r'videos'] = this.videos; - return json; - } - - /// Returns a new [LibraryStatsResponseDto] instance and imports its values from - /// [value] if it's a [Map], null otherwise. - // ignore: prefer_constructors_over_static_methods - static LibraryStatsResponseDto? 
fromJson(dynamic value) { - upgradeDto(value, "LibraryStatsResponseDto"); - if (value is Map) { - final json = value.cast(); - - return LibraryStatsResponseDto( - photos: mapValueOfType(json, r'photos')!, - total: mapValueOfType(json, r'total')!, - usage: mapValueOfType(json, r'usage')!, - videos: mapValueOfType(json, r'videos')!, - ); - } - return null; - } - - static List listFromJson(dynamic json, {bool growable = false,}) { - final result = []; - if (json is List && json.isNotEmpty) { - for (final row in json) { - final value = LibraryStatsResponseDto.fromJson(row); - if (value != null) { - result.add(value); - } - } - } - return result.toList(growable: growable); - } - - static Map mapFromJson(dynamic json) { - final map = {}; - if (json is Map && json.isNotEmpty) { - json = json.cast(); // ignore: parameter_assignments - for (final entry in json.entries) { - final value = LibraryStatsResponseDto.fromJson(entry.value); - if (value != null) { - map[entry.key] = value; - } - } - } - return map; - } - - // maps a json object with a list of LibraryStatsResponseDto-objects as value to a dart map - static Map> mapListFromJson(dynamic json, {bool growable = false,}) { - final map = >{}; - if (json is Map && json.isNotEmpty) { - // ignore: parameter_assignments - json = json.cast(); - for (final entry in json.entries) { - map[entry.key] = LibraryStatsResponseDto.listFromJson(entry.value, growable: growable,); - } - } - return map; - } - - /// The list of required keys that must be present in a JSON. - static const requiredKeys = { - 'photos', - 'total', - 'usage', - 'videos', - }; -} - diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index 7c8aba3b5e985..554fed25d6866 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -2853,9 +2853,9 @@ ] } }, - "/libraries/{id}/scan": { - "post": { - "operationId": "scanLibrary", + "/libraries/{id}/count": { + "get": { + "operationId": "getAssetCount", "parameters": [ { "name": "id", @@ -2868,7 +2868,14 @@ } ], "responses": { - "204": { + "200": { + "content": { + "application/json": { + "schema": { + "type": "number" + } + } + }, "description": "" } }, @@ -2888,9 +2895,9 @@ ] } }, - "/libraries/{id}/statistics": { - "get": { - "operationId": "getLibraryStatistics", + "/libraries/{id}/scan": { + "post": { + "operationId": "scanLibrary", "parameters": [ { "name": "id", @@ -2903,14 +2910,7 @@ } ], "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryStatsResponseDto" - } - } - }, + "204": { "description": "" } }, @@ -9464,34 +9464,6 @@ ], "type": "object" }, - "LibraryStatsResponseDto": { - "properties": { - "photos": { - "default": 0, - "type": "integer" - }, - "total": { - "default": 0, - "type": "integer" - }, - "usage": { - "default": 0, - "format": "int64", - "type": "integer" - }, - "videos": { - "default": 0, - "type": "integer" - } - }, - "required": [ - "photos", - "total", - "usage", - "videos" - ], - "type": "object" - }, "LicenseKeyDto": { "properties": { "activationKey": { diff --git a/open-api/typescript-sdk/src/fetch-client.ts b/open-api/typescript-sdk/src/fetch-client.ts index c31e71d05e961..f441f47fc51c6 100644 --- a/open-api/typescript-sdk/src/fetch-client.ts +++ b/open-api/typescript-sdk/src/fetch-client.ts @@ -574,12 +574,6 @@ export type UpdateLibraryDto = { importPaths?: string[]; name?: string; }; -export type LibraryStatsResponseDto = { - photos: number; - total: number; - usage: number; - videos: 
number; -}; export type ValidateLibraryDto = { exclusionPatterns?: string[]; importPaths?: string[]; @@ -2099,22 +2093,22 @@ export function updateLibrary({ id, updateLibraryDto }: { body: updateLibraryDto }))); } -export function scanLibrary({ id }: { +export function getAssetCount({ id }: { id: string; }, opts?: Oazapfts.RequestOpts) { - return oazapfts.ok(oazapfts.fetchText(`/libraries/${encodeURIComponent(id)}/scan`, { - ...opts, - method: "POST" + return oazapfts.ok(oazapfts.fetchJson<{ + status: 200; + data: number; + }>(`/libraries/${encodeURIComponent(id)}/count`, { + ...opts })); } -export function getLibraryStatistics({ id }: { +export function scanLibrary({ id }: { id: string; }, opts?: Oazapfts.RequestOpts) { - return oazapfts.ok(oazapfts.fetchJson<{ - status: 200; - data: LibraryStatsResponseDto; - }>(`/libraries/${encodeURIComponent(id)}/statistics`, { - ...opts + return oazapfts.ok(oazapfts.fetchText(`/libraries/${encodeURIComponent(id)}/scan`, { + ...opts, + method: "POST" })); } export function validate({ id, validateLibraryDto }: { diff --git a/server/src/controllers/library.controller.ts b/server/src/controllers/library.controller.ts index b8959ca28875c..adf0f6c106240 100644 --- a/server/src/controllers/library.controller.ts +++ b/server/src/controllers/library.controller.ts @@ -57,10 +57,10 @@ export class LibraryController { return this.service.validate(id, dto); } - @Get(':id/statistics') + @Get(':id/count') @Authenticated({ permission: Permission.LIBRARY_STATISTICS, admin: true }) - getLibraryStatistics(@Param() { id }: UUIDParamDto): Promise { - return this.service.getStatistics(id); + getAssetCount(@Param() { id }: UUIDParamDto): Promise { + return this.service.getAssetCount(id); } @Post(':id/scan') diff --git a/server/src/interfaces/asset.interface.ts b/server/src/interfaces/asset.interface.ts index b388a2339264e..f9e9a4dd212f9 100644 --- a/server/src/interfaces/asset.interface.ts +++ b/server/src/interfaces/asset.interface.ts @@ -201,5 +201,5 @@ export interface IAssetRepository { upsertFiles(files: UpsertFileOptions[]): Promise; updateOffline(library: LibraryEntity): Promise; getNewPaths(libraryId: string, paths: string[]): Promise; - getAssetCount(id: string, options: AssetSearchOptions): Promise; + getAssetCount(options: AssetSearchOptions): Promise; } diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index 6f8d81408eeb9..cc01d0c9bea49 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -786,7 +786,7 @@ export class AssetRepository implements IAssetRepository { .then((result) => result.map((row: { path: string }) => row.path)); } - async getAssetCount(id: string, options: AssetSearchOptions = {}): Promise { + async getAssetCount(options: AssetSearchOptions = {}): Promise { let builder = this.repository.createQueryBuilder('asset').leftJoinAndSelect('asset.files', 'files'); builder = searchAssetBuilder(builder, options); return builder.getCount(); diff --git a/server/src/services/asset.service.ts b/server/src/services/asset.service.ts index 87510371192e4..f2bc09c9078e3 100644 --- a/server/src/services/asset.service.ts +++ b/server/src/services/asset.service.ts @@ -249,7 +249,12 @@ export class AssetService extends BaseService { const { thumbnailFile, previewFile } = getAssetFiles(asset.files); const files = [thumbnailFile?.path, previewFile?.path, asset.encodedVideoPath]; - if (deleteOnDisk) { + + if (deleteOnDisk && !asset.isOffline) { + /* We don't 
want to delete an offline asset because it is either... + ...missing from disk => don't delete the file since it doesn't exist where we expect + ...outside of any import path => don't delete the file since we're not responsible for it + ...matching an exclusion pattern => don't delete the file since it's excluded */ files.push(asset.sidecarPath, asset.originalPath); } diff --git a/server/src/services/job.service.ts b/server/src/services/job.service.ts index 2faed0a51666a..a9a430858e488 100644 --- a/server/src/services/job.service.ts +++ b/server/src/services/job.service.ts @@ -266,7 +266,7 @@ export class JobService extends BaseService { } case JobName.GENERATE_THUMBNAILS: { - if (!item.data.notify && item.data.source !== 'upload') { + if (!item.data.notify && item.data.source !== 'upload' && item.data.source !== 'library-import') { break; } diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 7fbaa40f6f5c2..3c4e7f7a28c8e 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -1,4 +1,4 @@ -import { BadRequestException, Injectable } from '@nestjs/common'; +import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common'; import { R_OK } from 'node:constants'; import path, { basename, isAbsolute, parse } from 'node:path'; import picomatch from 'picomatch'; @@ -174,12 +174,12 @@ export class LibraryService extends BaseService { } } - async getStatistics(id: string): Promise { - const statistics = await this.libraryRepository.getStatistics(id); - if (!statistics) { - throw new BadRequestException(`Library ${id} not found`); + async getAssetCount(id: string): Promise { + const count = await this.assetRepository.getAssetCount({ libraryId: id }); + if (count == undefined) { + throw new InternalServerErrorException(`Failed to get asset count for library ${id}`); } - return statistics; + return count; } async get(id: string): Promise { @@ -354,7 +354,8 @@ export class LibraryService extends BaseService { private processEntity(filePath: string, ownerId: string, libraryId: string): AssetCreate { const assetPath = path.normalize(filePath); - const now = new Date(); + // This date will be set until metadata extraction runs + const datePlaceholder = new Date('1900-01-01'); return { ownerId: ownerId, @@ -365,9 +366,9 @@ export class LibraryService extends BaseService { // TODO: device asset id is deprecated, remove it deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''), deviceId: 'Library Import', - fileCreatedAt: now, - fileModifiedAt: now, - localDateTime: now, + fileCreatedAt: datePlaceholder, + fileModifiedAt: datePlaceholder, + localDateTime: datePlaceholder, type: mimeTypes.isVideo(assetPath) ? 
AssetType.VIDEO : AssetType.IMAGE, originalFileName: parse(assetPath).base, isExternal: true, @@ -620,7 +621,7 @@ export class LibraryService extends BaseService { return JobStatus.SKIPPED; } - const assetCount = await this.assetRepository.getAssetCount(library.id, { withDeleted: true }); + const assetCount = await this.assetRepository.getAssetCount({ libraryId: job.id, withDeleted: true }); if (!assetCount) { this.logger.log(`Library ${library.id} is empty, no need to check assets`); diff --git a/server/src/services/trash.service.ts b/server/src/services/trash.service.ts index 621dee0f8176d..549963772dcbc 100644 --- a/server/src/services/trash.service.ts +++ b/server/src/services/trash.service.ts @@ -52,7 +52,7 @@ export class TrashService extends BaseService { ); for await (const assetIds of assetPagination) { - this.logger.debug(`Queueing ${assetIds.length} assets for deletion from the trash`); + this.logger.debug(`Queueing ${assetIds.length} asset(s) for deletion from the trash`); count += assetIds.length; await this.jobRepository.queueAll( assetIds.map((assetId) => ({ diff --git a/web/src/routes/admin/library-management/+page.svelte b/web/src/routes/admin/library-management/+page.svelte index b89e81ebf687d..20d35ff76d842 100644 --- a/web/src/routes/admin/library-management/+page.svelte +++ b/web/src/routes/admin/library-management/+page.svelte @@ -12,18 +12,16 @@ notificationController, NotificationType, } from '$lib/components/shared-components/notification/notification'; - import { ByteUnit, getBytesWithUnit } from '$lib/utils/byte-units'; import { handleError } from '$lib/utils/handle-error'; import { createLibrary, deleteLibrary, getAllLibraries, - getLibraryStatistics, + getAssetCount, getUserAdmin, scanLibrary, updateLibrary, type LibraryResponseDto, - type LibraryStatsResponseDto, type UserResponseDto, } from '@immich/sdk'; import { mdiDatabase, mdiDotsVertical, mdiPlusBoxOutline, mdiSync } from '@mdi/js'; @@ -44,13 +42,8 @@ let libraries: LibraryResponseDto[] = $state([]); - let stats: LibraryStatsResponseDto[] = []; let owner: UserResponseDto[] = $state([]); - let photos: number[] = []; - let videos: number[] = []; - let totalCount: number[] = $state([]); - let diskUsage: number[] = $state([]); - let diskUsageUnit: ByteUnit[] = $state([]); + let assetCount: number[] = $state([]); let editImportPaths: number | undefined = $state(); let editScanSettings: number | undefined = $state(); let renameLibrary: number | undefined = $state(); @@ -74,12 +67,8 @@ }; const refreshStats = async (listIndex: number) => { - stats[listIndex] = await getLibraryStatistics({ id: libraries[listIndex].id }); + assetCount[listIndex] = await getAssetCount({ id: libraries[listIndex].id }); owner[listIndex] = await getUserAdmin({ id: libraries[listIndex].ownerId }); - photos[listIndex] = stats[listIndex].photos; - videos[listIndex] = stats[listIndex].videos; - totalCount[listIndex] = stats[listIndex].total; - [diskUsage[listIndex], diskUsageUnit[listIndex]] = getBytesWithUnit(stats[listIndex].usage, 0); }; async function readLibraryList() { @@ -190,10 +179,10 @@ } await refreshStats(index); - const assetCount = totalCount[index]; - if (assetCount > 0) { + const count = assetCount[index]; + if (count > 0) { const isConfirmed = await dialogController.show({ - prompt: $t('admin.confirm_delete_library_assets', { values: { count: assetCount } }), + prompt: $t('admin.confirm_delete_library_assets', { values: { count } }), }); if (!isConfirmed) { @@ -242,19 +231,18 @@ - + {$t('type')} {$t('name')} 
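A minimal sketch of how a client could consume the new count route through the generated TypeScript SDK, assuming the SDK has already been configured with a base URL and credentials elsewhere; `getAllLibraries` and `getAssetCount` are the functions shown in `fetch-client.ts` above, while the wrapper function and logging are illustrative only and not part of this patch.

```typescript
import { getAllLibraries, getAssetCount } from '@immich/sdk';

// Sketch: list each library with its asset count via GET /libraries/{id}/count,
// which now returns a plain number instead of a LibraryStatsResponseDto.
async function printLibraryCounts(): Promise<void> {
  const libraries = await getAllLibraries();
  for (const library of libraries) {
    const count = await getAssetCount({ id: library.id });
    console.log(`${library.name}: ${count} asset(s)`);
  }
}
```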
{$t('owner')} {$t('assets')} - {$t('size')} {#each libraries as library, index (library.id)} - {#if totalCount[index] == undefined} + {#if assetCount[index] == undefined} {:else} - {totalCount[index].toLocaleString($locale)} - {/if} - - - {#if diskUsage[index] == undefined} - - {:else} - {diskUsage[index]} - {diskUsageUnit[index]} + {assetCount[index].toLocaleString($locale)} {/if} From c50aa1ba06e237364d8494388b27bfb170115f09 Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Wed, 8 Jan 2025 00:43:53 +0100 Subject: [PATCH 05/10] chore: open api and sql --- mobile/openapi/README.md | 8 ++-- .../openapi/lib/model/create_library_dto.dart | 20 ++++---- .../openapi/lib/model/update_library_dto.dart | 20 ++++---- .../lib/model/validate_library_dto.dart | 20 ++++---- open-api/immich-openapi-specs.json | 48 +++++++++++++++---- server/src/queries/asset.repository.sql | 48 ------------------- 6 files changed, 72 insertions(+), 92 deletions(-) diff --git a/mobile/openapi/README.md b/mobile/openapi/README.md index f46dd4e93b5fe..b0ffe6746bc58 100644 --- a/mobile/openapi/README.md +++ b/mobile/openapi/README.md @@ -93,17 +93,17 @@ Class | Method | HTTP request | Description *AlbumsApi* | [**removeUserFromAlbum**](doc//AlbumsApi.md#removeuserfromalbum) | **DELETE** /albums/{id}/user/{userId} | *AlbumsApi* | [**updateAlbumInfo**](doc//AlbumsApi.md#updatealbuminfo) | **PATCH** /albums/{id} | *AlbumsApi* | [**updateAlbumUser**](doc//AlbumsApi.md#updatealbumuser) | **PUT** /albums/{id}/user/{userId} | -*AssetsApi* | [**checkBulkUpload**](doc//AssetsApi.md#checkbulkupload) | **POST** /assets/bulk-upload-check | -*AssetsApi* | [**checkExistingAssets**](doc//AssetsApi.md#checkexistingassets) | **POST** /assets/exist | +*AssetsApi* | [**checkBulkUpload**](doc//AssetsApi.md#checkbulkupload) | **POST** /assets/bulk-upload-check | Checks if assets exist by checksums +*AssetsApi* | [**checkExistingAssets**](doc//AssetsApi.md#checkexistingassets) | **POST** /assets/exist | Checks if multiple assets exist on the server and returns all existing - used by background backup *AssetsApi* | [**deleteAssets**](doc//AssetsApi.md#deleteassets) | **DELETE** /assets | *AssetsApi* | [**downloadAsset**](doc//AssetsApi.md#downloadasset) | **GET** /assets/{id}/original | -*AssetsApi* | [**getAllUserAssetsByDeviceId**](doc//AssetsApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | +*AssetsApi* | [**getAllUserAssetsByDeviceId**](doc//AssetsApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | Get all asset of a device that are in the database, ID only. 
*AssetsApi* | [**getAssetInfo**](doc//AssetsApi.md#getassetinfo) | **GET** /assets/{id} | *AssetsApi* | [**getAssetStatistics**](doc//AssetsApi.md#getassetstatistics) | **GET** /assets/statistics | *AssetsApi* | [**getMemoryLane**](doc//AssetsApi.md#getmemorylane) | **GET** /assets/memory-lane | *AssetsApi* | [**getRandom**](doc//AssetsApi.md#getrandom) | **GET** /assets/random | *AssetsApi* | [**playAssetVideo**](doc//AssetsApi.md#playassetvideo) | **GET** /assets/{id}/video/playback | -*AssetsApi* | [**replaceAsset**](doc//AssetsApi.md#replaceasset) | **PUT** /assets/{id}/original | +*AssetsApi* | [**replaceAsset**](doc//AssetsApi.md#replaceasset) | **PUT** /assets/{id}/original | Replace the asset with new file, without changing its id *AssetsApi* | [**runAssetJobs**](doc//AssetsApi.md#runassetjobs) | **POST** /assets/jobs | *AssetsApi* | [**updateAsset**](doc//AssetsApi.md#updateasset) | **PUT** /assets/{id} | *AssetsApi* | [**updateAssets**](doc//AssetsApi.md#updateassets) | **PUT** /assets | diff --git a/mobile/openapi/lib/model/create_library_dto.dart b/mobile/openapi/lib/model/create_library_dto.dart index bffa5f427950d..2b8085be6f3a6 100644 --- a/mobile/openapi/lib/model/create_library_dto.dart +++ b/mobile/openapi/lib/model/create_library_dto.dart @@ -13,15 +13,15 @@ part of openapi.api; class CreateLibraryDto { /// Returns a new [CreateLibraryDto] instance. CreateLibraryDto({ - this.exclusionPatterns = const [], - this.importPaths = const [], + this.exclusionPatterns = const {}, + this.importPaths = const {}, this.name, required this.ownerId, }); - List exclusionPatterns; + Set exclusionPatterns; - List importPaths; + Set importPaths; /// /// Please note: This property should have been non-nullable! Since the specification file @@ -53,8 +53,8 @@ class CreateLibraryDto { Map toJson() { final json = {}; - json[r'exclusionPatterns'] = this.exclusionPatterns; - json[r'importPaths'] = this.importPaths; + json[r'exclusionPatterns'] = this.exclusionPatterns.toList(growable: false); + json[r'importPaths'] = this.importPaths.toList(growable: false); if (this.name != null) { json[r'name'] = this.name; } else { @@ -74,11 +74,11 @@ class CreateLibraryDto { return CreateLibraryDto( exclusionPatterns: json[r'exclusionPatterns'] is Iterable - ? (json[r'exclusionPatterns'] as Iterable).cast().toList(growable: false) - : const [], + ? (json[r'exclusionPatterns'] as Iterable).cast().toSet() + : const {}, importPaths: json[r'importPaths'] is Iterable - ? (json[r'importPaths'] as Iterable).cast().toList(growable: false) - : const [], + ? (json[r'importPaths'] as Iterable).cast().toSet() + : const {}, name: mapValueOfType(json, r'name'), ownerId: mapValueOfType(json, r'ownerId')!, ); diff --git a/mobile/openapi/lib/model/update_library_dto.dart b/mobile/openapi/lib/model/update_library_dto.dart index b85df40172e69..6a4f36906f74a 100644 --- a/mobile/openapi/lib/model/update_library_dto.dart +++ b/mobile/openapi/lib/model/update_library_dto.dart @@ -13,14 +13,14 @@ part of openapi.api; class UpdateLibraryDto { /// Returns a new [UpdateLibraryDto] instance. UpdateLibraryDto({ - this.exclusionPatterns = const [], - this.importPaths = const [], + this.exclusionPatterns = const {}, + this.importPaths = const {}, this.name, }); - List exclusionPatterns; + Set exclusionPatterns; - List importPaths; + Set importPaths; /// /// Please note: This property should have been non-nullable! 
Since the specification file @@ -48,8 +48,8 @@ class UpdateLibraryDto { Map toJson() { final json = {}; - json[r'exclusionPatterns'] = this.exclusionPatterns; - json[r'importPaths'] = this.importPaths; + json[r'exclusionPatterns'] = this.exclusionPatterns.toList(growable: false); + json[r'importPaths'] = this.importPaths.toList(growable: false); if (this.name != null) { json[r'name'] = this.name; } else { @@ -68,11 +68,11 @@ class UpdateLibraryDto { return UpdateLibraryDto( exclusionPatterns: json[r'exclusionPatterns'] is Iterable - ? (json[r'exclusionPatterns'] as Iterable).cast().toList(growable: false) - : const [], + ? (json[r'exclusionPatterns'] as Iterable).cast().toSet() + : const {}, importPaths: json[r'importPaths'] is Iterable - ? (json[r'importPaths'] as Iterable).cast().toList(growable: false) - : const [], + ? (json[r'importPaths'] as Iterable).cast().toSet() + : const {}, name: mapValueOfType(json, r'name'), ); } diff --git a/mobile/openapi/lib/model/validate_library_dto.dart b/mobile/openapi/lib/model/validate_library_dto.dart index 08199e3aa66c8..79ddb9a540359 100644 --- a/mobile/openapi/lib/model/validate_library_dto.dart +++ b/mobile/openapi/lib/model/validate_library_dto.dart @@ -13,13 +13,13 @@ part of openapi.api; class ValidateLibraryDto { /// Returns a new [ValidateLibraryDto] instance. ValidateLibraryDto({ - this.exclusionPatterns = const [], - this.importPaths = const [], + this.exclusionPatterns = const {}, + this.importPaths = const {}, }); - List exclusionPatterns; + Set exclusionPatterns; - List importPaths; + Set importPaths; @override bool operator ==(Object other) => identical(this, other) || other is ValidateLibraryDto && @@ -37,8 +37,8 @@ class ValidateLibraryDto { Map toJson() { final json = {}; - json[r'exclusionPatterns'] = this.exclusionPatterns; - json[r'importPaths'] = this.importPaths; + json[r'exclusionPatterns'] = this.exclusionPatterns.toList(growable: false); + json[r'importPaths'] = this.importPaths.toList(growable: false); return json; } @@ -52,11 +52,11 @@ class ValidateLibraryDto { return ValidateLibraryDto( exclusionPatterns: json[r'exclusionPatterns'] is Iterable - ? (json[r'exclusionPatterns'] as Iterable).cast().toList(growable: false) - : const [], + ? (json[r'exclusionPatterns'] as Iterable).cast().toSet() + : const {}, importPaths: json[r'importPaths'] is Iterable - ? (json[r'importPaths'] as Iterable).cast().toList(growable: false) - : const [], + ? 
(json[r'importPaths'] as Iterable).cast().toSet() + : const {}, ); } return null; diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index 13ee7a30c71b8..57ee614ee75e4 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -1424,7 +1424,6 @@ }, "/assets/bulk-upload-check": { "post": { - "description": "Checks if assets exist by checksums", "operationId": "checkBulkUpload", "parameters": [], "requestBody": { @@ -1460,6 +1459,7 @@ "api_key": [] } ], + "summary": "Checks if assets exist by checksums", "tags": [ "Assets" ] @@ -1467,7 +1467,6 @@ }, "/assets/device/{deviceId}": { "get": { - "description": "Get all asset of a device that are in the database, ID only.", "operationId": "getAllUserAssetsByDeviceId", "parameters": [ { @@ -1505,6 +1504,7 @@ "api_key": [] } ], + "summary": "Get all asset of a device that are in the database, ID only.", "tags": [ "Assets" ] @@ -1512,7 +1512,6 @@ }, "/assets/exist": { "post": { - "description": "Checks if multiple assets exist on the server and returns all existing - used by background backup", "operationId": "checkExistingAssets", "parameters": [], "requestBody": { @@ -1548,6 +1547,7 @@ "api_key": [] } ], + "summary": "Checks if multiple assets exist on the server and returns all existing - used by background backup", "tags": [ "Assets" ] @@ -1903,7 +1903,6 @@ ] }, "put": { - "description": "Replace the asset with new file, without changing its id", "operationId": "replaceAsset", "parameters": [ { @@ -1957,6 +1956,7 @@ "api_key": [] } ], + "summary": "Replace the asset with new file, without changing its id", "tags": [ "Assets" ], @@ -7492,6 +7492,7 @@ "items": { "$ref": "#/components/schemas/Permission" }, + "minItems": 1, "type": "array" } }, @@ -7631,6 +7632,7 @@ "items": { "$ref": "#/components/schemas/AlbumUserAddDto" }, + "minItems": 1, "type": "array" } }, @@ -8705,6 +8707,7 @@ "items": { "type": "string" }, + "minItems": 1, "type": "array" }, "deviceId": { @@ -8771,13 +8774,17 @@ "items": { "type": "string" }, - "type": "array" + "maxItems": 128, + "type": "array", + "uniqueItems": true }, "importPaths": { "items": { "type": "string" }, - "type": "array" + "maxItems": 128, + "type": "array", + "uniqueItems": true }, "name": { "type": "string" @@ -9516,6 +9523,7 @@ "properties": { "email": { "example": "testuser@email.com", + "format": "email", "type": "string" }, "password": { @@ -11322,6 +11330,7 @@ "properties": { "email": { "example": "testuser@email.com", + "format": "email", "type": "string" }, "name": { @@ -11479,6 +11488,7 @@ "format": "uuid", "type": "string" }, + "minItems": 2, "type": "array" } }, @@ -11907,6 +11917,7 @@ "type": "string" }, "urls": { + "format": "uri", "items": { "format": "uri", "type": "string" @@ -11927,12 +11938,14 @@ "SystemConfigMapDto": { "properties": { "darkStyle": { + "format": "uri", "type": "string" }, "enabled": { "type": "boolean" }, "lightStyle": { + "format": "uri", "type": "string" } }, @@ -12007,6 +12020,7 @@ "type": "boolean" }, "mobileRedirectUri": { + "format": "uri", "type": "string" }, "profileSigningAlgorithm": { @@ -12069,6 +12083,7 @@ "SystemConfigServerDto": { "properties": { "externalDomain": { + "format": "uri", "type": "string" }, "loginPageMessage": { @@ -12325,6 +12340,7 @@ "TagCreateDto": { "properties": { "color": { + "pattern": "^#?([0-9A-F]{3}|[0-9A-F]{4}|[0-9A-F]{6}|[0-9A-F]{8})$", "type": "string" }, "name": { @@ -12380,6 +12396,7 @@ "properties": { "color": { "nullable": true, + "pattern": 
"^#?([0-9A-F]{3}|[0-9A-F]{4}|[0-9A-F]{6}|[0-9A-F]{8})$", "type": "string" } }, @@ -12597,13 +12614,17 @@ "items": { "type": "string" }, - "type": "array" + "maxItems": 128, + "type": "array", + "uniqueItems": true }, "importPaths": { "items": { "type": "string" }, - "type": "array" + "maxItems": 128, + "type": "array", + "uniqueItems": true }, "name": { "type": "string" @@ -12669,6 +12690,7 @@ "UserAdminCreateDto": { "properties": { "email": { + "format": "email", "type": "string" }, "name": { @@ -12802,6 +12824,7 @@ "UserAdminUpdateDto": { "properties": { "email": { + "format": "email", "type": "string" }, "name": { @@ -12979,6 +13002,7 @@ "UserUpdateMeDto": { "properties": { "email": { + "format": "email", "type": "string" }, "name": { @@ -13007,13 +13031,17 @@ "items": { "type": "string" }, - "type": "array" + "maxItems": 128, + "type": "array", + "uniqueItems": true }, "importPaths": { "items": { "type": "string" }, - "type": "array" + "maxItems": 128, + "type": "array", + "uniqueItems": true } }, "type": "object" diff --git a/server/src/queries/asset.repository.sql b/server/src/queries/asset.repository.sql index 4694cd20fc532..8de7b80e51088 100644 --- a/server/src/queries/asset.repository.sql +++ b/server/src/queries/asset.repository.sql @@ -274,54 +274,6 @@ DELETE FROM "assets" WHERE "ownerId" = $1 --- AssetRepository.getByLibraryIdAndOriginalPath -SELECT DISTINCT - "distinctAlias"."AssetEntity_id" AS "ids_AssetEntity_id" -FROM - ( - SELECT - "AssetEntity"."id" AS "AssetEntity_id", - "AssetEntity"."deviceAssetId" AS "AssetEntity_deviceAssetId", - "AssetEntity"."ownerId" AS "AssetEntity_ownerId", - "AssetEntity"."libraryId" AS "AssetEntity_libraryId", - "AssetEntity"."deviceId" AS "AssetEntity_deviceId", - "AssetEntity"."type" AS "AssetEntity_type", - "AssetEntity"."status" AS "AssetEntity_status", - "AssetEntity"."originalPath" AS "AssetEntity_originalPath", - "AssetEntity"."thumbhash" AS "AssetEntity_thumbhash", - "AssetEntity"."encodedVideoPath" AS "AssetEntity_encodedVideoPath", - "AssetEntity"."createdAt" AS "AssetEntity_createdAt", - "AssetEntity"."updatedAt" AS "AssetEntity_updatedAt", - "AssetEntity"."deletedAt" AS "AssetEntity_deletedAt", - "AssetEntity"."fileCreatedAt" AS "AssetEntity_fileCreatedAt", - "AssetEntity"."localDateTime" AS "AssetEntity_localDateTime", - "AssetEntity"."fileModifiedAt" AS "AssetEntity_fileModifiedAt", - "AssetEntity"."isFavorite" AS "AssetEntity_isFavorite", - "AssetEntity"."isArchived" AS "AssetEntity_isArchived", - "AssetEntity"."isExternal" AS "AssetEntity_isExternal", - "AssetEntity"."isOffline" AS "AssetEntity_isOffline", - "AssetEntity"."checksum" AS "AssetEntity_checksum", - "AssetEntity"."duration" AS "AssetEntity_duration", - "AssetEntity"."isVisible" AS "AssetEntity_isVisible", - "AssetEntity"."livePhotoVideoId" AS "AssetEntity_livePhotoVideoId", - "AssetEntity"."originalFileName" AS "AssetEntity_originalFileName", - "AssetEntity"."sidecarPath" AS "AssetEntity_sidecarPath", - "AssetEntity"."stackId" AS "AssetEntity_stackId", - "AssetEntity"."duplicateId" AS "AssetEntity_duplicateId" - FROM - "assets" "AssetEntity" - LEFT JOIN "libraries" "AssetEntity__AssetEntity_library" ON "AssetEntity__AssetEntity_library"."id" = "AssetEntity"."libraryId" - WHERE - ( - ((("AssetEntity__AssetEntity_library"."id" = $1))) - AND ("AssetEntity"."originalPath" = $2) - ) - ) "distinctAlias" -ORDER BY - "AssetEntity_id" ASC -LIMIT - 1 - -- AssetRepository.getPathsNotInLibrary WITH paths AS ( From 32ce59750844f7e0b21e6303a7d40cfdb8dbf168 Mon Sep 17 00:00:00 
2001 From: Jonathan Jogenfors Date: Wed, 8 Jan 2025 01:03:22 +0100 Subject: [PATCH 06/10] wip --- e2e/src/utils.ts | 3 - open-api/immich-openapi-specs.json | 241 ++++++++++++++++--- server/src/controllers/library.controller.ts | 1 - server/src/interfaces/library.interface.ts | 1 - server/src/services/library.service.ts | 16 +- 5 files changed, 209 insertions(+), 53 deletions(-) diff --git a/e2e/src/utils.ts b/e2e/src/utils.ts index 7b96b712f0a51..b00c3c0b6d30d 100644 --- a/e2e/src/utils.ts +++ b/e2e/src/utils.ts @@ -460,9 +460,6 @@ export const utils = { validateLibrary: (accessToken: string, id: string, dto: ValidateLibraryDto) => validate({ id, validateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), - updateLibrary: (accessToken: string, id: string, dto: UpdateLibraryDto) => - updateLibrary({ id, updateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), - createPartner: (accessToken: string, id: string) => createPartner({ id }, { headers: asBearerAuth(accessToken) }), updateMyPreferences: (accessToken: string, userPreferencesUpdateDto: UserPreferencesUpdateDto) => diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index 57ee614ee75e4..40e9fc74494b5 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -7573,7 +7573,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/ReactionType" + "allOf": [ + { + "$ref": "#/components/schemas/ReactionType" + } + ] } }, "required": [ @@ -7600,7 +7604,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/ReactionType" + "allOf": [ + { + "$ref": "#/components/schemas/ReactionType" + } + ] }, "user": { "$ref": "#/components/schemas/UserResponseDto" @@ -7701,7 +7709,11 @@ "type": "string" }, "order": { - "$ref": "#/components/schemas/AssetOrder" + "allOf": [ + { + "$ref": "#/components/schemas/AssetOrder" + } + ] }, "owner": { "$ref": "#/components/schemas/UserResponseDto" @@ -7761,7 +7773,12 @@ "AlbumUserAddDto": { "properties": { "role": { - "$ref": "#/components/schemas/AlbumUserRole" + "allOf": [ + { + "$ref": "#/components/schemas/AlbumUserRole" + } + ], + "default": "editor" }, "userId": { "format": "uuid", @@ -7776,7 +7793,11 @@ "AlbumUserCreateDto": { "properties": { "role": { - "$ref": "#/components/schemas/AlbumUserRole" + "allOf": [ + { + "$ref": "#/components/schemas/AlbumUserRole" + } + ] }, "userId": { "format": "uuid", @@ -7792,7 +7813,11 @@ "AlbumUserResponseDto": { "properties": { "role": { - "$ref": "#/components/schemas/AlbumUserRole" + "allOf": [ + { + "$ref": "#/components/schemas/AlbumUserRole" + } + ] }, "user": { "$ref": "#/components/schemas/UserResponseDto" @@ -8089,7 +8114,11 @@ "nullable": true }, "sourceType": { - "$ref": "#/components/schemas/SourceType" + "allOf": [ + { + "$ref": "#/components/schemas/SourceType" + } + ] } }, "required": [ @@ -8160,7 +8189,11 @@ "type": "integer" }, "sourceType": { - "$ref": "#/components/schemas/SourceType" + "allOf": [ + { + "$ref": "#/components/schemas/SourceType" + } + ] } }, "required": [ @@ -8256,7 +8289,11 @@ "type": "array" }, "name": { - "$ref": "#/components/schemas/AssetJobName" + "allOf": [ + { + "$ref": "#/components/schemas/AssetJobName" + } + ] } }, "required": [ @@ -8354,7 +8391,11 @@ "type": "string" }, "status": { - "$ref": "#/components/schemas/AssetMediaStatus" + "allOf": [ + { + "$ref": "#/components/schemas/AssetMediaStatus" + } + ] } }, "required": [ @@ -8492,7 +8533,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/AssetTypeEnum" + 
"allOf": [ + { + "$ref": "#/components/schemas/AssetTypeEnum" + } + ] }, "unassignedFaces": { "items": { @@ -8605,7 +8650,11 @@ "AvatarResponse": { "properties": { "color": { - "$ref": "#/components/schemas/UserAvatarColor" + "allOf": [ + { + "$ref": "#/components/schemas/UserAvatarColor" + } + ] } }, "required": [ @@ -8616,7 +8665,11 @@ "AvatarUpdate": { "properties": { "color": { - "$ref": "#/components/schemas/UserAvatarColor" + "allOf": [ + { + "$ref": "#/components/schemas/UserAvatarColor" + } + ] } }, "type": "object" @@ -9253,10 +9306,18 @@ "type": "string" }, "entityType": { - "$ref": "#/components/schemas/PathEntityType" + "allOf": [ + { + "$ref": "#/components/schemas/PathEntityType" + } + ] }, "pathType": { - "$ref": "#/components/schemas/PathType" + "allOf": [ + { + "$ref": "#/components/schemas/PathType" + } + ] }, "pathValue": { "type": "string" @@ -9318,7 +9379,11 @@ "JobCommandDto": { "properties": { "command": { - "$ref": "#/components/schemas/JobCommand" + "allOf": [ + { + "$ref": "#/components/schemas/JobCommand" + } + ] }, "force": { "type": "boolean" @@ -9363,7 +9428,11 @@ "JobCreateDto": { "properties": { "name": { - "$ref": "#/components/schemas/ManualJobName" + "allOf": [ + { + "$ref": "#/components/schemas/ManualJobName" + } + ] } }, "required": [ @@ -9697,7 +9766,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/MemoryType" + "allOf": [ + { + "$ref": "#/components/schemas/MemoryType" + } + ] } }, "required": [ @@ -9762,7 +9835,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/MemoryType" + "allOf": [ + { + "$ref": "#/components/schemas/MemoryType" + } + ] }, "updatedAt": { "format": "date-time", @@ -9891,7 +9968,11 @@ "type": "string" }, "order": { - "$ref": "#/components/schemas/AssetOrder" + "allOf": [ + { + "$ref": "#/components/schemas/AssetOrder" + } + ] }, "originalFileName": { "type": "string" @@ -9942,7 +10023,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/AssetTypeEnum" + "allOf": [ + { + "$ref": "#/components/schemas/AssetTypeEnum" + } + ] }, "updatedAfter": { "format": "date-time", @@ -10026,7 +10111,11 @@ "PartnerResponseDto": { "properties": { "avatarColor": { - "$ref": "#/components/schemas/UserAvatarColor" + "allOf": [ + { + "$ref": "#/components/schemas/UserAvatarColor" + } + ] }, "email": { "type": "string" @@ -10544,7 +10633,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/AssetTypeEnum" + "allOf": [ + { + "$ref": "#/components/schemas/AssetTypeEnum" + } + ] }, "updatedAfter": { "format": "date-time", @@ -11212,7 +11305,11 @@ "type": "boolean" }, "type": { - "$ref": "#/components/schemas/SharedLinkType" + "allOf": [ + { + "$ref": "#/components/schemas/SharedLinkType" + } + ] } }, "required": [ @@ -11297,7 +11394,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/SharedLinkType" + "allOf": [ + { + "$ref": "#/components/schemas/SharedLinkType" + } + ] }, "userId": { "type": "string" @@ -11447,7 +11548,11 @@ "type": "string" }, "type": { - "$ref": "#/components/schemas/AssetTypeEnum" + "allOf": [ + { + "$ref": "#/components/schemas/AssetTypeEnum" + } + ] }, "updatedAfter": { "format": "date-time", @@ -11629,7 +11734,11 @@ "SystemConfigFFmpegDto": { "properties": { "accel": { - "$ref": "#/components/schemas/TranscodeHWAccel" + "allOf": [ + { + "$ref": "#/components/schemas/TranscodeHWAccel" + } + ] }, "accelDecode": { "type": "boolean" @@ -11658,7 +11767,11 @@ "type": "integer" }, "cqMode": { - "$ref": "#/components/schemas/CQMode" + "allOf": [ + { + "$ref": 
"#/components/schemas/CQMode" + } + ] }, "crf": { "maximum": 51, @@ -11684,13 +11797,21 @@ "type": "integer" }, "targetAudioCodec": { - "$ref": "#/components/schemas/AudioCodec" + "allOf": [ + { + "$ref": "#/components/schemas/AudioCodec" + } + ] }, "targetResolution": { "type": "string" }, "targetVideoCodec": { - "$ref": "#/components/schemas/VideoCodec" + "allOf": [ + { + "$ref": "#/components/schemas/VideoCodec" + } + ] }, "temporalAQ": { "type": "boolean" @@ -11700,10 +11821,18 @@ "type": "integer" }, "tonemap": { - "$ref": "#/components/schemas/ToneMapping" + "allOf": [ + { + "$ref": "#/components/schemas/ToneMapping" + } + ] }, "transcode": { - "$ref": "#/components/schemas/TranscodePolicy" + "allOf": [ + { + "$ref": "#/components/schemas/TranscodePolicy" + } + ] }, "twoPass": { "type": "boolean" @@ -11748,7 +11877,11 @@ "SystemConfigGeneratedImageDto": { "properties": { "format": { - "$ref": "#/components/schemas/ImageFormat" + "allOf": [ + { + "$ref": "#/components/schemas/ImageFormat" + } + ] }, "quality": { "maximum": 100, @@ -11770,7 +11903,11 @@ "SystemConfigImageDto": { "properties": { "colorspace": { - "$ref": "#/components/schemas/Colorspace" + "allOf": [ + { + "$ref": "#/components/schemas/Colorspace" + } + ] }, "extractEmbedded": { "type": "boolean" @@ -11888,7 +12025,11 @@ "type": "boolean" }, "level": { - "$ref": "#/components/schemas/LogLevel" + "allOf": [ + { + "$ref": "#/components/schemas/LogLevel" + } + ] } }, "required": [ @@ -12559,7 +12700,11 @@ "type": "boolean" }, "order": { - "$ref": "#/components/schemas/AssetOrder" + "allOf": [ + { + "$ref": "#/components/schemas/AssetOrder" + } + ] } }, "type": "object" @@ -12567,7 +12712,11 @@ "UpdateAlbumUserDto": { "properties": { "role": { - "$ref": "#/components/schemas/AlbumUserRole" + "allOf": [ + { + "$ref": "#/components/schemas/AlbumUserRole" + } + ] } }, "required": [ @@ -12734,7 +12883,11 @@ "UserAdminResponseDto": { "properties": { "avatarColor": { - "$ref": "#/components/schemas/UserAvatarColor" + "allOf": [ + { + "$ref": "#/components/schemas/UserAvatarColor" + } + ] }, "createdAt": { "format": "date-time", @@ -12789,7 +12942,11 @@ "type": "boolean" }, "status": { - "$ref": "#/components/schemas/UserStatus" + "allOf": [ + { + "$ref": "#/components/schemas/UserStatus" + } + ] }, "storageLabel": { "nullable": true, @@ -12962,7 +13119,11 @@ "UserResponseDto": { "properties": { "avatarColor": { - "$ref": "#/components/schemas/UserAvatarColor" + "allOf": [ + { + "$ref": "#/components/schemas/UserAvatarColor" + } + ] }, "email": { "type": "string" diff --git a/server/src/controllers/library.controller.ts b/server/src/controllers/library.controller.ts index adf0f6c106240..53691e61591b7 100644 --- a/server/src/controllers/library.controller.ts +++ b/server/src/controllers/library.controller.ts @@ -3,7 +3,6 @@ import { ApiTags } from '@nestjs/swagger'; import { CreateLibraryDto, LibraryResponseDto, - LibraryStatsResponseDto, UpdateLibraryDto, ValidateLibraryDto, ValidateLibraryResponseDto, diff --git a/server/src/interfaces/library.interface.ts b/server/src/interfaces/library.interface.ts index 803cf1bc4ed31..b5585734a1345 100644 --- a/server/src/interfaces/library.interface.ts +++ b/server/src/interfaces/library.interface.ts @@ -1,4 +1,3 @@ -import { ADDED_IN_PREFIX } from 'src/constants'; import { LibraryStatsResponseDto } from 'src/dtos/library.dto'; import { LibraryEntity } from 'src/entities/library.entity'; diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 
cab6494b04c63..0e70803d19a93 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -7,7 +7,6 @@ import { OnEvent, OnJob } from 'src/decorators'; import { CreateLibraryDto, LibraryResponseDto, - LibraryStatsResponseDto, mapLibrary, UpdateLibraryDto, ValidateLibraryDto, @@ -15,7 +14,6 @@ import { ValidateLibraryResponseDto, } from 'src/dtos/library.dto'; import { AssetEntity } from 'src/entities/asset.entity'; -import { LibraryEntity } from 'src/entities/library.entity'; import { AssetStatus, AssetType, ImmichWorker } from 'src/enum'; import { AssetCreate } from 'src/interfaces/asset.interface'; import { DatabaseLock } from 'src/interfaces/database.interface'; @@ -358,8 +356,8 @@ export class LibraryService extends BaseService { const datePlaceholder = new Date('1900-01-01'); return { - ownerId: ownerId, - libraryId: libraryId, + ownerId, + libraryId, checksum: this.cryptoRepository.hashSha1(`path:${assetPath}`), originalPath: assetPath, @@ -442,16 +440,18 @@ export class LibraryService extends BaseService { for (const asset of assets) { const action = await this.handleSyncAsset(asset); switch (action) { - case AssetSyncResult.OFFLINE: + case AssetSyncResult.OFFLINE: { assetIdsToOffline.push(asset.id); break; - case AssetSyncResult.UPDATE: + } + case AssetSyncResult.UPDATE: { assetIdsToUpdate.push(asset.id); break; + } } } - if (assetIdsToOffline.length) { + if (assetIdsToOffline.length > 0) { await this.assetRepository.updateAll(assetIdsToOffline, { isOffline: true, status: AssetStatus.TRASHED, @@ -459,7 +459,7 @@ export class LibraryService extends BaseService { }); } - if (assetIdsToUpdate.length) { + if (assetIdsToUpdate.length > 0) { //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed await this.assetRepository.updateAll(assetIdsToUpdate, { isOffline: false, From bc291b4d36e89ca6e4c4e3dce144a4549bbc9151 Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Thu, 9 Jan 2025 14:31:43 +0100 Subject: [PATCH 07/10] wip --- e2e/src/api/specs/library.e2e-spec.ts | 4 + e2e/src/utils.ts | 6 +- server/src/dtos/album.dto.ts | 4 +- server/src/dtos/asset-response.dto.ts | 6 +- server/src/entities/asset.entity.ts | 12 +- server/src/interfaces/job.interface.ts | 9 +- server/src/interfaces/library.interface.ts | 1 + .../1736718596137-nullable-dates.ts | 18 +++ server/src/repositories/asset.repository.ts | 11 +- server/src/services/job.service.ts | 2 +- server/src/services/library.service.ts | 146 +++++++++++++----- server/src/services/metadata.service.ts | 54 +++++-- .../src/services/storage-template.service.ts | 2 +- 13 files changed, 200 insertions(+), 75 deletions(-) create mode 100644 server/src/migrations/1736718596137-nullable-dates.ts diff --git a/e2e/src/api/specs/library.e2e-spec.ts b/e2e/src/api/specs/library.e2e-spec.ts index e2e69529fbb9d..fa85fd9d09668 100644 --- a/e2e/src/api/specs/library.e2e-spec.ts +++ b/e2e/src/api/specs/library.e2e-spec.ts @@ -550,6 +550,8 @@ describe('/libraries', () => { await scan(admin.accessToken, library.id); await utils.waitForQueueFinish(admin.accessToken, 'library'); + await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); + await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, `${testAssetDir}/temp/reimport/asset.jpg`); await utimes(`${testAssetDir}/temp/reimport/asset.jpg`, 447_775_200_000); @@ -575,6 +577,8 @@ describe('/libraries', () => { expect(asset).toEqual( expect.objectContaining({ 
originalFileName: 'asset.jpg', + + // If the exif info contains the D750 model it means the asset was reimported which is not desired exifInfo: expect.not.objectContaining({ model: 'NIKON D750', }), diff --git a/e2e/src/utils.ts b/e2e/src/utils.ts index b00c3c0b6d30d..7b80ba49aab28 100644 --- a/e2e/src/utils.ts +++ b/e2e/src/utils.ts @@ -454,12 +454,12 @@ export const utils = { createLibrary: (accessToken: string, dto: CreateLibraryDto) => createLibrary({ createLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), - updateLibrary: (accessToken: string, id: string, dto: UpdateLibraryDto) => - updateLibrary({ id, updateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), - validateLibrary: (accessToken: string, id: string, dto: ValidateLibraryDto) => validate({ id, validateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), + updateLibrary: (accessToken: string, id: string, dto: UpdateLibraryDto) => + updateLibrary({ id, updateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), + createPartner: (accessToken: string, id: string) => createPartner({ id }, { headers: asBearerAuth(accessToken) }), updateMyPreferences: (accessToken: string, userPreferencesUpdateDto: UserPreferencesUpdateDto) => diff --git a/server/src/dtos/album.dto.ts b/server/src/dtos/album.dto.ts index 76f4fdfc98f4a..4fdba1875dc58 100644 --- a/server/src/dtos/album.dto.ts +++ b/server/src/dtos/album.dto.ts @@ -165,8 +165,8 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt const hasSharedLink = entity.sharedLinks?.length > 0; const hasSharedUser = sharedUsers.length > 0; - let startDate = getAssetDateTime(assets.at(0)); - let endDate = getAssetDateTime(assets.at(-1)); + let startDate = getAssetDateTime(assets.at(0)) ?? undefined; + let endDate = getAssetDateTime(assets.at(-1)) ?? undefined; // Swap dates if start date is greater than end date. 
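Because `fileCreatedAt`, `fileModifiedAt`, and `localDateTime` can now be null until metadata extraction fills them in, callers have to coalesce or skip missing dates, as the `?? undefined` change above and the memory-lane guard later in this patch do. A small illustrative helper, with hypothetical names and only the relevant field modelled, showing that pattern:

```typescript
// Hypothetical helper; only the nullable localDateTime field from AssetEntity is modelled here.
interface AssetDates {
  localDateTime: Date | null;
}

// Returns the newest populated localDateTime, or undefined when no asset has one yet.
function newestLocalDateTime(assets: AssetDates[]): Date | undefined {
  const timestamps = assets
    .map((asset) => asset.localDateTime)
    .filter((date): date is Date => date !== null)
    .map((date) => date.getTime());

  return timestamps.length > 0 ? new Date(Math.max(...timestamps)) : undefined;
}
```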
if (startDate && endDate && startDate > endDate) { [startDate, endDate] = [endDate, startDate]; diff --git a/server/src/dtos/asset-response.dto.ts b/server/src/dtos/asset-response.dto.ts index a255ac103b10e..2f0d4da649068 100644 --- a/server/src/dtos/asset-response.dto.ts +++ b/server/src/dtos/asset-response.dto.ts @@ -21,7 +21,7 @@ export class SanitizedAssetResponseDto { type!: AssetType; thumbhash!: string | null; originalMimeType?: string; - localDateTime!: Date; + localDateTime!: Date | null; duration!: string; livePhotoVideoId?: string | null; hasMetadata!: boolean; @@ -36,8 +36,8 @@ export class AssetResponseDto extends SanitizedAssetResponseDto { libraryId?: string | null; originalPath!: string; originalFileName!: string; - fileCreatedAt!: Date; - fileModifiedAt!: Date; + fileCreatedAt!: Date | null; + fileModifiedAt!: Date | null; updatedAt!: Date; isFavorite!: boolean; isArchived!: boolean; diff --git a/server/src/entities/asset.entity.ts b/server/src/entities/asset.entity.ts index f9e5c5e9813d2..adee84fd306be 100644 --- a/server/src/entities/asset.entity.ts +++ b/server/src/entities/asset.entity.ts @@ -94,14 +94,14 @@ export class AssetEntity { deletedAt!: Date | null; @Index('idx_asset_file_created_at') - @Column({ type: 'timestamptz' }) - fileCreatedAt!: Date; + @Column({ type: 'timestamptz', nullable: true }) + fileCreatedAt!: Date | null; - @Column({ type: 'timestamptz' }) - localDateTime!: Date; + @Column({ type: 'timestamptz', nullable: true }) + localDateTime!: Date | null; - @Column({ type: 'timestamptz' }) - fileModifiedAt!: Date; + @Column({ type: 'timestamptz', nullable: true }) + fileModifiedAt!: Date | null; @Column({ type: 'boolean', default: false }) isFavorite!: boolean; diff --git a/server/src/interfaces/job.interface.ts b/server/src/interfaces/job.interface.ts index 424691f087e09..5636621efd8a8 100644 --- a/server/src/interfaces/job.interface.ts +++ b/server/src/interfaces/job.interface.ts @@ -1,4 +1,5 @@ import { ClassConstructor } from 'class-transformer'; +import { LibraryEntity } from 'src/entities/library.entity'; import { EmailImageAttachment } from 'src/interfaces/notification.interface'; export enum QueueName { @@ -135,7 +136,7 @@ export interface IDelayedJob extends IBaseJob { export interface IEntityJob extends IBaseJob { id: string; - source?: 'upload' | 'library-import' | 'sidecar-write' | 'copy'; + source?: 'upload' | 'sidecar-write' | 'copy'; notify?: boolean; } @@ -147,11 +148,15 @@ export interface ILibraryFileJob { libraryId: string; ownerId: string; assetPaths: string[]; + progressCounter?: number; + totalAssets?: number; } export interface ILibraryBulkIdsJob { - libraryId: string; + library: LibraryEntity; assetIds: string[]; + progressCounter?: number; + totalAssets?: number; } export interface IBulkEntityJob { diff --git a/server/src/interfaces/library.interface.ts b/server/src/interfaces/library.interface.ts index b5585734a1345..a2052cd2247b0 100644 --- a/server/src/interfaces/library.interface.ts +++ b/server/src/interfaces/library.interface.ts @@ -7,6 +7,7 @@ export enum AssetSyncResult { DO_NOTHING, UPDATE, OFFLINE, + ONLINE, } export interface ILibraryRepository { diff --git a/server/src/migrations/1736718596137-nullable-dates.ts b/server/src/migrations/1736718596137-nullable-dates.ts new file mode 100644 index 0000000000000..4c949de531d13 --- /dev/null +++ b/server/src/migrations/1736718596137-nullable-dates.ts @@ -0,0 +1,18 @@ +import { MigrationInterface, QueryRunner } from "typeorm"; + +export class NullableDates1736718596137 
implements MigrationInterface { + name = 'NullableDates1736718596137' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileCreatedAt" DROP NOT NULL`); + await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "localDateTime" DROP NOT NULL`); + await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileModifiedAt" DROP NOT NULL`); + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileModifiedAt" SET NOT NULL`); + await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "localDateTime" SET NOT NULL`); + await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileCreatedAt" SET NOT NULL`); + } + +} diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index cc01d0c9bea49..9be4dc70132e6 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -91,6 +91,7 @@ export class AssetRepository implements IAssetRepository { `entity.ownerId IN (:...ownerIds) AND entity.isVisible = true AND entity.isArchived = false + AND entity.localDateTime IS NOT NULL AND EXTRACT(DAY FROM entity.localDateTime AT TIME ZONE 'UTC') = :day AND EXTRACT(MONTH FROM entity.localDateTime AT TIME ZONE 'UTC') = :month`, { @@ -111,11 +112,13 @@ export class AssetRepository implements IAssetRepository { const groups: Record = {}; const currentYear = new Date().getFullYear(); for (const asset of assets) { - const yearsAgo = currentYear - asset.localDateTime.getFullYear(); - if (!groups[yearsAgo]) { - groups[yearsAgo] = { yearsAgo, assets: [] }; + if (asset.localDateTime) { + const yearsAgo = currentYear - asset.localDateTime.getFullYear(); + if (!groups[yearsAgo]) { + groups[yearsAgo] = { yearsAgo, assets: [] }; + } + groups[yearsAgo].assets.push(asset); } - groups[yearsAgo].assets.push(asset); } return Object.values(groups); diff --git a/server/src/services/job.service.ts b/server/src/services/job.service.ts index a9a430858e488..2faed0a51666a 100644 --- a/server/src/services/job.service.ts +++ b/server/src/services/job.service.ts @@ -266,7 +266,7 @@ export class JobService extends BaseService { } case JobName.GENERATE_THUMBNAILS: { - if (!item.data.notify && item.data.source !== 'upload' && item.data.source !== 'library-import') { + if (!item.data.notify && item.data.source !== 'upload') { break; } diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 964cb5983c264..e13c11a3e5353 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -14,6 +14,7 @@ import { ValidateLibraryResponseDto, } from 'src/dtos/library.dto'; import { AssetEntity } from 'src/entities/asset.entity'; +import { LibraryEntity } from 'src/entities/library.entity'; import { AssetStatus, AssetType, ImmichWorker } from 'src/enum'; import { AssetCreate } from 'src/interfaces/asset.interface'; import { DatabaseLock } from 'src/interfaces/database.interface'; @@ -231,7 +232,15 @@ export class LibraryService extends BaseService { assetIds.push(...batchIds); } - this.logger.log(`Imported ${assetIds.length} file(s) into library ${job.libraryId}`); + let progressMessage = ''; + + if (job.progressCounter && job.totalAssets) { + progressMessage = `(${job.progressCounter} of ${job.totalAssets}) `; + } else { + progressMessage = `(${job.progressCounter} done so far) `; + } + + this.logger.log(`Imported ${assetIds.length} 
${progressMessage}file(s) into library ${job.libraryId}`); await this.queuePostSyncJobs(assetIds); @@ -352,21 +361,19 @@ export class LibraryService extends BaseService { private processEntity(filePath: string, ownerId: string, libraryId: string): AssetCreate { const assetPath = path.normalize(filePath); - // This date will be set until metadata extraction runs - const datePlaceholder = new Date('1900-01-01'); - return { ownerId, libraryId, checksum: this.cryptoRepository.hashSha1(`path:${assetPath}`), originalPath: assetPath, + // These dates are placeholders and will be read from disk during metadata extraction + fileCreatedAt: null, + fileModifiedAt: null, + localDateTime: null, // TODO: device asset id is deprecated, remove it deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''), deviceId: 'Library Import', - fileCreatedAt: datePlaceholder, - fileModifiedAt: datePlaceholder, - localDateTime: datePlaceholder, type: mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE, originalFileName: parse(assetPath).base, isExternal: true, @@ -380,8 +387,7 @@ export class LibraryService extends BaseService { await this.jobRepository.queueAll( assetIds.map((assetId) => ({ name: JobName.SIDECAR_DISCOVERY, - data: { id: assetId }, - source: 'upload', + data: { id: assetId, source: 'upload' }, })), ); } @@ -434,17 +440,22 @@ export class LibraryService extends BaseService { const assets = await this.assetRepository.getByIds(job.assetIds); const assetIdsToOffline: string[] = []; + const assetIdsToOnline: string[] = []; const assetIdsToUpdate: string[] = []; - this.logger.debug(`Checking batch of ${assets.length} existing asset(s) in library ${job.libraryId}`); + this.logger.debug(`Checking batch of ${assets.length} existing asset(s) in library ${job.library.id}`); for (const asset of assets) { - const action = await this.handleSyncAsset(asset); + const action = await this.checkExistingAsset(asset, job.library); switch (action) { case AssetSyncResult.OFFLINE: { assetIdsToOffline.push(asset.id); break; } + case AssetSyncResult.ONLINE: { + assetIdsToOnline.push(asset.id); + break; + } case AssetSyncResult.UPDATE: { assetIdsToUpdate.push(asset.id); break; @@ -452,39 +463,75 @@ export class LibraryService extends BaseService { } } + let progressMessage = ''; + if (assetIdsToOffline.length > 0) { await this.assetRepository.updateAll(assetIdsToOffline, { isOffline: true, status: AssetStatus.TRASHED, deletedAt: new Date(), }); + + progressMessage += `${assetIdsToOffline.length} offlined`; } - if (assetIdsToUpdate.length > 0) { + if (assetIdsToOnline.length > 0) { //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed - await this.assetRepository.updateAll(assetIdsToUpdate, { + await this.assetRepository.updateAll(assetIdsToOnline, { isOffline: false, status: AssetStatus.ACTIVE, deletedAt: null, }); + await this.queuePostSyncJobs(assetIdsToOnline); + + if (progressMessage !== '') { + progressMessage + ', '; + } + + progressMessage += `${assetIdsToOnline.length} onlined`; + } + + if (assetIdsToUpdate.length > 0) { + //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed await this.queuePostSyncJobs(assetIdsToUpdate); + + if (progressMessage !== '') { + progressMessage + ', '; + } + + progressMessage += `${assetIdsToUpdate.length} updated`; + } + + const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length - assetIdsToOnline.length; + + if (remainingCount) { + if (progressMessage !== '') { + 
progressMessage + ', '; + } + + progressMessage += `${remainingCount} unchanged`; } - const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length; + let cumulativeProgressMessage = ''; + + if (job.progressCounter && job.totalAssets) { + const cumulativePercentage = ((100 * job.progressCounter) / job.totalAssets).toFixed(1); + + cumulativeProgressMessage = `(Total progress: ${job.progressCounter} of ${job.totalAssets}, ${cumulativePercentage} %) `; + } this.logger.log( - `Checked existing asset(s): ${assetIdsToOffline.length} offlined, ${assetIdsToUpdate.length} updated, ${remainingCount} unchanged of batch of ${assets.length} in library ${job.libraryId}.`, + `Checked existing asset(s): ${progressMessage} of current batch of ${assets.length} ${cumulativeProgressMessage}in library ${job.library.id}.`, ); return JobStatus.SUCCESS; } - private async checkOfflineAsset(asset: AssetEntity) { + private async checkOfflineAsset(asset: AssetEntity, library: LibraryEntity): Promise { if (!asset.libraryId) { return false; } - const library = await this.libraryRepository.get(asset.libraryId); if (!library) { return false; } @@ -502,7 +549,7 @@ export class LibraryService extends BaseService { return true; } - private async handleSyncAsset(asset: AssetEntity): Promise { + private async checkExistingAsset(asset: AssetEntity, library: LibraryEntity): Promise { if (!asset) { return AssetSyncResult.DO_NOTHING; } @@ -522,16 +569,31 @@ export class LibraryService extends BaseService { } const mtime = stat.mtime; - const isAssetModified = mtime.toISOString() !== asset.fileModifiedAt.toISOString(); + const isAssetTimeUpdated = asset.fileModifiedAt ? mtime.toISOString() !== asset.fileModifiedAt.toISOString() : true; + let shouldAssetGoOnline = false; if (asset.isOffline && asset.status != AssetStatus.DELETED) { // Only perform the expensive check if the asset is offline - shouldAssetGoOnline = await this.checkOfflineAsset(asset); + + // TODO: give more feedback on why asset was onlined + shouldAssetGoOnline = await this.checkOfflineAsset(asset, library); + + if (shouldAssetGoOnline) { + this.logger.debug(`Asset is back online: ${asset.originalPath}`); + + return AssetSyncResult.ONLINE; + } else { + this.logger.debug(`Asset is still offline: ${asset.originalPath}`); + + return AssetSyncResult.DO_NOTHING; + } } - if (shouldAssetGoOnline || isAssetModified) { - this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`); + if (isAssetTimeUpdated) { + this.logger.verbose( + `Asset ${asset.originalPath} modification time changed from ${asset.fileModifiedAt?.toISOString()} to ${mtime.toISOString()}, queuing re-import`, + ); return AssetSyncResult.UPDATE; } @@ -566,6 +628,8 @@ export class LibraryService extends BaseService { return JobStatus.SKIPPED; } + let assetsOnDiskCount = 0; + const pathsOnDisk = this.storageRepository.walk({ pathsToCrawl: validImportPaths, includeHidden: false, @@ -576,27 +640,31 @@ export class LibraryService extends BaseService { let importCount = 0; let crawlCount = 0; - this.logger.log(`Starting crawl of ${validImportPaths.length} path(s) for library ${library.id}...`); + this.logger.log(`Starting disk crawl of ${validImportPaths.length} import path(s) for library ${library.id}...`); for await (const pathBatch of pathsOnDisk) { crawlCount += pathBatch.length; - this.logger.debug( - `Crawled ${pathBatch.length} file(s) for library ${library.id}, in total ${crawlCount} file(s) crawled so far`, - ); const newPaths = await 
this.assetRepository.getNewPaths(library.id, pathBatch); + if (newPaths.length > 0) { importCount += newPaths.length; await this.jobRepository.queue({ name: JobName.LIBRARY_SYNC_FILES, - data: { libraryId: library.id, ownerId: library.ownerId, assetPaths: newPaths }, + data: { + libraryId: library.id, + ownerId: library.ownerId, + assetPaths: newPaths, + progressCounter: crawlCount, + totalAssets: assetsOnDiskCount, + }, }); this.logger.log( - `Crawled ${crawlCount} file(s) so far: ${newPaths.length} of current batch queued for import for ${library.id}...`, + `Crawled ${crawlCount} file(s) so far: ${newPaths.length} of current batch of ${pathBatch.length} will be imported to library ${library.id}...`, ); } else { this.logger.log( - `Crawled ${crawlCount} file(s) so far: ${pathBatch.length} of current batch already in library ${library.id}...`, + `Crawled ${crawlCount} file(s) so far: All ${pathBatch.length} of current batch already in library ${library.id}...`, ); } } @@ -633,7 +701,7 @@ export class LibraryService extends BaseService { } this.logger.log( - `${assetCount} asset(s) in library ${library.id} will be checked against import paths and exclusion patterns...`, + `Checking ${assetCount} asset(s) against import paths and exclusion patterns in library ${library.id}...`, ); const offlineResult = await this.assetRepository.updateOffline(library); @@ -646,17 +714,15 @@ export class LibraryService extends BaseService { if (affectedAssetCount === assetCount) { this.logger.log( - `All ${assetCount} asset(s) in ${library.id} are outside of import paths and/or match an exclusion pattern, marked as offline`, + `All ${assetCount} asset(s) were offlined due to import paths and/or exclusion pattern(s) in ${library.id}`, ); return JobStatus.SUCCESS; - } else if (affectedAssetCount !== assetCount && affectedAssetCount > 0) { - this.logger.log( - `${offlineResult.affected} asset(s) out of ${assetCount} were marked offline due to import paths and/or exclusion patterns for library ${library.id}`, - ); + } else if (affectedAssetCount == 0) { + this.logger.log(`No assets were offlined due to import paths and/or exclusion pattern(s) in ${library.id} `); } else { this.logger.log( - `All ${assetCount} asset(s) in library ${library.id} were in an import path and none matched an exclusion pattern`, + `${offlineResult.affected} asset(s) out of ${assetCount} were offlined due to import paths and/or exclusion pattern(s) in library ${library.id}`, ); } @@ -673,13 +739,17 @@ export class LibraryService extends BaseService { await this.jobRepository.queue({ name: JobName.LIBRARY_SYNC_ASSETS, data: { - libraryId: library.id, + library, assetIds: assets.map((asset) => asset.id), + progressCounter: currentAssetCount, + totalAssets: assetCount, }, }); + const completePercentage = ((100 * currentAssetCount) / assetCount).toFixed(1); + this.logger.log( - `Queued check of ${currentAssetCount} of ${assetCount} existing asset(s) so far in library ${library.id}`, + `Queued check of ${currentAssetCount} of ${assetCount} (${completePercentage} %) existing asset(s) so far in library ${library.id}`, ); } diff --git a/server/src/services/metadata.service.ts b/server/src/services/metadata.service.ts index ea6a27bdea985..244b6acc54c9c 100644 --- a/server/src/services/metadata.service.ts +++ b/server/src/services/metadata.service.ts @@ -3,6 +3,7 @@ import { ContainerDirectoryItem, ExifDateTime, Maybe, Tags } from 'exiftool-vend import { firstDateTime } from 'exiftool-vendored/dist/FirstDateTime'; import _ from 'lodash'; import { 
Duration } from 'luxon'; +import { file } from 'mock-fs/lib/filesystem'; import { constants } from 'node:fs/promises'; import path from 'node:path'; import { SystemConfig } from 'src/config'; @@ -148,24 +149,20 @@ export class MetadataService extends BaseService { } @OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION }) - async handleMetadataExtraction({ id, source }: JobOf): Promise { + async handleMetadataExtraction({ id }: JobOf): Promise { const { metadata, reverseGeocoding } = await this.getConfig({ withCache: true }); const [asset] = await this.assetRepository.getByIds([id], { faces: { person: false } }); if (!asset) { return JobStatus.FAILED; } - if (source === 'library-import') { - await this.processSidecar(id, false); - } - const stats = await this.storageRepository.stat(asset.originalPath); const exifTags = await this.getExifTags(asset); this.logger.verbose('Exif Tags', exifTags); - const { dateTimeOriginal, localDateTime, timeZone, modifyDate } = this.getDates(asset, exifTags); + const dates = await this.getDates(asset, exifTags); const { latitude, longitude, country, state, city } = await this.getGeo(exifTags, reverseGeocoding); const { width, height } = this.getImageDimensions(exifTags); @@ -174,9 +171,9 @@ export class MetadataService extends BaseService { assetId: asset.id, // dates - dateTimeOriginal, - modifyDate, - timeZone, + dateTimeOriginal: dates.dateTimeOriginal, + modifyDate: dates.modifyDate, + timeZone: dates.timeZone, // gps latitude, @@ -222,8 +219,9 @@ export class MetadataService extends BaseService { await this.assetRepository.update({ id: asset.id, duration: exifTags.Duration?.toString() ?? null, - localDateTime, + localDateTime: dates.localDateTime, fileCreatedAt: exifData.dateTimeOriginal ?? undefined, + fileModifiedAt: exifData.dateTimeOriginal ?? 
undefined, }); await this.assetRepository.upsertJobStatus({ @@ -454,7 +452,7 @@ export class MetadataService extends BaseService { } } else { const motionAssetId = this.cryptoRepository.randomUUID(); - const dates = this.getDates(asset, tags); + const dates = await this.getDates(asset, tags); motionAsset = await this.assetRepository.create({ id: motionAssetId, libraryId: asset.libraryId, @@ -572,7 +570,7 @@ export class MetadataService extends BaseService { } } - private getDates(asset: AssetEntity, exifTags: ImmichTags) { + private async getDates(asset: AssetEntity, exifTags: ImmichTags) { const dateTime = firstDateTime(exifTags as Maybe, EXIF_DATE_TAGS); this.logger.verbose(`Asset ${asset.id} date time is ${dateTime}`); @@ -590,25 +588,51 @@ export class MetadataService extends BaseService { this.logger.warn(`Asset ${asset.id} has no time zone information`); } + let fileCreatedAt = asset.fileCreatedAt; + let fileModifiedAt = asset.fileModifiedAt; + + if (!fileCreatedAt || !fileModifiedAt) { + let stat; + + // Throw error if the file does not exist + stat = await this.storageRepository.stat(asset.originalPath); + + if (!fileCreatedAt) { + fileCreatedAt = stat.mtime; + this.logger.debug( + `No valid fileCreatedAt date found for asset ${asset.id}, read file creation date from filesystem: ${fileCreatedAt.toISOString()}`, + ); + } + + if (!fileModifiedAt) { + fileModifiedAt = stat.mtime; + this.logger.debug( + `No valid fileModifiedAt date found for asset ${asset.id}, read file modification date from filesystem: ${fileModifiedAt.toISOString()}`, + ); + } + } + let dateTimeOriginal = dateTime?.toDate(); let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate(); if (!localDateTime || !dateTimeOriginal) { this.logger.debug( `No valid date found in exif tags from asset ${asset.id}, falling back to earliest timestamp between file creation and file modification`, ); - const earliestDate = this.earliestDate(asset.fileModifiedAt, asset.fileCreatedAt); + const earliestDate = this.earliestDate(fileModifiedAt, fileCreatedAt); dateTimeOriginal = earliestDate; localDateTime = earliestDate; } - this.logger.verbose(`Asset ${asset.id} has a local time of ${localDateTime.toISOString()}`); + this.logger.verbose(`Asset ${asset.id} has a local time of ${localDateTime?.toISOString()}`); - let modifyDate = asset.fileModifiedAt; + let modifyDate = fileModifiedAt; try { modifyDate = (exifTags.ModifyDate as ExifDateTime)?.toDate() ?? modifyDate; } catch {} return { + fileCreatedAt, + fileModifiedAt, dateTimeOriginal, timeZone, localDateTime, diff --git a/server/src/services/storage-template.service.ts b/server/src/services/storage-template.service.ts index e8e4bd12a5569..0e0b345d7175b 100644 --- a/server/src/services/storage-template.service.ts +++ b/server/src/services/storage-template.service.ts @@ -310,7 +310,7 @@ export class StorageTemplateService extends BaseService { const systemTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone; const zone = asset.exifInfo?.timeZone || systemTimeZone; - const dt = DateTime.fromJSDate(asset.fileCreatedAt, { zone }); + const dt = DateTime.fromJSDate(asset.fileCreatedAt ?? 
asset.createdAt, { zone }); for (const token of Object.values(storageTokens).flat()) { substitutions[token] = dt.toFormat(token); From 8bc9b668a0e56a4a250a4e241a40e71fe00adfd2 Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Mon, 13 Jan 2025 13:27:25 +0100 Subject: [PATCH 08/10] wip --- e2e/src/api/specs/library.e2e-spec.ts | 83 ++++++++++++++----- mobile/openapi/README.md | 2 +- mobile/openapi/lib/api/libraries_api.dart | 24 +++--- open-api/immich-openapi-specs.json | 33 ++++---- open-api/typescript-sdk/src/fetch-client.ts | 20 ++--- server/src/controllers/library.controller.ts | 4 +- server/src/db.d.ts | 6 +- server/src/dtos/album.dto.ts | 4 +- server/src/dtos/asset-response.dto.ts | 6 +- server/src/entities/asset.entity.ts | 12 +-- .../1736718596137-nullable-dates.ts | 18 ---- server/src/repositories/asset.repository.ts | 2 +- .../src/services/asset-media.service.spec.ts | 4 +- server/src/services/library.service.ts | 39 ++++----- server/src/services/metadata.service.ts | 69 +++++++-------- server/test/fixtures/shared-link.stub.ts | 6 +- .../admin/library-management/+page.svelte | 4 +- 17 files changed, 176 insertions(+), 160 deletions(-) delete mode 100644 server/src/migrations/1736718596137-nullable-dates.ts diff --git a/e2e/src/api/specs/library.e2e-spec.ts b/e2e/src/api/specs/library.e2e-spec.ts index fa85fd9d09668..202d5ea6535ec 100644 --- a/e2e/src/api/specs/library.e2e-spec.ts +++ b/e2e/src/api/specs/library.e2e-spec.ts @@ -716,7 +716,7 @@ describe('/libraries', () => { const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); - expect(assets).toEqual(assetsBefore); + expect(assets.items.map((asset) => asset.id)).toEqual(assetsBefore.items.map((asset) => asset.id)); }); describe('xmp metadata', async () => { @@ -735,12 +735,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2000-09-27T12:35:33.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2000-09-27T12:35:33+00:00', + }), }), ]); @@ -761,12 +766,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2000-09-27T12:35:33.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2000-09-27T12:35:33+00:00', + }), }), ]); @@ -788,12 +798,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ 
expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2000-09-27T12:35:33.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2000-09-27T12:35:33+00:00', + }), }), ]); @@ -824,12 +839,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2010-09-27T12:35:33.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2010-09-27T12:35:33+00:00', + }), }), ]); @@ -858,12 +878,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2000-09-27T12:35:33.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2000-09-27T12:35:33+00:00', + }), }), ]); @@ -892,12 +917,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2000-09-27T12:35:33.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2000-09-27T12:35:33+00:00', + }), }), ]); @@ -928,12 +958,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2010-09-27T12:35:33.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2010-09-27T12:35:33+00:00', + }), }), ]); @@ -963,12 +998,17 @@ describe('/libraries', () => { await utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2010-07-20T17:27:12.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2010-07-20T17:27:12+00:00', + }), }), ]); @@ -998,12 +1038,17 @@ describe('/libraries', () => { await 
utils.waitForQueueFinish(admin.accessToken, 'sidecar'); await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction'); - const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + const { assets: newAssets } = await utils.searchAssets(admin.accessToken, { + libraryId: library.id, + withExif: true, + }); expect(newAssets.items).toEqual([ expect.objectContaining({ originalFileName: 'glarus.nef', - fileCreatedAt: '2010-07-20T17:27:12.000Z', + exifInfo: expect.objectContaining({ + dateTimeOriginal: '2010-07-20T17:27:12+00:00', + }), }), ]); diff --git a/mobile/openapi/README.md b/mobile/openapi/README.md index 867092eef53e6..8190f94c6ab5c 100644 --- a/mobile/openapi/README.md +++ b/mobile/openapi/README.md @@ -130,8 +130,8 @@ Class | Method | HTTP request | Description *LibrariesApi* | [**createLibrary**](doc//LibrariesApi.md#createlibrary) | **POST** /libraries | *LibrariesApi* | [**deleteLibrary**](doc//LibrariesApi.md#deletelibrary) | **DELETE** /libraries/{id} | *LibrariesApi* | [**getAllLibraries**](doc//LibrariesApi.md#getalllibraries) | **GET** /libraries | -*LibrariesApi* | [**getAssetCount**](doc//LibrariesApi.md#getassetcount) | **GET** /libraries/{id}/count | *LibrariesApi* | [**getLibrary**](doc//LibrariesApi.md#getlibrary) | **GET** /libraries/{id} | +*LibrariesApi* | [**getLibraryStatistics**](doc//LibrariesApi.md#getlibrarystatistics) | **GET** /libraries/{id}/statistics | *LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan | *LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} | *LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate | diff --git a/mobile/openapi/lib/api/libraries_api.dart b/mobile/openapi/lib/api/libraries_api.dart index 6010b7a9fcf56..9ed89fcff24fa 100644 --- a/mobile/openapi/lib/api/libraries_api.dart +++ b/mobile/openapi/lib/api/libraries_api.dart @@ -147,13 +147,13 @@ class LibrariesApi { return null; } - /// Performs an HTTP 'GET /libraries/{id}/count' operation and returns the [Response]. + /// Performs an HTTP 'GET /libraries/{id}' operation and returns the [Response]. /// Parameters: /// /// * [String] id (required): - Future getAssetCountWithHttpInfo(String id,) async { + Future getLibraryWithHttpInfo(String id,) async { // ignore: prefer_const_declarations - final path = r'/libraries/{id}/count' + final path = r'/libraries/{id}' .replaceAll('{id}', id); // ignore: prefer_final_locals @@ -180,8 +180,8 @@ class LibrariesApi { /// Parameters: /// /// * [String] id (required): - Future getAssetCount(String id,) async { - final response = await getAssetCountWithHttpInfo(id,); + Future getLibrary(String id,) async { + final response = await getLibraryWithHttpInfo(id,); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -189,19 +189,19 @@ class LibrariesApi { // At the time of writing this, `dart:convert` will throw an "Unexpected end of input" // FormatException when trying to decode an empty string. if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) { - return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'num',) as num; + return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryResponseDto',) as LibraryResponseDto; } return null; } - /// Performs an HTTP 'GET /libraries/{id}' operation and returns the [Response]. 
+ /// Performs an HTTP 'GET /libraries/{id}/statistics' operation and returns the [Response]. /// Parameters: /// /// * [String] id (required): - Future getLibraryWithHttpInfo(String id,) async { + Future getLibraryStatisticsWithHttpInfo(String id,) async { // ignore: prefer_const_declarations - final path = r'/libraries/{id}' + final path = r'/libraries/{id}/statistics' .replaceAll('{id}', id); // ignore: prefer_final_locals @@ -228,8 +228,8 @@ class LibrariesApi { /// Parameters: /// /// * [String] id (required): - Future getLibrary(String id,) async { - final response = await getLibraryWithHttpInfo(id,); + Future getLibraryStatistics(String id,) async { + final response = await getLibraryStatisticsWithHttpInfo(id,); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -237,7 +237,7 @@ class LibrariesApi { // At the time of writing this, `dart:convert` will throw an "Unexpected end of input" // FormatException when trying to decode an empty string. if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) { - return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryResponseDto',) as LibraryResponseDto; + return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'num',) as num; } return null; diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index e49b3e148f987..01de992d8d3c7 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -2853,9 +2853,9 @@ ] } }, - "/libraries/{id}/count": { - "get": { - "operationId": "getAssetCount", + "/libraries/{id}/scan": { + "post": { + "operationId": "scanLibrary", "parameters": [ { "name": "id", @@ -2868,14 +2868,7 @@ } ], "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "type": "number" - } - } - }, + "204": { "description": "" } }, @@ -2895,9 +2888,9 @@ ] } }, - "/libraries/{id}/scan": { - "post": { - "operationId": "scanLibrary", + "/libraries/{id}/statistics": { + "get": { + "operationId": "getLibraryStatistics", "parameters": [ { "name": "id", @@ -2910,7 +2903,14 @@ } ], "responses": { - "204": { + "200": { + "content": { + "application/json": { + "schema": { + "type": "number" + } + } + }, "description": "" } }, @@ -8450,12 +8450,10 @@ }, "fileCreatedAt": { "format": "date-time", - "nullable": true, "type": "string" }, "fileModifiedAt": { "format": "date-time", - "nullable": true, "type": "string" }, "hasMetadata": { @@ -8488,7 +8486,6 @@ }, "localDateTime": { "format": "date-time", - "nullable": true, "type": "string" }, "originalFileName": { diff --git a/open-api/typescript-sdk/src/fetch-client.ts b/open-api/typescript-sdk/src/fetch-client.ts index bf8e26b5b878b..42aafac9ceb0c 100644 --- a/open-api/typescript-sdk/src/fetch-client.ts +++ b/open-api/typescript-sdk/src/fetch-client.ts @@ -2093,16 +2093,6 @@ export function updateLibrary({ id, updateLibraryDto }: { body: updateLibraryDto }))); } -export function getAssetCount({ id }: { - id: string; -}, opts?: Oazapfts.RequestOpts) { - return oazapfts.ok(oazapfts.fetchJson<{ - status: 200; - data: number; - }>(`/libraries/${encodeURIComponent(id)}/count`, { - ...opts - })); -} export function scanLibrary({ id }: { id: string; }, opts?: Oazapfts.RequestOpts) { @@ -2111,6 +2101,16 @@ export function scanLibrary({ id }: { method: "POST" })); } +export function getLibraryStatistics({ id }: { + id: string; +}, opts?: Oazapfts.RequestOpts) { + return 
oazapfts.ok(oazapfts.fetchJson<{ + status: 200; + data: number; + }>(`/libraries/${encodeURIComponent(id)}/statistics`, { + ...opts + })); +} export function validate({ id, validateLibraryDto }: { id: string; validateLibraryDto: ValidateLibraryDto; diff --git a/server/src/controllers/library.controller.ts b/server/src/controllers/library.controller.ts index 53691e61591b7..8bdf4f2e3bd15 100644 --- a/server/src/controllers/library.controller.ts +++ b/server/src/controllers/library.controller.ts @@ -56,9 +56,9 @@ export class LibraryController { return this.service.validate(id, dto); } - @Get(':id/count') + @Get(':id/statistics') @Authenticated({ permission: Permission.LIBRARY_STATISTICS, admin: true }) - getAssetCount(@Param() { id }: UUIDParamDto): Promise { + getLibraryStatistics(@Param() { id }: UUIDParamDto): Promise { return this.service.getAssetCount(id); } diff --git a/server/src/db.d.ts b/server/src/db.d.ts index 636963bfcf84c..a5cab5dab7a06 100644 --- a/server/src/db.d.ts +++ b/server/src/db.d.ts @@ -121,8 +121,8 @@ export interface Assets { duplicateId: string | null; duration: string | null; encodedVideoPath: Generated; - fileCreatedAt: Timestamp | null; - fileModifiedAt: Timestamp | null; + fileCreatedAt: Timestamp; + fileModifiedAt: Timestamp; id: Generated; isArchived: Generated; isExternal: Generated; @@ -131,7 +131,7 @@ export interface Assets { isVisible: Generated; libraryId: string | null; livePhotoVideoId: string | null; - localDateTime: Timestamp | null; + localDateTime: Timestamp; originalFileName: string; originalPath: string; ownerId: string; diff --git a/server/src/dtos/album.dto.ts b/server/src/dtos/album.dto.ts index 4fdba1875dc58..76f4fdfc98f4a 100644 --- a/server/src/dtos/album.dto.ts +++ b/server/src/dtos/album.dto.ts @@ -165,8 +165,8 @@ export const mapAlbum = (entity: AlbumEntity, withAssets: boolean, auth?: AuthDt const hasSharedLink = entity.sharedLinks?.length > 0; const hasSharedUser = sharedUsers.length > 0; - let startDate = getAssetDateTime(assets.at(0)) ?? undefined; - let endDate = getAssetDateTime(assets.at(-1)) ?? undefined; + let startDate = getAssetDateTime(assets.at(0)); + let endDate = getAssetDateTime(assets.at(-1)); // Swap dates if start date is greater than end date. 
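  // Consumer-side migration sketch for the renamed SDK helper shown earlier in this patch,
  // mirroring the web admin library-management page change later in the series (illustrative
  // only; variable names are assumptions):
  //   before: const count = await getAssetCount({ id: library.id }, { headers: asBearerAuth(accessToken) });
  //   after:  const count = await getLibraryStatistics({ id: library.id }, { headers: asBearerAuth(accessToken) });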
if (startDate && endDate && startDate > endDate) { [startDate, endDate] = [endDate, startDate]; diff --git a/server/src/dtos/asset-response.dto.ts b/server/src/dtos/asset-response.dto.ts index 6f0e557cc236b..0658567912a35 100644 --- a/server/src/dtos/asset-response.dto.ts +++ b/server/src/dtos/asset-response.dto.ts @@ -21,7 +21,7 @@ export class SanitizedAssetResponseDto { type!: AssetType; thumbhash!: string | null; originalMimeType?: string; - localDateTime!: Date | null; + localDateTime!: Date; duration!: string; livePhotoVideoId?: string | null; hasMetadata!: boolean; @@ -36,8 +36,8 @@ export class AssetResponseDto extends SanitizedAssetResponseDto { libraryId?: string | null; originalPath!: string; originalFileName!: string; - fileCreatedAt!: Date | null; - fileModifiedAt!: Date | null; + fileCreatedAt!: Date; + fileModifiedAt!: Date; updatedAt!: Date; isFavorite!: boolean; isArchived!: boolean; diff --git a/server/src/entities/asset.entity.ts b/server/src/entities/asset.entity.ts index 8ba87b55c752a..401f599d6ffbf 100644 --- a/server/src/entities/asset.entity.ts +++ b/server/src/entities/asset.entity.ts @@ -100,14 +100,14 @@ export class AssetEntity { deletedAt!: Date | null; @Index('idx_asset_file_created_at') - @Column({ type: 'timestamptz', nullable: true }) - fileCreatedAt!: Date | null; + @Column({ type: 'timestamptz' }) + fileCreatedAt!: Date; - @Column({ type: 'timestamptz', nullable: true }) - localDateTime!: Date | null; + @Column({ type: 'timestamptz' }) + localDateTime!: Date; - @Column({ type: 'timestamptz', nullable: true }) - fileModifiedAt!: Date | null; + @Column({ type: 'timestamptz' }) + fileModifiedAt!: Date; @Column({ type: 'boolean', default: false }) isFavorite!: boolean; diff --git a/server/src/migrations/1736718596137-nullable-dates.ts b/server/src/migrations/1736718596137-nullable-dates.ts deleted file mode 100644 index 4c949de531d13..0000000000000 --- a/server/src/migrations/1736718596137-nullable-dates.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { MigrationInterface, QueryRunner } from "typeorm"; - -export class NullableDates1736718596137 implements MigrationInterface { - name = 'NullableDates1736718596137' - - public async up(queryRunner: QueryRunner): Promise { - await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileCreatedAt" DROP NOT NULL`); - await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "localDateTime" DROP NOT NULL`); - await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileModifiedAt" DROP NOT NULL`); - } - - public async down(queryRunner: QueryRunner): Promise { - await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileModifiedAt" SET NOT NULL`); - await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "localDateTime" SET NOT NULL`); - await queryRunner.query(`ALTER TABLE "assets" ALTER COLUMN "fileCreatedAt" SET NOT NULL`); - } - -} diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index 23c5aac679671..76fa03c351e19 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -1,5 +1,5 @@ import { Injectable } from '@nestjs/common'; -import { CompiledQuery, Insertable, Kysely, UpdateResult, Updateable, sql } from 'kysely'; +import { Insertable, Kysely, UpdateResult, Updateable, sql } from 'kysely'; import { isEmpty, isUndefined, omitBy } from 'lodash'; import { InjectKysely } from 'nestjs-kysely'; import { ASSET_FILE_CONFLICT_KEYS, EXIF_CONFLICT_KEYS, JOB_STATUS_CONFLICT_KEYS } from 'src/constants'; diff 
--git a/server/src/services/asset-media.service.spec.ts b/server/src/services/asset-media.service.spec.ts index 9dcfa3cbd9ce5..09cb11b5c3043 100644 --- a/server/src/services/asset-media.service.spec.ts +++ b/server/src/services/asset-media.service.spec.ts @@ -546,7 +546,7 @@ describe(AssetMediaService.name, () => { files: [ { assetId: assetStub.image.id, - createdAt: assetStub.image.fileCreatedAt, + createdAt: assetStub.image.fileCreatedAt ?? new Date(), id: '42', path: '/path/to/preview', type: AssetFileType.THUMBNAIL, @@ -566,7 +566,7 @@ describe(AssetMediaService.name, () => { files: [ { assetId: assetStub.image.id, - createdAt: assetStub.image.fileCreatedAt, + createdAt: assetStub.image.fileCreatedAt ?? new Date(), id: '42', path: '/path/to/preview.jpg', type: AssetFileType.PREVIEW, diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 2e317c9a0f8dd..718c185cbd333 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -229,11 +229,10 @@ export class LibraryService extends BaseService { let progressMessage = ''; - if (job.progressCounter && job.totalAssets) { - progressMessage = `(${job.progressCounter} of ${job.totalAssets})`; - } else { - progressMessage = `(${job.progressCounter} done so far)`; - } + progressMessage = + job.progressCounter && job.totalAssets + ? `(${job.progressCounter} of ${job.totalAssets})` + : `(${job.progressCounter} done so far)`; this.logger.log(`Imported ${assetIds.length} ${progressMessage} file(s) into library ${job.libraryId}`); @@ -362,10 +361,9 @@ export class LibraryService extends BaseService { checksum: this.cryptoRepository.hashSha1(`path:${assetPath}`), originalPath: assetPath, - // These dates are placeholders and will be read from disk during metadata extraction - fileCreatedAt: null, - fileModifiedAt: null, - localDateTime: null, + fileCreatedAt: new Date(), + fileModifiedAt: new Date(), + localDateTime: new Date(), // TODO: device asset id is deprecated, remove it deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''), deviceId: 'Library Import', @@ -480,8 +478,8 @@ export class LibraryService extends BaseService { }); await this.queuePostSyncJobs(assetIdsToOnline); - if (progressMessage !== '') { - progressMessage + ', '; + if (progressMessage) { + progressMessage += ', '; } progressMessage += `${assetIdsToOnline.length} onlined`; @@ -491,8 +489,8 @@ export class LibraryService extends BaseService { //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed await this.queuePostSyncJobs(assetIdsToUpdate); - if (progressMessage !== '') { - progressMessage + ', '; + if (progressMessage) { + progressMessage += ', '; } progressMessage += `${assetIdsToUpdate.length} updated`; @@ -501,8 +499,8 @@ export class LibraryService extends BaseService { const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length - assetIdsToOnline.length; if (remainingCount) { - if (progressMessage !== '') { - progressMessage + ', '; + if (progressMessage) { + progressMessage += ', '; } progressMessage += `${remainingCount} unchanged`; @@ -523,7 +521,7 @@ export class LibraryService extends BaseService { return JobStatus.SUCCESS; } - private async checkOfflineAsset(asset: AssetEntity, library: LibraryEntity): Promise { + private checkOfflineAsset(asset: AssetEntity, library: LibraryEntity): boolean { if (!asset.libraryId) { return false; } @@ -567,7 +565,7 @@ export class LibraryService extends BaseService { } const 
mtime = stat.mtime; - const isAssetTimeUpdated = asset.fileModifiedAt ? mtime.toISOString() !== asset.fileModifiedAt.toISOString() : true; + const isAssetTimeUpdated = mtime.toISOString() !== asset.fileModifiedAt.toISOString(); let shouldAssetGoOnline = false; @@ -575,7 +573,7 @@ export class LibraryService extends BaseService { // Only perform the expensive check if the asset is offline // TODO: give more feedback on why asset was onlined - shouldAssetGoOnline = await this.checkOfflineAsset(asset, library); + shouldAssetGoOnline = this.checkOfflineAsset(asset, library); if (shouldAssetGoOnline) { this.logger.debug(`Asset is back online: ${asset.originalPath}`); @@ -590,7 +588,7 @@ export class LibraryService extends BaseService { if (isAssetTimeUpdated) { this.logger.verbose( - `Asset ${asset.originalPath} modification time changed from ${asset.fileModifiedAt?.toISOString()} to ${mtime.toISOString()}, queuing re-import`, + `Asset ${asset.originalPath} modification time changed from ${asset.fileModifiedAt?.toISOString()} to ${mtime.toISOString()}, queuing re-import. Creation time is ${asset.fileCreatedAt?.toISOString()}`, ); return AssetSyncResult.UPDATE; @@ -626,8 +624,6 @@ export class LibraryService extends BaseService { return JobStatus.SKIPPED; } - let assetsOnDiskCount = 0; - const pathsOnDisk = this.storageRepository.walk({ pathsToCrawl: validImportPaths, includeHidden: false, @@ -654,7 +650,6 @@ export class LibraryService extends BaseService { ownerId: library.ownerId, assetPaths: newPaths, progressCounter: crawlCount, - totalAssets: assetsOnDiskCount, }, }); this.logger.log( diff --git a/server/src/services/metadata.service.ts b/server/src/services/metadata.service.ts index 89d1b74e54e2f..338c8c33c5c01 100644 --- a/server/src/services/metadata.service.ts +++ b/server/src/services/metadata.service.ts @@ -4,7 +4,7 @@ import { firstDateTime } from 'exiftool-vendored/dist/FirstDateTime'; import { Insertable } from 'kysely'; import _ from 'lodash'; import { Duration } from 'luxon'; -import { file } from 'mock-fs/lib/filesystem'; +import { Stats } from 'node:fs'; import { constants } from 'node:fs/promises'; import path from 'node:path'; import { SystemConfig } from 'src/config'; @@ -163,18 +163,30 @@ export class MetadataService extends BaseService { this.logger.verbose('Exif Tags', exifTags); - const dates = await this.getDates(asset, exifTags); + const { dateTimeOriginal, localDateTime, timeZone, modifyDate, fileCreatedAt, fileModifiedAt } = this.getDates( + asset, + exifTags, + stats, + ); const { latitude, longitude, country, state, city } = await this.getGeo(exifTags, reverseGeocoding); const { width, height } = this.getImageDimensions(exifTags); + let fileCreatedAtDate = dateTimeOriginal; + let fileModifiedAtDate = modifyDate; + + if (asset.isExternal) { + fileCreatedAtDate = fileCreatedAt; + fileModifiedAtDate = fileModifiedAt; + } + const exifData: Insertable = { assetId: asset.id, // dates - dateTimeOriginal: dates.dateTimeOriginal, - modifyDate: dates.modifyDate, - timeZone: dates.timeZone, + dateTimeOriginal, + modifyDate, + timeZone, // gps latitude, @@ -220,9 +232,9 @@ export class MetadataService extends BaseService { await this.assetRepository.update({ id: asset.id, duration: exifTags.Duration?.toString() ?? null, - localDateTime: dates.localDateTime, - fileCreatedAt: exifData.dateTimeOriginal ?? undefined, - fileModifiedAt: exifData.dateTimeOriginal ?? 
undefined, + localDateTime, + fileCreatedAt: fileCreatedAtDate, + fileModifiedAt: fileModifiedAtDate, }); await this.assetRepository.upsertJobStatus({ @@ -453,7 +465,7 @@ export class MetadataService extends BaseService { } } else { const motionAssetId = this.cryptoRepository.randomUUID(); - const dates = await this.getDates(asset, tags); + const dates = this.getDates(asset, tags, stat); motionAsset = await this.assetRepository.create({ id: motionAssetId, libraryId: asset.libraryId, @@ -571,7 +583,7 @@ export class MetadataService extends BaseService { } } - private async getDates(asset: AssetEntity, exifTags: ImmichTags) { + private getDates(asset: AssetEntity, exifTags: ImmichTags, stat: Stats) { const dateTime = firstDateTime(exifTags as Maybe, EXIF_DATE_TAGS); this.logger.verbose(`Asset ${asset.id} date time is ${dateTime}`); @@ -592,52 +604,39 @@ export class MetadataService extends BaseService { let fileCreatedAt = asset.fileCreatedAt; let fileModifiedAt = asset.fileModifiedAt; - if (!fileCreatedAt || !fileModifiedAt) { - let stat; - - // Throw error if the file does not exist - stat = await this.storageRepository.stat(asset.originalPath); - - if (!fileCreatedAt) { - fileCreatedAt = stat.mtime; - this.logger.debug( - `No valid fileCreatedAt date found for asset ${asset.id}, read file creation date from filesystem: ${fileCreatedAt.toISOString()}`, - ); - } + if (asset.isExternal) { + // With external assets we need to extract dates from the filesystem, this can't be done with uploades assets as that information is lost on upload + fileCreatedAt = stat.mtime; + fileModifiedAt = stat.mtime; - if (!fileModifiedAt) { - fileModifiedAt = stat.mtime; - this.logger.debug( - `No valid fileModifiedAt date found for asset ${asset.id}, read file modification date from filesystem: ${fileModifiedAt.toISOString()}`, - ); - } + this.logger.verbose(`External asset ${asset.id} has a file modification time of ${fileCreatedAt.toISOString()}`); } let dateTimeOriginal = dateTime?.toDate(); let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate(); if (!localDateTime || !dateTimeOriginal) { + const earliestDate = this.earliestDate(fileModifiedAt, fileCreatedAt); this.logger.debug( - `No valid date found in exif tags from asset ${asset.id}, falling back to earliest timestamp between file creation and file modification`, + `No valid date found in exif tags from asset ${asset.id}, falling back to earliest timestamp between file creation and file modification: ${earliestDate.toISOString()}`, ); - const earliestDate = this.earliestDate(fileModifiedAt, fileCreatedAt); dateTimeOriginal = earliestDate; localDateTime = earliestDate; } - this.logger.verbose(`Asset ${asset.id} has a local time of ${localDateTime?.toISOString()}`); + this.logger.verbose(`Asset ${asset.id} has a local time of ${localDateTime.toISOString()}`); - let modifyDate = fileModifiedAt; + let modifyDate = asset.fileModifiedAt; try { modifyDate = (exifTags.ModifyDate as ExifDateTime)?.toDate() ?? 
modifyDate; } catch {} return { - fileCreatedAt, - fileModifiedAt, dateTimeOriginal, timeZone, localDateTime, modifyDate, + fileCreatedAt, + fileModifiedAt, }; } @@ -758,6 +757,8 @@ export class MetadataService extends BaseService { if (asset.isExternal) { if (sidecarPath !== asset.sidecarPath) { + this.logger.verbose(`External asset ${asset.id} has sidecar path ${sidecarPath}`); + await this.assetRepository.update({ id: asset.id, sidecarPath }); } return JobStatus.SUCCESS; diff --git a/server/test/fixtures/shared-link.stub.ts b/server/test/fixtures/shared-link.stub.ts index 514bafe6a600a..a8b8e02d742b6 100644 --- a/server/test/fixtures/shared-link.stub.ts +++ b/server/test/fixtures/shared-link.stub.ts @@ -311,11 +311,7 @@ export const sharedLinkResponseStub = { allowUpload: false, allowDownload: false, showMetadata: false, - album: { - ...albumResponse, - startDate: assetResponse.fileCreatedAt ?? undefined, - endDate: assetResponse.fileCreatedAt ?? undefined, - }, + album: { ...albumResponse, startDate: assetResponse.fileCreatedAt, endDate: assetResponse.fileCreatedAt }, assets: [{ ...assetResponseWithoutMetadata, exifInfo: undefined }], }), }; diff --git a/web/src/routes/admin/library-management/+page.svelte b/web/src/routes/admin/library-management/+page.svelte index 20d35ff76d842..69e5b944ae49c 100644 --- a/web/src/routes/admin/library-management/+page.svelte +++ b/web/src/routes/admin/library-management/+page.svelte @@ -17,7 +17,7 @@ createLibrary, deleteLibrary, getAllLibraries, - getAssetCount, + getLibraryStatistics, getUserAdmin, scanLibrary, updateLibrary, @@ -67,7 +67,7 @@ }; const refreshStats = async (listIndex: number) => { - assetCount[listIndex] = await getAssetCount({ id: libraries[listIndex].id }); + assetCount[listIndex] = await getLibraryStatistics({ id: libraries[listIndex].id }); owner[listIndex] = await getUserAdmin({ id: libraries[listIndex].ownerId }); }; From f588e00ba670d4846991869c09bbb84b7704b2c4 Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Mon, 13 Jan 2025 22:09:38 +0100 Subject: [PATCH 09/10] wip --- docs/docs/features/libraries.md | 8 +- server/src/controllers/library.controller.ts | 2 +- server/src/interfaces/asset.interface.ts | 4 +- server/src/interfaces/job.interface.ts | 13 +- server/src/interfaces/library.interface.ts | 2 +- server/src/queries/trash.repository.sql | 5 +- server/src/repositories/asset.repository.ts | 8 +- server/src/repositories/trash.repository.ts | 8 +- server/src/services/library.service.spec.ts | 542 ++++++++---------- server/src/services/library.service.ts | 182 +++--- server/src/services/metadata.service.spec.ts | 7 + server/src/services/metadata.service.ts | 2 +- server/src/utils/misc.spec.ts | 3 +- server/src/utils/misc.ts | 10 - .../repositories/asset.repository.mock.ts | 4 +- 15 files changed, 346 insertions(+), 454 deletions(-) diff --git a/docs/docs/features/libraries.md b/docs/docs/features/libraries.md index 6a1dba9ebaab2..34ae0fdbb9ab3 100644 --- a/docs/docs/features/libraries.md +++ b/docs/docs/features/libraries.md @@ -37,7 +37,7 @@ To validate that Immich can reach your external library, start a shell inside th ### Exclusion Patterns -By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. 
Exclusion patterns can be added in the Scan Settings page for each library. Under the hood, Immich uses the [glob](https://www.npmjs.com/package/glob) package to match patterns, so please refer to [their documentation](https://github.com/isaacs/node-glob#glob-primer) to see what patterns are supported. +By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library. Some basic examples: @@ -48,7 +48,11 @@ Some basic examples: Special characters such as @ should be escaped, for instance: -- `**/\@eadir/**` will exclude all files in any directory named `@eadir` +- `**/\@eaDir/**` will exclude all files in any directory named `@eaDir` + +:::info +Internally, Immich uses the [glob](https://www.npmjs.com/package/glob) package to process exclusion patterns, and sometimes those patterns are translated into [Postgres LIKE patterns](https://www.postgresql.org/docs/current/functions-matching.html). The intention is to support basic folder exclusions but we recommend against advanced usage since those can't reliably be translated to the Postgres syntax. Please refer to the [glob documentation](https://github.com/isaacs/node-glob#glob-primer) for a basic overview on glob patterns. +::: ### Automatic watching (EXPERIMENTAL) diff --git a/server/src/controllers/library.controller.ts b/server/src/controllers/library.controller.ts index 8bdf4f2e3bd15..14711b2db4bdb 100644 --- a/server/src/controllers/library.controller.ts +++ b/server/src/controllers/library.controller.ts @@ -59,7 +59,7 @@ export class LibraryController { @Get(':id/statistics') @Authenticated({ permission: Permission.LIBRARY_STATISTICS, admin: true }) getLibraryStatistics(@Param() { id }: UUIDParamDto): Promise { - return this.service.getAssetCount(id); + return this.service.getStatistics(id); } @Post(':id/scan') diff --git a/server/src/interfaces/asset.interface.ts b/server/src/interfaces/asset.interface.ts index 97b2a5a6f552a..2296091064889 100644 --- a/server/src/interfaces/asset.interface.ts +++ b/server/src/interfaces/asset.interface.ts @@ -170,7 +170,7 @@ export interface IAssetRepository { getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise; upsertFile(file: UpsertFileOptions): Promise; upsertFiles(files: UpsertFileOptions[]): Promise; - updateOffline(library: LibraryEntity): Promise; - getNewPaths(libraryId: string, paths: string[]): Promise; + detectOfflineExternalAssets(library: LibraryEntity): Promise; + filterNewExternalAssetPaths(libraryId: string, paths: string[]): Promise; getAssetCount(options: AssetSearchOptions): Promise; } diff --git a/server/src/interfaces/job.interface.ts b/server/src/interfaces/job.interface.ts index 5636621efd8a8..4eedb57e15308 100644 --- a/server/src/interfaces/job.interface.ts +++ b/server/src/interfaces/job.interface.ts @@ -1,5 +1,4 @@ import { ClassConstructor } from 'class-transformer'; -import { LibraryEntity } from 'src/entities/library.entity'; import { EmailImageAttachment } from 'src/interfaces/notification.interface'; export enum QueueName { @@ -88,6 +87,7 @@ export enum JobName { LIBRARY_SYNC_FILES = 'library-sync-files', LIBRARY_SYNC_ASSETS = 'library-sync-assets', LIBRARY_DELETE = 'library-delete', + LIBRARY_ASSET_REMOVAL = 
'handle-library-file-deletion', LIBRARY_QUEUE_SYNC_ALL = 'library-queue-sync-all', LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup', @@ -146,14 +146,15 @@ export interface IAssetDeleteJob extends IEntityJob { export interface ILibraryFileJob { libraryId: string; - ownerId: string; assetPaths: string[]; progressCounter?: number; totalAssets?: number; } export interface ILibraryBulkIdsJob { - library: LibraryEntity; + libraryId: string; + importPaths: string[]; + exclusionPatterns: string[]; assetIds: string[]; progressCounter?: number; totalAssets?: number; @@ -163,11 +164,6 @@ export interface IBulkEntityJob { ids: string[]; } -export interface ILibraryAssetsJob extends IBulkEntityJob { - importPaths: string[]; - exclusionPatterns: string[]; -} - export interface IDeleteFilesJob extends IBaseJob { files: Array; } @@ -300,6 +296,7 @@ export type JobItem = | { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob } | { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryBulkIdsJob } | { name: JobName.LIBRARY_DELETE; data: IEntityJob } + | { name: JobName.LIBRARY_ASSET_REMOVAL; data: ILibraryFileJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ALL; data?: IBaseJob } | { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob } diff --git a/server/src/interfaces/library.interface.ts b/server/src/interfaces/library.interface.ts index a2052cd2247b0..dc0452013c197 100644 --- a/server/src/interfaces/library.interface.ts +++ b/server/src/interfaces/library.interface.ts @@ -7,7 +7,7 @@ export enum AssetSyncResult { DO_NOTHING, UPDATE, OFFLINE, - ONLINE, + CHECK_OFFLINE, } export interface ILibraryRepository { diff --git a/server/src/queries/trash.repository.sql b/server/src/queries/trash.repository.sql index 7c9fdb4c5d312..77c2ea51d0d4d 100644 --- a/server/src/queries/trash.repository.sql +++ b/server/src/queries/trash.repository.sql @@ -15,10 +15,7 @@ set "status" = $1 where "ownerId" = $2 - and ( - "status" = $3 - or "isOffline" = $4 - ) + and "status" = $3 -- TrashRepository.restoreAll update "assets" diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index 76fa03c351e19..abd827213d549 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -744,7 +744,7 @@ export class AssetRepository implements IAssetRepository { .execute(); } - async updateOffline(library: LibraryEntity): Promise { + async detectOfflineExternalAssets(library: LibraryEntity): Promise { const paths = library.importPaths.map((importPath) => `${importPath}%`); const exclusions = library.exclusionPatterns.map((pattern) => globToSqlPattern(pattern)); @@ -755,6 +755,7 @@ export class AssetRepository implements IAssetRepository { deletedAt: new Date(), }) .where('isOffline', '=', false) + .where('isExternal', '=', true) .where('libraryId', '=', asUuid(library.id)) .where((eb) => eb.or([eb('originalPath', 'not like', paths.join('|')), eb('originalPath', 'like', exclusions.join('|'))]), @@ -762,7 +763,7 @@ export class AssetRepository implements IAssetRepository { .executeTakeFirstOrThrow(); } - async getNewPaths(libraryId: string, paths: string[]): Promise { + async filterNewExternalAssetPaths(libraryId: string, paths: string[]): Promise { const result = await this.db .selectFrom( this.db @@ -783,7 +784,8 @@ export class AssetRepository implements IAssetRepository { .selectFrom('assets') .select('originalPath') .whereRef('assets.originalPath', '=', sql.ref('unnested_paths.path')) - .where('libraryId', '=', asUuid(libraryId)), + 
.where('libraryId', '=', asUuid(libraryId)) + .where('isExternal', '=', true), ), ), ) diff --git a/server/src/repositories/trash.repository.ts b/server/src/repositories/trash.repository.ts index 14a7b09c6681f..c1db31a3db38d 100644 --- a/server/src/repositories/trash.repository.ts +++ b/server/src/repositories/trash.repository.ts @@ -9,11 +9,7 @@ export class TrashRepository implements ITrashRepository { constructor(@InjectKysely() private db: Kysely) {} getDeletedIds(): AsyncIterableIterator<{ id: string }> { - return this.db - .selectFrom('assets') - .select(['id']) - .where((eb) => eb.or([eb('status', '=', AssetStatus.DELETED), eb('isOffline', '=', true)])) - .stream(); + return this.db.selectFrom('assets').select(['id']).where('status', '=', AssetStatus.DELETED).stream(); } @GenerateSql({ params: [DummyValue.UUID] }) @@ -33,7 +29,7 @@ export class TrashRepository implements ITrashRepository { const { numUpdatedRows } = await this.db .updateTable('assets') .where('ownerId', '=', userId) - .where((eb) => eb.or([eb('status', '=', AssetStatus.TRASHED), eb('isOffline', '=', true)])) + .where('status', '=', AssetStatus.TRASHED) .set({ status: AssetStatus.DELETED }) .executeTakeFirst(); diff --git a/server/src/services/library.service.spec.ts b/server/src/services/library.service.spec.ts index a1b7193b02d62..f4c02c4805f0d 100644 --- a/server/src/services/library.service.spec.ts +++ b/server/src/services/library.service.spec.ts @@ -3,14 +3,14 @@ import { Stats } from 'node:fs'; import { defaults, SystemConfig } from 'src/config'; import { mapLibrary } from 'src/dtos/library.dto'; import { UserEntity } from 'src/entities/user.entity'; -import { AssetType, ImmichWorker } from 'src/enum'; +import { AssetStatus, AssetType, ImmichWorker } from 'src/enum'; import { IAssetRepository } from 'src/interfaces/asset.interface'; import { IConfigRepository } from 'src/interfaces/config.interface'; import { ICronRepository } from 'src/interfaces/cron.interface'; import { IDatabaseRepository } from 'src/interfaces/database.interface'; import { IJobRepository, - ILibraryAssetsJob, + ILibraryBulkIdsJob, ILibraryFileJob, JobName, JOBS_LIBRARY_PAGINATION_SIZE, @@ -172,21 +172,22 @@ describe(LibraryService.name, () => { describe('handleQueueSyncFiles', () => { it('should queue refresh of a new asset', async () => { - libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); + libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); storageMock.walk.mockImplementation(mockWalk); + storageMock.stat.mockResolvedValue({ isDirectory: () => true } as Stats); + storageMock.checkFileExists.mockResolvedValue(true); + assetMock.filterNewExternalAssetPaths.mockResolvedValue(['/data/user1/photo.jpg']); - await sut.handleQueueSyncFiles({ id: libraryStub.externalLibrary1.id }); + await sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id }); - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { - name: JobName.LIBRARY_SYNC_FILES, - data: { - id: libraryStub.externalLibrary1.id, - ownerId: libraryStub.externalLibrary1.owner.id, - assetPath: '/data/user1/photo.jpg', - }, + expect(jobMock.queue).toHaveBeenCalledWith({ + name: JobName.LIBRARY_SYNC_FILES, + data: { + libraryId: libraryStub.externalLibraryWithImportPaths1.id, + assetPaths: ['/data/user1/photo.jpg'], + progressCounter: 1, }, - ]); + }); }); it("should fail when library can't be found", async () => { @@ -221,158 +222,233 @@ describe(LibraryService.name, () => { }); }); - describe('handleQueueRemoveDeleted', () => { 
- it('should queue online check of existing assets', async () => { + describe('handleQueueSyncAssets', () => { + it('should call the offline check', async () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); storageMock.walk.mockImplementation(async function* generator() {}); assetMock.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); + assetMock.getAssetCount.mockResolvedValue(1); + assetMock.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) }); - await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id }); + const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id }); - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { - name: JobName.LIBRARY_SYNC_ASSETS, - data: { - id: assetStub.external.id, - importPaths: libraryStub.externalLibrary1.importPaths, - exclusionPatterns: [], - }, - }, - ]); + expect(response).toBe(JobStatus.SUCCESS); + expect(assetMock.detectOfflineExternalAssets).toHaveBeenCalledWith(libraryStub.externalLibrary1); }); - it("should fail when library can't be found", async () => { - libraryMock.get.mockResolvedValue(null); + it('should skip an empty library', async () => { + libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); + storageMock.walk.mockImplementation(async function* generator() {}); + assetMock.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); + assetMock.getAssetCount.mockResolvedValue(0); + assetMock.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) }); - await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SKIPPED); + const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id }); + + expect(response).toBe(JobStatus.SUCCESS); + expect(assetMock.detectOfflineExternalAssets).not.toHaveBeenCalled(); }); - }); - describe('handleSyncAsset', () => { - it('should skip missing assets', async () => { - const mockAssetJob: ILibraryAssetsJob = { - ids: [assetStub.external.id], - importPaths: ['/'], - exclusionPatterns: [], - }; + it('should queue asset sync', async () => { + libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); + storageMock.walk.mockImplementation(async function* generator() {}); + assetMock.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); + assetMock.getAssetCount.mockResolvedValue(1); + assetMock.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(0) }); + assetMock.getAllInLibrary.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED); + const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibraryWithImportPaths1.id }); - expect(assetMock.remove).not.toHaveBeenCalled(); + expect(jobMock.queue).toBeCalledWith({ + name: JobName.LIBRARY_SYNC_ASSETS, + data: { + libraryId: libraryStub.externalLibraryWithImportPaths1.id, + importPaths: libraryStub.externalLibraryWithImportPaths1.importPaths, + exclusionPatterns: libraryStub.externalLibraryWithImportPaths1.exclusionPatterns, + assetIds: [assetStub.external.id], + progressCounter: 1, + totalAssets: 1, + }, + }); + + expect(response).toBe(JobStatus.SUCCESS); + expect(assetMock.detectOfflineExternalAssets).toHaveBeenCalledWith(libraryStub.externalLibraryWithImportPaths1); }); + it("should fail if library can't be found", async () => { + 
libraryMock.get.mockResolvedValue(null); + + await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SKIPPED); + }); + }); + + describe('handleSyncAssets', () => { it('should offline assets no longer on disk', async () => { - const mockAssetJob: ILibraryAssetsJob = { - ids: [assetStub.external.id], + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.external.id], + libraryId: libraryStub.externalLibrary1.id, importPaths: ['/'], exclusionPatterns: [], }; - assetMock.getById.mockResolvedValue(assetStub.external); + assetMock.getByIds.mockResolvedValue([assetStub.external]); storageMock.stat.mockRejectedValue(new Error('ENOENT, no such file or directory')); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, - deletedAt: expect.any(Date), + deletedAt: expect.anything(), + status: 'trashed', }); }); - it('should offline assets matching an exclusion pattern', async () => { - const mockAssetJob: ILibraryAssetsJob = { - ids: [assetStub.external.id], - importPaths: ['/'], - exclusionPatterns: ['**/user1/**'], + it('should set assets deleted from disk as offline', async () => { + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.external.id], + libraryId: libraryStub.externalLibrary1.id, + importPaths: ['/data/user2'], + exclusionPatterns: [], }; - assetMock.getById.mockResolvedValue(assetStub.external); + assetMock.getByIds.mockResolvedValue([assetStub.external]); + storageMock.stat.mockRejectedValue(new Error('Could not read file')); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, - deletedAt: expect.any(Date), + deletedAt: expect.anything(), + status: AssetStatus.TRASHED, }); }); - it('should set assets outside of import paths as offline', async () => { - const mockAssetJob: ILibraryAssetsJob = { - ids: [assetStub.external.id], + it('should do nothing with offline assets deleted from disk', async () => { + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.trashedOffline.id], + libraryId: libraryStub.externalLibrary1.id, importPaths: ['/data/user2'], exclusionPatterns: [], }; - assetMock.getById.mockResolvedValue(assetStub.external); - storageMock.checkFileExists.mockResolvedValue(true); + assetMock.getByIds.mockResolvedValue([assetStub.trashedOffline]); + storageMock.stat.mockRejectedValue(new Error('Could not read file')); + + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + + expect(assetMock.updateAll).not.toHaveBeenCalled(); + }); + + it('should un-trash an asset previously marked as offline', async () => { + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.trashedOffline.id], + libraryId: libraryStub.externalLibrary1.id, + importPaths: ['/original/'], + exclusionPatterns: [], + }; + + assetMock.getByIds.mockResolvedValue([assetStub.trashedOffline]); + storageMock.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { - isOffline: true, - deletedAt: expect.any(Date), + isOffline: false, + deletedAt: null, + status: AssetStatus.ACTIVE, }); + + expect(jobMock.queueAll).toHaveBeenCalledWith([ + { + name: 
JobName.SIDECAR_DISCOVERY, + data: { + id: assetStub.external.id, + source: 'upload', + }, + }, + ]); }); - it('should do nothing with online assets', async () => { - const mockAssetJob: ILibraryAssetsJob = { - ids: [assetStub.external.id], - importPaths: ['/'], + it('should do nothing with offline asset if covered by exclusion pattern', async () => { + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.trashedOffline.id], + libraryId: libraryStub.externalLibrary1.id, + importPaths: ['/original/'], + exclusionPatterns: ['**/path.jpg'], + }; + + assetMock.getByIds.mockResolvedValue([assetStub.trashedOffline]); + storageMock.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); + + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + + expect(assetMock.updateAll).not.toHaveBeenCalled(); + + expect(jobMock.queueAll).not.toHaveBeenCalled(); + }); + + it('should do nothing with offline asset if not in import path', async () => { + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.trashedOffline.id], + libraryId: libraryStub.externalLibrary1.id, + importPaths: ['/import/'], exclusionPatterns: [], }; - assetMock.getById.mockResolvedValue(assetStub.external); + assetMock.getByIds.mockResolvedValue([assetStub.trashedOffline]); storageMock.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).not.toHaveBeenCalled(); + + expect(jobMock.queueAll).not.toHaveBeenCalled(); }); - it('should un-trash an asset previously marked as offline', async () => { - const mockAssetJob: ILibraryAssetsJob = { - ids: [assetStub.external.id], + it('should do nothing with unchanged online assets', async () => { + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.external.id], + libraryId: libraryStub.externalLibrary1.id, importPaths: ['/'], exclusionPatterns: [], }; - assetMock.getById.mockResolvedValue(assetStub.trashedOffline); - storageMock.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats); + assetMock.getByIds.mockResolvedValue([assetStub.external]); + storageMock.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); - expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.trashedOffline.id], { - deletedAt: null, - fileCreatedAt: assetStub.trashedOffline.fileModifiedAt, - fileModifiedAt: assetStub.trashedOffline.fileModifiedAt, - isOffline: false, - originalFileName: 'path.jpg', - }); + expect(assetMock.updateAll).not.toHaveBeenCalled(); }); - }); - it('should update file when mtime has changed', async () => { - const mockAssetJob: ILibraryAssetsJob = { - ids: [assetStub.external.id], - importPaths: ['/'], - exclusionPatterns: [], - }; - - const newMTime = new Date(); - assetMock.getById.mockResolvedValue(assetStub.external); - storageMock.stat.mockResolvedValue({ mtime: newMTime } as Stats); - - await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); - - expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { - fileModifiedAt: newMTime, - fileCreatedAt: newMTime, - isOffline: false, - originalFileName: 'photo.jpg', - deletedAt: null, + it('should update with online assets that have changed', async () => { + const mockAssetJob: ILibraryBulkIdsJob = { + assetIds: [assetStub.external.id], + 
libraryId: libraryStub.externalLibrary1.id, + importPaths: ['/'], + exclusionPatterns: [], + }; + + assetMock.getByIds.mockResolvedValue([assetStub.external]); + storageMock.stat.mockResolvedValue({ mtime: new Date(assetStub.external.fileModifiedAt.getDate() + 1) } as Stats); + + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + + expect(assetMock.updateAll).not.toHaveBeenCalled(); + + expect(jobMock.queueAll).toHaveBeenCalledWith([ + { + name: JobName.SIDECAR_DISCOVERY, + data: { + id: assetStub.external.id, + source: 'upload', + }, + }, + ]); }); }); - describe('handleSyncFile', () => { + describe('handleSyncFiles', () => { let mockUser: UserEntity; beforeEach(() => { @@ -387,187 +463,57 @@ describe(LibraryService.name, () => { it('should import a new asset', async () => { const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: mockUser.id, - assetPath: '/data/user1/photo.jpg', + libraryId: libraryStub.externalLibrary1.id, + assetPaths: ['/data/user1/photo.jpg'], }; - assetMock.create.mockResolvedValue(assetStub.image); + assetMock.createAll.mockResolvedValue([assetStub.image]); libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS); - - expect(assetMock.create.mock.calls).toEqual([ - [ - { - ownerId: mockUser.id, - libraryId: libraryStub.externalLibrary1.id, - checksum: expect.any(Buffer), - originalPath: '/data/user1/photo.jpg', - deviceAssetId: expect.any(String), - deviceId: 'Library Import', - fileCreatedAt: expect.any(Date), - fileModifiedAt: expect.any(Date), - localDateTime: expect.any(Date), - type: AssetType.IMAGE, - originalFileName: 'photo.jpg', - isExternal: true, - }, - ], - ]); + await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS); - expect(jobMock.queue.mock.calls).toEqual([ + expect(assetMock.createAll.mock.calls).toEqual([ [ - { - name: JobName.SIDECAR_DISCOVERY, - data: { - id: assetStub.image.id, - source: 'upload', - }, - }, - ], - ]); - }); - - it('should import a new video', async () => { - const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: mockUser.id, - assetPath: '/data/user1/video.mp4', - }; - - assetMock.create.mockResolvedValue(assetStub.video); - libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); - - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS); - - expect(assetMock.create.mock.calls).toEqual([ - [ - { - ownerId: mockUser.id, - libraryId: libraryStub.externalLibrary1.id, - checksum: expect.any(Buffer), - originalPath: '/data/user1/video.mp4', - deviceAssetId: expect.any(String), - deviceId: 'Library Import', - fileCreatedAt: expect.any(Date), - fileModifiedAt: expect.any(Date), - localDateTime: expect.any(Date), - type: AssetType.VIDEO, - originalFileName: 'video.mp4', - isExternal: true, - }, + [ + expect.objectContaining({ + ownerId: mockUser.id, + libraryId: libraryStub.externalLibrary1.id, + originalPath: '/data/user1/photo.jpg', + deviceId: 'Library Import', + type: AssetType.IMAGE, + originalFileName: 'photo.jpg', + isExternal: true, + }), + ], ], ]); - expect(jobMock.queue.mock.calls).toEqual([ + expect(jobMock.queueAll.mock.calls).toEqual([ [ - { - name: JobName.SIDECAR_DISCOVERY, - data: { - id: assetStub.image.id, - source: 'upload', + [ + { + name: JobName.SIDECAR_DISCOVERY, + data: { + id: assetStub.image.id, + source: 'upload', + }, }, - }, + ], ], ]); }); 
it('should not import an asset to a soft deleted library', async () => { const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: mockUser.id, - assetPath: '/data/user1/photo.jpg', + libraryId: libraryStub.externalLibrary1.id, + assetPaths: ['/data/user1/photo.jpg'], }; - assetMock.create.mockResolvedValue(assetStub.image); libraryMock.get.mockResolvedValue({ ...libraryStub.externalLibrary1, deletedAt: new Date() }); - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED); - - expect(assetMock.create.mock.calls).toEqual([]); - }); - - it('should not refresh a file whose mtime matches existing asset', async () => { - const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: mockUser.id, - assetPath: assetStub.hasFileExtension.originalPath, - }; - - storageMock.stat.mockResolvedValue({ - size: 100, - mtime: assetStub.hasFileExtension.fileModifiedAt, - ctime: new Date('2023-01-01'), - } as Stats); - - assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.hasFileExtension); - - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED); - - expect(jobMock.queue).not.toHaveBeenCalled(); - expect(jobMock.queueAll).not.toHaveBeenCalled(); - }); - - it('should skip existing asset', async () => { - const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: mockUser.id, - assetPath: '/data/user1/photo.jpg', - }; - - assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image); - - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED); - }); - - it('should not refresh an asset trashed by user', async () => { - const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: mockUser.id, - assetPath: assetStub.hasFileExtension.originalPath, - }; - - assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.trashed); - - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED); - - expect(jobMock.queue).not.toHaveBeenCalled(); - expect(jobMock.queueAll).not.toHaveBeenCalled(); - }); - - it('should fail when the file could not be read', async () => { - storageMock.stat.mockRejectedValue(new Error('Could not read file')); - - const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: userStub.admin.id, - assetPath: '/data/user1/photo.jpg', - }; - - assetMock.create.mockResolvedValue(assetStub.image); + await expect(sut.handleSyncFiles(mockLibraryJob)).resolves.toBe(JobStatus.FAILED); - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED); - expect(libraryMock.get).not.toHaveBeenCalled(); - expect(assetMock.create).not.toHaveBeenCalled(); - }); - - it('should skip if the file could not be found', async () => { - const error = new Error('File not found') as any; - error.code = 'ENOENT'; - storageMock.stat.mockRejectedValue(error); - - const mockLibraryJob: ILibraryFileJob = { - id: libraryStub.externalLibrary1.id, - ownerId: userStub.admin.id, - assetPath: '/data/user1/photo.jpg', - }; - - assetMock.create.mockResolvedValue(assetStub.image); - - await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED); - expect(libraryMock.get).not.toHaveBeenCalled(); - expect(assetMock.create).not.toHaveBeenCalled(); + expect(assetMock.createAll.mock.calls).toEqual([]); }); }); @@ -638,19 +584,10 @@ describe(LibraryService.name, () => { describe('getStatistics', () => { 
it('should return library statistics', async () => { - libraryMock.getStatistics.mockResolvedValue({ photos: 10, videos: 0, total: 10, usage: 1337 }); - await expect(sut.getStatistics(libraryStub.externalLibrary1.id)).resolves.toEqual({ - photos: 10, - videos: 0, - total: 10, - usage: 1337, - }); - - expect(libraryMock.getStatistics).toHaveBeenCalledWith(libraryStub.externalLibrary1.id); - }); + assetMock.getAssetCount.mockResolvedValue(10); + await expect(sut.getStatistics(libraryStub.externalLibrary1.id)).resolves.toEqual(10); - it('should throw an error if the library could not be found', async () => { - await expect(sut.getStatistics('foo')).rejects.toBeInstanceOf(BadRequestException); + expect(assetMock.getAssetCount).toHaveBeenCalledWith({ libraryId: libraryStub.externalLibrary1.id }); }); }); @@ -902,19 +839,13 @@ describe(LibraryService.name, () => { await sut.watchAll(); - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { - name: JobName.LIBRARY_SYNC_FILES, - data: { - id: libraryStub.externalLibraryWithImportPaths1.id, - assetPath: '/foo/photo.jpg', - ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id, - }, + expect(jobMock.queue).toHaveBeenCalledWith({ + name: JobName.LIBRARY_SYNC_FILES, + data: { + libraryId: libraryStub.externalLibraryWithImportPaths1.id, + assetPaths: ['/foo/photo.jpg'], }, - ]); - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ ids: [assetStub.image.id] }) }, - ]); + }); }); it('should handle a file change event', async () => { @@ -927,19 +858,13 @@ describe(LibraryService.name, () => { await sut.watchAll(); - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { - name: JobName.LIBRARY_SYNC_FILES, - data: { - id: libraryStub.externalLibraryWithImportPaths1.id, - assetPath: '/foo/photo.jpg', - ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id, - }, + expect(jobMock.queue).toHaveBeenCalledWith({ + name: JobName.LIBRARY_SYNC_FILES, + data: { + libraryId: libraryStub.externalLibraryWithImportPaths1.id, + assetPaths: ['/foo/photo.jpg'], }, - ]); - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ id: [assetStub.image.id] }) }, - ]); + }); }); it('should handle a file unlink event', async () => { @@ -947,14 +872,18 @@ describe(LibraryService.name, () => { libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]); assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image); storageMock.watch.mockImplementation( - makeMockWatcher({ items: [{ event: 'unlink', value: '/foo/photo.jpg' }] }), + makeMockWatcher({ items: [{ event: 'unlink', value: assetStub.image.originalPath }] }), ); await sut.watchAll(); - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ ids: [assetStub.image.id] }) }, - ]); + expect(jobMock.queue).toHaveBeenCalledWith({ + name: JobName.LIBRARY_ASSET_REMOVAL, + data: { + libraryId: libraryStub.externalLibraryWithImportPaths1.id, + assetPaths: [assetStub.image.originalPath], + }, + }); }); it('should handle an error event', async () => { @@ -970,10 +899,10 @@ describe(LibraryService.name, () => { await expect(sut.watchAll()).resolves.toBeUndefined(); }); - it('should ignore unknown extensions', async () => { + it('should not import a file with unknown extension', async () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); 
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]); - storageMock.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.jpg' }] })); + storageMock.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.xyz' }] })); await sut.watchAll(); @@ -1100,27 +1029,6 @@ describe(LibraryService.name, () => { }); }); - describe('handleQueueAssetOfflineCheck', () => { - it('should queue removal jobs', async () => { - libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); - assetMock.getAll.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false }); - assetMock.getById.mockResolvedValue(assetStub.image1); - - await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS); - - expect(jobMock.queueAll).toHaveBeenCalledWith([ - { - name: JobName.LIBRARY_SYNC_ASSETS, - data: { - ids: [assetStub.image1.id], - importPaths: libraryStub.externalLibrary1.importPaths, - exclusionPatterns: libraryStub.externalLibrary1.exclusionPatterns, - }, - }, - ]); - }); - }); - describe('validate', () => { it('should not require import paths', async () => { await expect(sut.validate('library-id', {})).resolves.toEqual({ importPaths: [] }); diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 718c185cbd333..842fab82d8e5f 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -14,7 +14,6 @@ import { ValidateLibraryResponseDto, } from 'src/dtos/library.dto'; import { AssetEntity } from 'src/entities/asset.entity'; -import { LibraryEntity } from 'src/entities/library.entity'; import { AssetStatus, AssetType, ImmichWorker } from 'src/enum'; import { DatabaseLock } from 'src/interfaces/database.interface'; import { ArgOf } from 'src/interfaces/event.interface'; @@ -103,13 +102,21 @@ export class LibraryService extends BaseService { this.logger.debug(`File ${event} event received for ${path} in library ${library.id}}`); await this.jobRepository.queue({ name: JobName.LIBRARY_SYNC_FILES, - data: { libraryId: library.id, ownerId: library.ownerId, assetPaths: [path] }, + data: { libraryId: library.id, assetPaths: [path] }, }); } else { this.logger.verbose(`Ignoring file ${event} event for ${path} in library ${library.id}`); } }; + const deletionHandler = async (path: string) => { + this.logger.debug(`File unlink event received for ${path} in library ${library.id}}`); + await this.jobRepository.queue({ + name: JobName.LIBRARY_ASSET_REMOVAL, + data: { libraryId: library.id, assetPaths: [path] }, + }); + }; + this.watchers[id] = this.storageRepository.watch( library.importPaths, { @@ -125,7 +132,7 @@ export class LibraryService extends BaseService { return handlePromiseError(handler('change', path), this.logger); }, onUnlink: (path) => { - return handlePromiseError(handler('delete', path), this.logger); + return handlePromiseError(deletionHandler(path), this.logger); }, onError: (error) => { this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`); @@ -172,7 +179,7 @@ export class LibraryService extends BaseService { } } - async getAssetCount(id: string): Promise { + async getStatistics(id: string): Promise { const count = await this.assetRepository.getAssetCount({ libraryId: id }); if (count == undefined) { throw new InternalServerErrorException(`Failed to get asset count for library ${id}`); @@ -213,19 +220,27 @@ export class LibraryService extends 
BaseService { @OnJob({ name: JobName.LIBRARY_SYNC_FILES, queue: QueueName.LIBRARY }) async handleSyncFiles(job: JobOf): Promise { const library = await this.libraryRepository.get(job.libraryId); + // We need to check if the library still exists as it could have been deleted after the scan was queued + if (!library) { - // We need to check if the library still exists as it could have been deleted after the scan was queued this.logger.debug(`Library ${job.libraryId} not found, skipping file import`); return JobStatus.FAILED; + } else if (library.deletedAt) { + this.logger.debug(`Library ${job.libraryId} is deleted, won't import assets into it`); + return JobStatus.FAILED; } - const assetImports = job.assetPaths.map((assetPath) => this.processEntity(assetPath, job.ownerId, job.libraryId)); + const assetImports = job.assetPaths.map((assetPath) => + this.processEntity(assetPath, library.ownerId, job.libraryId), + ); const assetIds: string[] = []; - await this.assetRepository - .createAll(assetImports) - .then((assets) => assetIds.push(...assets.map((asset) => asset.id))); + for (let i = 0; i < assetImports.length; i += 5000) { + // Chunk the imports to avoid the postgres limit of max parameters at once + const chunk = assetImports.slice(i, i + 5000); + await this.assetRepository.createAll(chunk).then((assets) => assetIds.push(...assets.map((asset) => asset.id))); + } let progressMessage = ''; @@ -375,7 +390,7 @@ export class LibraryService extends BaseService { } async queuePostSyncJobs(assetIds: string[]) { - this.logger.debug(`Queuing metadata extraction for ${assetIds.length} asset(s)`); + this.logger.debug(`Queuing sidecar discovery for ${assetIds.length} asset(s)`); // We queue a sidecar discovery which, in turn, queues metadata extraction await this.jobRepository.queueAll( @@ -437,27 +452,44 @@ export class LibraryService extends BaseService { const assetIdsToOnline: string[] = []; const assetIdsToUpdate: string[] = []; - this.logger.debug(`Checking batch of ${assets.length} existing asset(s) in library ${job.library.id}`); + this.logger.debug(`Checking batch of ${assets.length} existing asset(s) in library ${job.libraryId}`); for (const asset of assets) { - const action = await this.checkExistingAsset(asset, job.library); + const action = await this.checkExistingAsset(asset, job.libraryId); switch (action) { case AssetSyncResult.OFFLINE: { assetIdsToOffline.push(asset.id); break; } - case AssetSyncResult.ONLINE: { - assetIdsToOnline.push(asset.id); - break; - } case AssetSyncResult.UPDATE: { assetIdsToUpdate.push(asset.id); break; } + case AssetSyncResult.CHECK_OFFLINE: { + const isInImportPath = job.importPaths.find((path) => asset.originalPath.startsWith(path)); + + if (isInImportPath) { + const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern)); + + if (isExcluded) { + this.logger.verbose( + `Offline asset ${asset.originalPath} is in an import path but still covered by exclusion pattern, keeping offline in library ${job.libraryId}`, + ); + } else { + this.logger.debug(`Offline asset ${asset.originalPath} is now online in library ${job.libraryId}`); + assetIdsToOnline.push(asset.id); + } + } else { + this.logger.verbose( + `Offline asset ${asset.originalPath} is still not in any import path, keeping offline in library ${job.libraryId}`, + ); + } + break; + } } } - let progressMessage = ''; + const progressMessage: string[] = []; if (assetIdsToOffline.length > 0) { await this.assetRepository.updateAll(assetIdsToOffline, { @@ -466,7 
+498,7 @@ export class LibraryService extends BaseService { deletedAt: new Date(), }); - progressMessage += `${assetIdsToOffline.length} offlined`; + progressMessage.push(`${assetIdsToOffline.length} offlined`); } if (assetIdsToOnline.length > 0) { @@ -478,32 +510,20 @@ export class LibraryService extends BaseService { }); await this.queuePostSyncJobs(assetIdsToOnline); - if (progressMessage) { - progressMessage += ', '; - } - - progressMessage += `${assetIdsToOnline.length} onlined`; + progressMessage.push(`${assetIdsToOnline.length} onlined`); } if (assetIdsToUpdate.length > 0) { //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed await this.queuePostSyncJobs(assetIdsToUpdate); - if (progressMessage) { - progressMessage += ', '; - } - - progressMessage += `${assetIdsToUpdate.length} updated`; + progressMessage.push(`${assetIdsToUpdate.length} updated`); } const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length - assetIdsToOnline.length; if (remainingCount) { - if (progressMessage) { - progressMessage += ', '; - } - - progressMessage += `${remainingCount} unchanged`; + progressMessage.push(`${remainingCount} unchanged`); } let cumulativeProgressMessage = ''; @@ -515,80 +535,44 @@ export class LibraryService extends BaseService { } this.logger.log( - `Checked existing asset(s): ${progressMessage} of current batch of ${assets.length} ${cumulativeProgressMessage}in library ${job.library.id}.`, + `Checked existing asset(s): ${progressMessage.join(', ')} of current batch of ${assets.length} ${cumulativeProgressMessage}in library ${job.libraryId}.`, ); return JobStatus.SUCCESS; } - private checkOfflineAsset(asset: AssetEntity, library: LibraryEntity): boolean { - if (!asset.libraryId) { - return false; - } - - if (!library) { - return false; - } - - const isInImportPath = library.importPaths.find((path) => asset.originalPath.startsWith(path)); - if (!isInImportPath) { - return false; - } - - const isExcluded = library.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern)); - if (isExcluded) { - return false; - } - - return true; - } - - private async checkExistingAsset(asset: AssetEntity, library: LibraryEntity): Promise { - this.logger.verbose(`Checking existing asset ${asset.originalPath} in library ${library.id}`); - - if (!asset) { - return AssetSyncResult.DO_NOTHING; - } + private async checkExistingAsset(asset: AssetEntity, libraryId: string): Promise { + this.logger.verbose(`Checking existing asset ${asset.originalPath} in library ${libraryId}`); let stat; try { stat = await this.storageRepository.stat(asset.originalPath); } catch { + // File not found on disk or permission error if (asset.isOffline) { + this.logger.verbose( + `Asset ${asset.originalPath} is still not accessible, keeping offline in library ${libraryId}`, + ); return AssetSyncResult.DO_NOTHING; } this.logger.debug( - `Asset is no longer on disk or is inaccessible because of permissions, moving to trash: ${asset.originalPath}`, + `Asset ${asset.originalPath} is no longer on disk or is inaccessible because of permissions, moving to trash in library ${libraryId}`, ); return AssetSyncResult.OFFLINE; } - const mtime = stat.mtime; - const isAssetTimeUpdated = mtime.toISOString() !== asset.fileModifiedAt.toISOString(); - - let shouldAssetGoOnline = false; - if (asset.isOffline && asset.status != AssetStatus.DELETED) { // Only perform the expensive check if the asset is offline - - // TODO: give more feedback on why asset was 
onlined - shouldAssetGoOnline = this.checkOfflineAsset(asset, library); - - if (shouldAssetGoOnline) { - this.logger.debug(`Asset is back online: ${asset.originalPath}`); - - return AssetSyncResult.ONLINE; - } else { - this.logger.debug(`Asset is still offline: ${asset.originalPath}`); - - return AssetSyncResult.DO_NOTHING; - } + return AssetSyncResult.CHECK_OFFLINE; } - if (isAssetTimeUpdated) { + const mtime = stat.mtime; + const isTimeUpdated = mtime.toISOString() !== asset.fileModifiedAt.toISOString(); + + if (isTimeUpdated) { this.logger.verbose( - `Asset ${asset.originalPath} modification time changed from ${asset.fileModifiedAt?.toISOString()} to ${mtime.toISOString()}, queuing re-import. Creation time is ${asset.fileCreatedAt?.toISOString()}`, + `Asset ${asset.originalPath} modification time changed from ${asset.fileModifiedAt?.toISOString()} to ${mtime.toISOString()}, queuing re-import in library ${libraryId}`, ); return AssetSyncResult.UPDATE; @@ -638,7 +622,7 @@ export class LibraryService extends BaseService { for await (const pathBatch of pathsOnDisk) { crawlCount += pathBatch.length; - const newPaths = await this.assetRepository.getNewPaths(library.id, pathBatch); + const newPaths = await this.assetRepository.filterNewExternalAssetPaths(library.id, pathBatch); if (newPaths.length > 0) { importCount += newPaths.length; @@ -647,7 +631,6 @@ export class LibraryService extends BaseService { name: JobName.LIBRARY_SYNC_FILES, data: { libraryId: library.id, - ownerId: library.ownerId, assetPaths: newPaths, progressCounter: crawlCount, }, @@ -679,6 +662,20 @@ export class LibraryService extends BaseService { return JobStatus.SUCCESS; } + @OnJob({ name: JobName.LIBRARY_ASSET_REMOVAL, queue: QueueName.LIBRARY }) + async handleAssetRemoval(job: JobOf): Promise { + // This is only for handling file unlink events via the file watcher + this.logger.verbose(`Deleting asset(s) ${job.assetPaths} from library ${job.libraryId}`); + for (const assetPath of job.assetPaths) { + const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.libraryId, assetPath); + if (asset) { + await this.assetRepository.remove(asset); + } + } + + return JobStatus.SUCCESS; + } + @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, queue: QueueName.LIBRARY }) async handleQueueSyncAssets(job: JobOf): Promise { const library = await this.libraryRepository.get(job.id); @@ -697,19 +694,10 @@ export class LibraryService extends BaseService { `Checking ${assetCount} asset(s) against import paths and exclusion patterns in library ${library.id}...`, ); - const offlineResult = await this.assetRepository.updateOffline(library); - - if (offlineResult.numUpdatedRows > Number.MAX_SAFE_INTEGER) { - throw new InternalServerErrorException(`Affected asset count is too large: ${offlineResult.numUpdatedRows}`); - } + const offlineResult = await this.assetRepository.detectOfflineExternalAssets(library); const affectedAssetCount = Number(offlineResult.numUpdatedRows); - if (affectedAssetCount === undefined) { - this.logger.error(`Unknown error occurred when updating offline status in ${library.id}`); - return JobStatus.FAILED; - } - if (affectedAssetCount === assetCount) { this.logger.log( `All ${assetCount} asset(s) were offlined due to import paths and/or exclusion pattern(s) in ${library.id}`, @@ -741,7 +729,9 @@ export class LibraryService extends BaseService { await this.jobRepository.queue({ name: JobName.LIBRARY_SYNC_ASSETS, data: { - library, + libraryId: library.id, + importPaths: library.importPaths, + 
exclusionPatterns: library.exclusionPatterns, assetIds: assets.map((asset) => asset.id), progressCounter: currentAssetCount, totalAssets: assetCount, diff --git a/server/src/services/metadata.service.spec.ts b/server/src/services/metadata.service.spec.ts index a92433e88f6db..e6f9093d6c3f9 100644 --- a/server/src/services/metadata.service.spec.ts +++ b/server/src/services/metadata.service.spec.ts @@ -270,6 +270,7 @@ describe(MetadataService.name, () => { id: assetStub.image.id, duration: null, fileCreatedAt: sidecarDate, + fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'), localDateTime: sidecarDate, }); }); @@ -287,6 +288,7 @@ describe(MetadataService.name, () => { id: assetStub.image.id, duration: null, fileCreatedAt: fileModifiedAt, + fileModifiedAt, localDateTime: fileModifiedAt, }); }); @@ -304,6 +306,7 @@ describe(MetadataService.name, () => { id: assetStub.image.id, duration: null, fileCreatedAt, + fileModifiedAt, localDateTime: fileCreatedAt, }); }); @@ -338,6 +341,7 @@ describe(MetadataService.name, () => { id: assetStub.image.id, duration: null, fileCreatedAt: assetStub.image.createdAt, + fileModifiedAt: assetStub.image.createdAt, localDateTime: new Date('2023-02-23T05:06:29.716Z'), }); }); @@ -360,6 +364,7 @@ describe(MetadataService.name, () => { id: assetStub.withLocation.id, duration: null, fileCreatedAt: assetStub.withLocation.createdAt, + fileModifiedAt: new Date('2023-02-22T05:06:29.716Z'), localDateTime: new Date('2023-02-22T05:06:29.716Z'), }); }); @@ -786,6 +791,7 @@ describe(MetadataService.name, () => { MicroVideo: 1, MicroVideoOffset: 1, }); + storageMock.stat.mockResolvedValue({ mtime: new Date('1970-01-01T00:00:00.000-11:30') } as Stats); cryptoMock.hashSha1.mockReturnValue(randomBytes(512)); assetMock.create.mockResolvedValue(assetStub.livePhotoMotionAsset); const video = randomBytes(512); @@ -861,6 +867,7 @@ describe(MetadataService.name, () => { id: assetStub.image.id, duration: null, fileCreatedAt: dateForTest, + fileModifiedAt: dateForTest, localDateTime: dateForTest, }); }); diff --git a/server/src/services/metadata.service.ts b/server/src/services/metadata.service.ts index 338c8c33c5c01..4b092d27614e4 100644 --- a/server/src/services/metadata.service.ts +++ b/server/src/services/metadata.service.ts @@ -598,7 +598,7 @@ export class MetadataService extends BaseService { if (timeZone) { this.logger.verbose(`Asset ${asset.id} timezone is ${timeZone} (via ${exifTags.tzSource})`); } else { - this.logger.warn(`Asset ${asset.id} has no time zone information`); + this.logger.verbose(`Asset ${asset.id} has no time zone information`); } let fileCreatedAt = asset.fileCreatedAt; diff --git a/server/src/utils/misc.spec.ts b/server/src/utils/misc.spec.ts index 87ab6d4399bbf..3c454829382dc 100644 --- a/server/src/utils/misc.spec.ts +++ b/server/src/utils/misc.spec.ts @@ -59,7 +59,8 @@ describe('globToSqlPattern', () => { ['**/*.tif', '%/%.tif'], ['**/*.jp?', '%/%.jp_'], ['**/@eaDir/**', '%/@eaDir/%'], - ['**/._*', `%/.\\_%`], + ['**/._*', `%/._%`], + ['/absolute/path/**', `/absolute/path/%`], ]; it.each(testCases)('should convert %s to %s', (input, expected) => { diff --git a/server/src/utils/misc.ts b/server/src/utils/misc.ts index 3543cf20b02f5..81ab43ecef803 100644 --- a/server/src/utils/misc.ts +++ b/server/src/utils/misc.ts @@ -291,16 +291,6 @@ const convertTokenToSqlPattern = (token: any): string => { case 'dot': { return '.'; } - case 'bracket': { - return `[${token.value}]`; - } - case 'negate': { - return `[^${token.value}]`; - } - case 'brace': { - const options 
= token.value.split(','); - return `(${options.join('|')})`; - } default: { return ''; } diff --git a/server/test/repositories/asset.repository.mock.ts b/server/test/repositories/asset.repository.mock.ts index a978fbe08b26a..107b81b977042 100644 --- a/server/test/repositories/asset.repository.mock.ts +++ b/server/test/repositories/asset.repository.mock.ts @@ -41,7 +41,7 @@ export const newAssetRepositoryMock = (): Mocked => { getDuplicates: vitest.fn(), upsertFile: vitest.fn(), upsertFiles: vitest.fn(), - updateOffline: vitest.fn(), - getNewPaths: vitest.fn(), + detectOfflineExternalAssets: vitest.fn(), + filterNewExternalAssetPaths: vitest.fn(), }; }; From fc0560898564e1407e0a6d14d6736eec95dd1957 Mon Sep 17 00:00:00 2001 From: Jonathan Jogenfors Date: Tue, 14 Jan 2025 23:17:17 +0100 Subject: [PATCH 10/10] Fix error with asset count --- server/src/interfaces/asset.interface.ts | 2 +- server/src/repositories/asset.repository.ts | 10 +++++++--- server/src/services/library.service.spec.ts | 10 +++++----- server/src/services/library.service.ts | 4 ++-- server/test/repositories/asset.repository.mock.ts | 2 +- 5 files changed, 16 insertions(+), 12 deletions(-) diff --git a/server/src/interfaces/asset.interface.ts b/server/src/interfaces/asset.interface.ts index 2296091064889..f6254fc850a63 100644 --- a/server/src/interfaces/asset.interface.ts +++ b/server/src/interfaces/asset.interface.ts @@ -172,5 +172,5 @@ export interface IAssetRepository { upsertFiles(files: UpsertFileOptions[]): Promise; detectOfflineExternalAssets(library: LibraryEntity): Promise; filterNewExternalAssetPaths(libraryId: string, paths: string[]): Promise; - getAssetCount(options: AssetSearchOptions): Promise; + getLibraryAssetCount(options: AssetSearchOptions): Promise; } diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index abd827213d549..72027288bc468 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -794,11 +794,15 @@ export class AssetRepository implements IAssetRepository { return result.map((row) => row.path as string); } - async getAssetCount(options: AssetSearchOptions = {}): Promise { - const { count } = await searchAssetBuilder(this.db, options) - .select(sql`COUNT(*) OVER ()`.as('count')) + async getLibraryAssetCount(options: AssetSearchOptions = {}): Promise { + const { count } = await this.db + .selectFrom('assets') + .select(sql`COUNT(*)`.as('count')) + .where('libraryId', '=', asUuid(options.libraryId!)) .executeTakeFirstOrThrow(); + console.log(count); + return count as number; } } diff --git a/server/src/services/library.service.spec.ts b/server/src/services/library.service.spec.ts index f4c02c4805f0d..9e75972411264 100644 --- a/server/src/services/library.service.spec.ts +++ b/server/src/services/library.service.spec.ts @@ -227,7 +227,7 @@ describe(LibraryService.name, () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); storageMock.walk.mockImplementation(async function* generator() {}); assetMock.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); - assetMock.getAssetCount.mockResolvedValue(1); + assetMock.getLibraryAssetCount.mockResolvedValue(1); assetMock.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) }); const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id }); @@ -240,7 +240,7 @@ describe(LibraryService.name, () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); 
storageMock.walk.mockImplementation(async function* generator() {}); assetMock.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); - assetMock.getAssetCount.mockResolvedValue(0); + assetMock.getLibraryAssetCount.mockResolvedValue(0); assetMock.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) }); const response = await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id }); @@ -253,7 +253,7 @@ describe(LibraryService.name, () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); storageMock.walk.mockImplementation(async function* generator() {}); assetMock.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); - assetMock.getAssetCount.mockResolvedValue(1); + assetMock.getLibraryAssetCount.mockResolvedValue(1); assetMock.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(0) }); assetMock.getAllInLibrary.mockResolvedValue({ items: [assetStub.external], hasNextPage: false }); @@ -584,10 +584,10 @@ describe(LibraryService.name, () => { describe('getStatistics', () => { it('should return library statistics', async () => { - assetMock.getAssetCount.mockResolvedValue(10); + assetMock.getLibraryAssetCount.mockResolvedValue(10); await expect(sut.getStatistics(libraryStub.externalLibrary1.id)).resolves.toEqual(10); - expect(assetMock.getAssetCount).toHaveBeenCalledWith({ libraryId: libraryStub.externalLibrary1.id }); + expect(assetMock.getLibraryAssetCount).toHaveBeenCalledWith({ libraryId: libraryStub.externalLibrary1.id }); }); }); diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 4a9aefd0799d8..200bf540e8e0a 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -180,7 +180,7 @@ export class LibraryService extends BaseService { } async getStatistics(id: string): Promise { - const count = await this.assetRepository.getAssetCount({ libraryId: id }); + const count = await this.assetRepository.getLibraryAssetCount({ libraryId: id }); if (count == undefined) { throw new InternalServerErrorException(`Failed to get asset count for library ${id}`); } @@ -682,7 +682,7 @@ export class LibraryService extends BaseService { return JobStatus.SKIPPED; } - const assetCount = await this.assetRepository.getAssetCount({ libraryId: job.id, withDeleted: true }); + const assetCount = await this.assetRepository.getLibraryAssetCount({ libraryId: job.id, withDeleted: true }); if (!assetCount) { this.logger.log(`Library ${library.id} is empty, no need to check assets`); diff --git a/server/test/repositories/asset.repository.mock.ts b/server/test/repositories/asset.repository.mock.ts index 107b81b977042..01d73e418d06e 100644 --- a/server/test/repositories/asset.repository.mock.ts +++ b/server/test/repositories/asset.repository.mock.ts @@ -24,7 +24,7 @@ export const newAssetRepositoryMock = (): Mocked => { getAllByDeviceId: vitest.fn(), getLivePhotoCount: vitest.fn(), getAllInLibrary: vitest.fn(), - getAssetCount: vitest.fn(), + getLibraryAssetCount: vitest.fn(), updateAll: vitest.fn(), updateDuplicates: vitest.fn(), getByLibraryIdAndOriginalPath: vitest.fn(),
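
For reference, the glob-to-LIKE translation exercised by the `globToSqlPattern` test cases in `misc.spec.ts` above can be approximated with a few string replacements. This is a minimal sketch for illustration only, not the actual implementation (the real code walks picomatch tokens via `convertTokenToSqlPattern`); the helper name below is hypothetical.

```ts
// Hypothetical sketch of the glob -> SQL LIKE translation, for illustration only.
// It merely reproduces the expectations listed in the misc.spec.ts test table above.
const globToLikeSketch = (pattern: string): string =>
  pattern
    .replaceAll('**', '%') // any number of path segments
    .replaceAll('*', '%') // any run of characters within a segment
    .replaceAll('?', '_'); // exactly one character

// Examples matching the test table:
// globToLikeSketch('**/@eaDir/**')      === '%/@eaDir/%'
// globToLikeSketch('**/*.jp?')          === '%/%.jp_'
// globToLikeSketch('/absolute/path/**') === '/absolute/path/%'
```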