diff --git a/e2e/src/api/specs/library.e2e-spec.ts b/e2e/src/api/specs/library.e2e-spec.ts index 3f910fa1e3756..1959a230ad231 100644 --- a/e2e/src/api/specs/library.e2e-spec.ts +++ b/e2e/src/api/specs/library.e2e-spec.ts @@ -421,7 +421,7 @@ describe('/libraries', () => { const { status } = await request(app) .post(`/libraries/${library.id}/scan`) .set('Authorization', `Bearer ${admin.accessToken}`) - .send({ refreshModifiedFiles: true }); + .send(); expect(status).toBe(204); await utils.waitForQueueFinish(admin.accessToken, 'library'); @@ -453,7 +453,7 @@ describe('/libraries', () => { const { status } = await request(app) .post(`/libraries/${library.id}/scan`) .set('Authorization', `Bearer ${admin.accessToken}`) - .send({ refreshModifiedFiles: true }); + .send(); expect(status).toBe(204); await utils.waitForQueueFinish(admin.accessToken, 'library'); @@ -499,7 +499,7 @@ describe('/libraries', () => { expect(newAssets.items).toEqual([]); }); - it('should set an asset offline its file is not in any import path', async () => { + it('should set an asset offline if its file is not in any import path', async () => { utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); const library = await utils.createLibrary(admin.accessToken, { @@ -577,7 +577,7 @@ describe('/libraries', () => { ]); }); - it('should not trash an online asset', async () => { + it('should not set an asset offline if its file exists, is in an import path, and not covered by an exclusion pattern', async () => { const library = await utils.createLibrary(admin.accessToken, { ownerId: admin.userId, importPaths: [`${testAssetDirInternal}/temp`], @@ -601,6 +601,195 @@ describe('/libraries', () => { expect(assets).toEqual(assetsBefore); }); + + it('should set an offline asset to online if its file exists, is in an import path, and not covered by an exclusion pattern', async () => { + utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); + + const library = await utils.createLibrary(admin.accessToken, { + ownerId: admin.userId, + importPaths: [`${testAssetDirInternal}/temp/offline`], + }); + + await scan(admin.accessToken, library.id); + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + + utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + expect(offlineAsset.isTrashed).toBe(true); + expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(offlineAsset.isOffline).toBe(true); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const backOnlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + 
expect(backOnlineAsset.isTrashed).toBe(false); + expect(backOnlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(backOnlineAsset.isOffline).toBe(false); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + expect(assets.count).toBe(1); + } + }); + + it('should not set an offline asset to online if its file exists, is not covered by an exclusion pattern, but is outside of all import paths', async () => { + utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); + + const library = await utils.createLibrary(admin.accessToken, { + ownerId: admin.userId, + importPaths: [`${testAssetDirInternal}/temp/offline`], + }); + + await scan(admin.accessToken, library.id); + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + + utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(offlineAsset.isTrashed).toBe(true); + expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(offlineAsset.isOffline).toBe(true); + + utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`); + + utils.createDirectory(`${testAssetDir}/temp/another-path/`); + + await utils.updateLibrary(admin.accessToken, library.id, { + importPaths: [`${testAssetDirInternal}/temp/another-path`], + }); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const stillOfflineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(stillOfflineAsset.isTrashed).toBe(true); + expect(stillOfflineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(stillOfflineAsset.isOffline).toBe(true); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + utils.removeDirectory(`${testAssetDir}/temp/another-path/`); + }); + + it('should not set an offline asset to online if its file exists, is in an import path, but is covered by an exclusion pattern', async () => { + utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`); + + const library = await utils.createLibrary(admin.accessToken, { + ownerId: admin.userId, + importPaths: [`${testAssetDirInternal}/temp/offline`], + }); + + await scan(admin.accessToken, library.id); + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id }); + + utils.renameImageFile(`${testAssetDir}/temp/offline/offline.png`, `${testAssetDir}/temp/offline.png`); + + { + const { status } = await request(app) + 
.post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + + const offlineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(offlineAsset.isTrashed).toBe(true); + expect(offlineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(offlineAsset.isOffline).toBe(true); + + utils.renameImageFile(`${testAssetDir}/temp/offline.png`, `${testAssetDir}/temp/offline/offline.png`); + + await utils.updateLibrary(admin.accessToken, library.id, { exclusionPatterns: ['**/offline/**'] }); + + { + const { status } = await request(app) + .post(`/libraries/${library.id}/scan`) + .set('Authorization', `Bearer ${admin.accessToken}`) + .send(); + expect(status).toBe(204); + } + + await utils.waitForQueueFinish(admin.accessToken, 'library'); + + const stillOfflineAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id); + + expect(stillOfflineAsset.isTrashed).toBe(true); + expect(stillOfflineAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`); + expect(stillOfflineAsset.isOffline).toBe(true); + + { + const { assets } = await utils.searchAssets(admin.accessToken, { libraryId: library.id, withDeleted: true }); + expect(assets.count).toBe(1); + } + }); }); describe('POST /libraries/:id/validate', () => { diff --git a/e2e/src/utils.ts b/e2e/src/utils.ts index 14225ff063038..b00c3c0b6d30d 100644 --- a/e2e/src/utils.ts +++ b/e2e/src/utils.ts @@ -10,6 +10,7 @@ import { Permission, PersonCreateDto, SharedLinkCreateDto, + UpdateLibraryDto, UserAdminCreateDto, UserPreferencesUpdateDto, ValidateLibraryDto, @@ -35,6 +36,7 @@ import { updateAlbumUser, updateAssets, updateConfig, + updateLibrary, updateMyPreferences, upsertTags, validate, @@ -42,7 +44,7 @@ import { import { BrowserContext } from '@playwright/test'; import { exec, spawn } from 'node:child_process'; import { createHash } from 'node:crypto'; -import { existsSync, mkdirSync, rmSync, writeFileSync } from 'node:fs'; +import { existsSync, mkdirSync, renameSync, rmSync, writeFileSync } from 'node:fs'; import { tmpdir } from 'node:os'; import path, { dirname } from 'node:path'; import { setTimeout as setAsyncTimeout } from 'node:timers/promises'; @@ -392,6 +394,14 @@ export const utils = { rmSync(path); }, + renameImageFile: (oldPath: string, newPath: string) => { + if (!existsSync(oldPath)) { + return; + } + + renameSync(oldPath, newPath); + }, + removeDirectory: (path: string) => { if (!existsSync(path)) { return; @@ -444,6 +454,9 @@ export const utils = { createLibrary: (accessToken: string, dto: CreateLibraryDto) => createLibrary({ createLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), + updateLibrary: (accessToken: string, id: string, dto: UpdateLibraryDto) => + updateLibrary({ id, updateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), + validateLibrary: (accessToken: string, id: string, dto: ValidateLibraryDto) => validate({ id, validateLibraryDto: dto }, { headers: asBearerAuth(accessToken) }), diff --git a/e2e/test-assets b/e2e/test-assets index 99544a200412d..c4a0575c3e89a 160000 --- a/e2e/test-assets +++ b/e2e/test-assets @@ -1 +1 @@ -Subproject commit 99544a200412d553103cc7b8f1a28f339c7cffd9 +Subproject commit 
c4a0575c3e89a755b951ae6d91e7307cd34c606f diff --git a/mobile/openapi/README.md b/mobile/openapi/README.md index b336b1bfb6f40..e03f4dac77564 100644 --- a/mobile/openapi/README.md +++ b/mobile/openapi/README.md @@ -130,8 +130,8 @@ Class | Method | HTTP request | Description *LibrariesApi* | [**createLibrary**](doc//LibrariesApi.md#createlibrary) | **POST** /libraries | *LibrariesApi* | [**deleteLibrary**](doc//LibrariesApi.md#deletelibrary) | **DELETE** /libraries/{id} | *LibrariesApi* | [**getAllLibraries**](doc//LibrariesApi.md#getalllibraries) | **GET** /libraries | +*LibrariesApi* | [**getAssetCount**](doc//LibrariesApi.md#getassetcount) | **GET** /libraries/{id}/count | *LibrariesApi* | [**getLibrary**](doc//LibrariesApi.md#getlibrary) | **GET** /libraries/{id} | -*LibrariesApi* | [**getLibraryStatistics**](doc//LibrariesApi.md#getlibrarystatistics) | **GET** /libraries/{id}/statistics | *LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan | *LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} | *LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate | @@ -337,7 +337,6 @@ Class | Method | HTTP request | Description - [JobSettingsDto](doc//JobSettingsDto.md) - [JobStatusDto](doc//JobStatusDto.md) - [LibraryResponseDto](doc//LibraryResponseDto.md) - - [LibraryStatsResponseDto](doc//LibraryStatsResponseDto.md) - [LicenseKeyDto](doc//LicenseKeyDto.md) - [LicenseResponseDto](doc//LicenseResponseDto.md) - [LogLevel](doc//LogLevel.md) diff --git a/mobile/openapi/lib/api.dart b/mobile/openapi/lib/api.dart index 73eb02d89ed7a..3fccede06eb50 100644 --- a/mobile/openapi/lib/api.dart +++ b/mobile/openapi/lib/api.dart @@ -150,7 +150,6 @@ part 'model/job_name.dart'; part 'model/job_settings_dto.dart'; part 'model/job_status_dto.dart'; part 'model/library_response_dto.dart'; -part 'model/library_stats_response_dto.dart'; part 'model/license_key_dto.dart'; part 'model/license_response_dto.dart'; part 'model/log_level.dart'; diff --git a/mobile/openapi/lib/api/libraries_api.dart b/mobile/openapi/lib/api/libraries_api.dart index 36d98d9a88a78..6010b7a9fcf56 100644 --- a/mobile/openapi/lib/api/libraries_api.dart +++ b/mobile/openapi/lib/api/libraries_api.dart @@ -147,13 +147,13 @@ class LibrariesApi { return null; } - /// Performs an HTTP 'GET /libraries/{id}' operation and returns the [Response]. + /// Performs an HTTP 'GET /libraries/{id}/count' operation and returns the [Response]. /// Parameters: /// /// * [String] id (required): - Future getLibraryWithHttpInfo(String id,) async { + Future getAssetCountWithHttpInfo(String id,) async { // ignore: prefer_const_declarations - final path = r'/libraries/{id}' + final path = r'/libraries/{id}/count' .replaceAll('{id}', id); // ignore: prefer_final_locals @@ -180,8 +180,8 @@ class LibrariesApi { /// Parameters: /// /// * [String] id (required): - Future getLibrary(String id,) async { - final response = await getLibraryWithHttpInfo(id,); + Future getAssetCount(String id,) async { + final response = await getAssetCountWithHttpInfo(id,); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -189,19 +189,19 @@ class LibrariesApi { // At the time of writing this, `dart:convert` will throw an "Unexpected end of input" // FormatException when trying to decode an empty string. 
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) { - return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryResponseDto',) as LibraryResponseDto; + return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'num',) as num; } return null; } - /// Performs an HTTP 'GET /libraries/{id}/statistics' operation and returns the [Response]. + /// Performs an HTTP 'GET /libraries/{id}' operation and returns the [Response]. /// Parameters: /// /// * [String] id (required): - Future getLibraryStatisticsWithHttpInfo(String id,) async { + Future getLibraryWithHttpInfo(String id,) async { // ignore: prefer_const_declarations - final path = r'/libraries/{id}/statistics' + final path = r'/libraries/{id}' .replaceAll('{id}', id); // ignore: prefer_final_locals @@ -228,8 +228,8 @@ class LibrariesApi { /// Parameters: /// /// * [String] id (required): - Future getLibraryStatistics(String id,) async { - final response = await getLibraryStatisticsWithHttpInfo(id,); + Future getLibrary(String id,) async { + final response = await getLibraryWithHttpInfo(id,); if (response.statusCode >= HttpStatus.badRequest) { throw ApiException(response.statusCode, await _decodeBodyBytes(response)); } @@ -237,7 +237,7 @@ class LibrariesApi { // At the time of writing this, `dart:convert` will throw an "Unexpected end of input" // FormatException when trying to decode an empty string. if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) { - return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryStatsResponseDto',) as LibraryStatsResponseDto; + return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'LibraryResponseDto',) as LibraryResponseDto; } return null; diff --git a/mobile/openapi/lib/api_client.dart b/mobile/openapi/lib/api_client.dart index a6f8d551da81c..aa5db6589b462 100644 --- a/mobile/openapi/lib/api_client.dart +++ b/mobile/openapi/lib/api_client.dart @@ -354,8 +354,6 @@ class ApiClient { return JobStatusDto.fromJson(value); case 'LibraryResponseDto': return LibraryResponseDto.fromJson(value); - case 'LibraryStatsResponseDto': - return LibraryStatsResponseDto.fromJson(value); case 'LicenseKeyDto': return LicenseKeyDto.fromJson(value); case 'LicenseResponseDto': diff --git a/mobile/openapi/lib/model/library_stats_response_dto.dart b/mobile/openapi/lib/model/library_stats_response_dto.dart deleted file mode 100644 index afe67da31a251..0000000000000 --- a/mobile/openapi/lib/model/library_stats_response_dto.dart +++ /dev/null @@ -1,123 +0,0 @@ -// -// AUTO-GENERATED FILE, DO NOT MODIFY! -// -// @dart=2.18 - -// ignore_for_file: unused_element, unused_import -// ignore_for_file: always_put_required_named_parameters_first -// ignore_for_file: constant_identifier_names -// ignore_for_file: lines_longer_than_80_chars - -part of openapi.api; - -class LibraryStatsResponseDto { - /// Returns a new [LibraryStatsResponseDto] instance. 
- LibraryStatsResponseDto({ - this.photos = 0, - this.total = 0, - this.usage = 0, - this.videos = 0, - }); - - int photos; - - int total; - - int usage; - - int videos; - - @override - bool operator ==(Object other) => identical(this, other) || other is LibraryStatsResponseDto && - other.photos == photos && - other.total == total && - other.usage == usage && - other.videos == videos; - - @override - int get hashCode => - // ignore: unnecessary_parenthesis - (photos.hashCode) + - (total.hashCode) + - (usage.hashCode) + - (videos.hashCode); - - @override - String toString() => 'LibraryStatsResponseDto[photos=$photos, total=$total, usage=$usage, videos=$videos]'; - - Map toJson() { - final json = {}; - json[r'photos'] = this.photos; - json[r'total'] = this.total; - json[r'usage'] = this.usage; - json[r'videos'] = this.videos; - return json; - } - - /// Returns a new [LibraryStatsResponseDto] instance and imports its values from - /// [value] if it's a [Map], null otherwise. - // ignore: prefer_constructors_over_static_methods - static LibraryStatsResponseDto? fromJson(dynamic value) { - upgradeDto(value, "LibraryStatsResponseDto"); - if (value is Map) { - final json = value.cast(); - - return LibraryStatsResponseDto( - photos: mapValueOfType(json, r'photos')!, - total: mapValueOfType(json, r'total')!, - usage: mapValueOfType(json, r'usage')!, - videos: mapValueOfType(json, r'videos')!, - ); - } - return null; - } - - static List listFromJson(dynamic json, {bool growable = false,}) { - final result = []; - if (json is List && json.isNotEmpty) { - for (final row in json) { - final value = LibraryStatsResponseDto.fromJson(row); - if (value != null) { - result.add(value); - } - } - } - return result.toList(growable: growable); - } - - static Map mapFromJson(dynamic json) { - final map = {}; - if (json is Map && json.isNotEmpty) { - json = json.cast(); // ignore: parameter_assignments - for (final entry in json.entries) { - final value = LibraryStatsResponseDto.fromJson(entry.value); - if (value != null) { - map[entry.key] = value; - } - } - } - return map; - } - - // maps a json object with a list of LibraryStatsResponseDto-objects as value to a dart map - static Map> mapListFromJson(dynamic json, {bool growable = false,}) { - final map = >{}; - if (json is Map && json.isNotEmpty) { - // ignore: parameter_assignments - json = json.cast(); - for (final entry in json.entries) { - map[entry.key] = LibraryStatsResponseDto.listFromJson(entry.value, growable: growable,); - } - } - return map; - } - - /// The list of required keys that must be present in a JSON. 
- static const requiredKeys = { - 'photos', - 'total', - 'usage', - 'videos', - }; -} - diff --git a/open-api/immich-openapi-specs.json b/open-api/immich-openapi-specs.json index 7c8aba3b5e985..554fed25d6866 100644 --- a/open-api/immich-openapi-specs.json +++ b/open-api/immich-openapi-specs.json @@ -2853,9 +2853,9 @@ ] } }, - "/libraries/{id}/scan": { - "post": { - "operationId": "scanLibrary", + "/libraries/{id}/count": { + "get": { + "operationId": "getAssetCount", "parameters": [ { "name": "id", @@ -2868,7 +2868,14 @@ } ], "responses": { - "204": { + "200": { + "content": { + "application/json": { + "schema": { + "type": "number" + } + } + }, "description": "" } }, @@ -2888,9 +2895,9 @@ ] } }, - "/libraries/{id}/statistics": { - "get": { - "operationId": "getLibraryStatistics", + "/libraries/{id}/scan": { + "post": { + "operationId": "scanLibrary", "parameters": [ { "name": "id", @@ -2903,14 +2910,7 @@ } ], "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryStatsResponseDto" - } - } - }, + "204": { "description": "" } }, @@ -9464,34 +9464,6 @@ ], "type": "object" }, - "LibraryStatsResponseDto": { - "properties": { - "photos": { - "default": 0, - "type": "integer" - }, - "total": { - "default": 0, - "type": "integer" - }, - "usage": { - "default": 0, - "format": "int64", - "type": "integer" - }, - "videos": { - "default": 0, - "type": "integer" - } - }, - "required": [ - "photos", - "total", - "usage", - "videos" - ], - "type": "object" - }, "LicenseKeyDto": { "properties": { "activationKey": { diff --git a/open-api/typescript-sdk/src/fetch-client.ts b/open-api/typescript-sdk/src/fetch-client.ts index c31e71d05e961..f441f47fc51c6 100644 --- a/open-api/typescript-sdk/src/fetch-client.ts +++ b/open-api/typescript-sdk/src/fetch-client.ts @@ -574,12 +574,6 @@ export type UpdateLibraryDto = { importPaths?: string[]; name?: string; }; -export type LibraryStatsResponseDto = { - photos: number; - total: number; - usage: number; - videos: number; -}; export type ValidateLibraryDto = { exclusionPatterns?: string[]; importPaths?: string[]; @@ -2099,22 +2093,22 @@ export function updateLibrary({ id, updateLibraryDto }: { body: updateLibraryDto }))); } -export function scanLibrary({ id }: { +export function getAssetCount({ id }: { id: string; }, opts?: Oazapfts.RequestOpts) { - return oazapfts.ok(oazapfts.fetchText(`/libraries/${encodeURIComponent(id)}/scan`, { - ...opts, - method: "POST" + return oazapfts.ok(oazapfts.fetchJson<{ + status: 200; + data: number; + }>(`/libraries/${encodeURIComponent(id)}/count`, { + ...opts })); } -export function getLibraryStatistics({ id }: { +export function scanLibrary({ id }: { id: string; }, opts?: Oazapfts.RequestOpts) { - return oazapfts.ok(oazapfts.fetchJson<{ - status: 200; - data: LibraryStatsResponseDto; - }>(`/libraries/${encodeURIComponent(id)}/statistics`, { - ...opts + return oazapfts.ok(oazapfts.fetchText(`/libraries/${encodeURIComponent(id)}/scan`, { + ...opts, + method: "POST" })); } export function validate({ id, validateLibraryDto }: { diff --git a/server/src/controllers/library.controller.ts b/server/src/controllers/library.controller.ts index b8959ca28875c..adf0f6c106240 100644 --- a/server/src/controllers/library.controller.ts +++ b/server/src/controllers/library.controller.ts @@ -57,10 +57,10 @@ export class LibraryController { return this.service.validate(id, dto); } - @Get(':id/statistics') + @Get(':id/count') @Authenticated({ permission: Permission.LIBRARY_STATISTICS, admin: 
true }) - getLibraryStatistics(@Param() { id }: UUIDParamDto): Promise { - return this.service.getStatistics(id); + getAssetCount(@Param() { id }: UUIDParamDto): Promise { + return this.service.getAssetCount(id); } @Post(':id/scan') diff --git a/server/src/interfaces/asset.interface.ts b/server/src/interfaces/asset.interface.ts index b25e42ba0e9e0..f9e9a4dd212f9 100644 --- a/server/src/interfaces/asset.interface.ts +++ b/server/src/interfaces/asset.interface.ts @@ -1,10 +1,11 @@ import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity'; import { AssetEntity } from 'src/entities/asset.entity'; import { ExifEntity } from 'src/entities/exif.entity'; +import { LibraryEntity } from 'src/entities/library.entity'; import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum'; import { AssetSearchOptions, SearchExploreItem } from 'src/interfaces/search.interface'; import { Paginated, PaginationOptions } from 'src/utils/pagination'; -import { FindOptionsOrder, FindOptionsRelations, FindOptionsSelect } from 'typeorm'; +import { FindOptionsOrder, FindOptionsRelations, FindOptionsSelect, UpdateResult } from 'typeorm'; export type AssetStats = Record; @@ -155,6 +156,7 @@ export const IAssetRepository = 'IAssetRepository'; export interface IAssetRepository { create(asset: AssetCreate): Promise; + createAll(assets: AssetCreate[]): Promise; getByIds( ids: string[], relations?: FindOptionsRelations, @@ -176,9 +178,9 @@ export interface IAssetRepository { getWithout(pagination: PaginationOptions, property: WithoutProperty): Paginated; getRandom(userIds: string[], count: number): Promise; getLastUpdatedAssetForAlbumId(albumId: string): Promise; - getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string): Promise; deleteAll(ownerId: string): Promise; getAll(pagination: PaginationOptions, options?: AssetSearchOptions): Paginated; + getAllInLibrary(pagination: PaginationOptions, libraryId: string): Paginated; getAllByDeviceId(userId: string, deviceId: string): Promise; getLivePhotoCount(motionId: string): Promise; updateAll(ids: string[], options: Partial): Promise; @@ -197,4 +199,7 @@ export interface IAssetRepository { getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise; upsertFile(file: UpsertFileOptions): Promise; upsertFiles(files: UpsertFileOptions[]): Promise; + updateOffline(library: LibraryEntity): Promise; + getNewPaths(libraryId: string, paths: string[]): Promise; + getAssetCount(options: AssetSearchOptions): Promise; } diff --git a/server/src/interfaces/job.interface.ts b/server/src/interfaces/job.interface.ts index 7976f813022ff..424691f087e09 100644 --- a/server/src/interfaces/job.interface.ts +++ b/server/src/interfaces/job.interface.ts @@ -84,8 +84,8 @@ export enum JobName { // library management LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files', LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets', - LIBRARY_SYNC_FILE = 'library-sync-file', - LIBRARY_SYNC_ASSET = 'library-sync-asset', + LIBRARY_SYNC_FILES = 'library-sync-files', + LIBRARY_SYNC_ASSETS = 'library-sync-assets', LIBRARY_DELETE = 'library-delete', LIBRARY_QUEUE_SYNC_ALL = 'library-queue-sync-all', LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup', @@ -135,7 +135,7 @@ export interface IDelayedJob extends IBaseJob { export interface IEntityJob extends IBaseJob { id: string; - source?: 'upload' | 'sidecar-write' | 'copy'; + source?: 'upload' | 'library-import' | 'sidecar-write' | 'copy'; notify?: boolean; } @@ -143,20 +143,26 @@ export interface IAssetDeleteJob extends 
IEntityJob { deleteOnDisk: boolean; } -export interface ILibraryFileJob extends IEntityJob { +export interface ILibraryFileJob { + libraryId: string; ownerId: string; - assetPath: string; + assetPaths: string[]; } -export interface ILibraryAssetJob extends IEntityJob { - importPaths: string[]; - exclusionPatterns: string[]; +export interface ILibraryBulkIdsJob { + libraryId: string; + assetIds: string[]; } -export interface IBulkEntityJob extends IBaseJob { +export interface IBulkEntityJob { ids: string[]; } +export interface ILibraryAssetsJob extends IBulkEntityJob { + importPaths: string[]; + exclusionPatterns: string[]; +} + export interface IDeleteFilesJob extends IBaseJob { files: Array; } @@ -284,10 +290,10 @@ export type JobItem = | { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob } // Library Management - | { name: JobName.LIBRARY_SYNC_FILE; data: ILibraryFileJob } + | { name: JobName.LIBRARY_SYNC_FILES; data: ILibraryFileJob } | { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob } - | { name: JobName.LIBRARY_SYNC_ASSET; data: ILibraryAssetJob } + | { name: JobName.LIBRARY_SYNC_ASSETS; data: ILibraryBulkIdsJob } | { name: JobName.LIBRARY_DELETE; data: IEntityJob } | { name: JobName.LIBRARY_QUEUE_SYNC_ALL; data?: IBaseJob } | { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob } diff --git a/server/src/interfaces/library.interface.ts b/server/src/interfaces/library.interface.ts index d8f1a1303116e..803cf1bc4ed31 100644 --- a/server/src/interfaces/library.interface.ts +++ b/server/src/interfaces/library.interface.ts @@ -1,8 +1,15 @@ +import { ADDED_IN_PREFIX } from 'src/constants'; import { LibraryStatsResponseDto } from 'src/dtos/library.dto'; import { LibraryEntity } from 'src/entities/library.entity'; export const ILibraryRepository = 'ILibraryRepository'; +export enum AssetSyncResult { + DO_NOTHING, + UPDATE, + OFFLINE, +} + export interface ILibraryRepository { getAll(withDeleted?: boolean): Promise; getAllDeleted(): Promise; diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts index 33d1e2457eb85..cc01d0c9bea49 100644 --- a/server/src/repositories/asset.repository.ts +++ b/server/src/repositories/asset.repository.ts @@ -5,6 +5,7 @@ import { AssetFileEntity } from 'src/entities/asset-files.entity'; import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity'; import { AssetEntity } from 'src/entities/asset.entity'; import { ExifEntity } from 'src/entities/exif.entity'; +import { LibraryEntity } from 'src/entities/library.entity'; import { AssetFileType, AssetOrder, AssetStatus, AssetType, PaginationMode } from 'src/enum'; import { AssetBuilderOptions, @@ -29,9 +30,11 @@ import { } from 'src/interfaces/asset.interface'; import { AssetSearchOptions, SearchExploreItem } from 'src/interfaces/search.interface'; import { searchAssetBuilder } from 'src/utils/database'; +import { globToSqlPattern } from 'src/utils/misc'; import { Paginated, PaginationOptions, paginate, paginatedBuilder } from 'src/utils/pagination'; import { Brackets, + DataSource, FindOptionsOrder, FindOptionsRelations, FindOptionsSelect, @@ -41,6 +44,7 @@ import { MoreThan, Not, Repository, + UpdateResult, } from 'typeorm'; const truncateMap: Record = { @@ -60,6 +64,7 @@ export class AssetRepository implements IAssetRepository { @InjectRepository(AssetFileEntity) private fileRepository: Repository, @InjectRepository(ExifEntity) private exifRepository: Repository, 
@InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository, + private dataSource: DataSource, ) {} async upsertExif(exif: Partial): Promise { @@ -74,6 +79,10 @@ export class AssetRepository implements IAssetRepository { return this.repository.save(asset); } + createAll(assets: AssetCreate[]): Promise { + return this.repository.save(assets); + } + @GenerateSql({ params: [[DummyValue.UUID], { day: 1, month: 1 }] }) async getByDayOfYear(ownerIds: string[], { day, month }: MonthDay): Promise { const assets = await this.repository @@ -188,14 +197,6 @@ export class AssetRepository implements IAssetRepository { return this.getAll(pagination, { ...options, userIds: [userId] }); } - @GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] }) - getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string): Promise { - return this.repository.findOne({ - where: { library: { id: libraryId }, originalPath }, - withDeleted: true, - }); - } - @GenerateSql({ params: [DummyValue.UUID, [DummyValue.STRING]] }) @ChunkedArray({ paramIndex: 1 }) async getPathsNotInLibrary(libraryId: string, originalPaths: string[]): Promise { @@ -222,6 +223,20 @@ export class AssetRepository implements IAssetRepository { }); } + getAllInLibrary(pagination: PaginationOptions, libraryId: string): Paginated { + const builder = this.repository + .createQueryBuilder('asset') + .select('asset.id') + .where('asset.libraryId = :libraryId', { libraryId }) + .withDeleted(); + + return paginatedBuilder(builder, { + mode: PaginationMode.SKIP_TAKE, + skip: pagination.skip, + take: pagination.take, + }); + } + /** * Get assets by device's Id on the database * @param ownerId @@ -726,4 +741,54 @@ export class AssetRepository implements IAssetRepository { async upsertFiles(files: { assetId: string; type: AssetFileType; path: string }[]): Promise { await this.fileRepository.upsert(files, { conflictPaths: ['assetId', 'type'] }); } + + updateOffline(library: LibraryEntity): Promise { + const paths = library.importPaths.map((importPath) => `${importPath}%`).join('|'); + const exclusions = library.exclusionPatterns.map((pattern) => globToSqlPattern(pattern)).join('|'); + return this.repository + .createQueryBuilder() + .update() + .set({ + isOffline: true, + deletedAt: new Date(), + }) + .where({ isOffline: false }) + .andWhere({ libraryId: library.id }) + .andWhere( + new Brackets((qb) => { + qb.where('originalPath NOT SIMILAR TO :paths', { + paths, + }).orWhere('originalPath SIMILAR TO :exclusions', { + exclusions, + }); + }), + ) + .execute(); + } + + async getNewPaths(libraryId: string, paths: string[]): Promise { + const rawSql = ` + WITH unnested_paths AS ( + SELECT unnest($1::text[]) AS path + ) + SELECT unnested_paths.path AS path + FROM unnested_paths + WHERE not exists( + SELECT 1 + FROM assets + WHERE "originalPath" = unnested_paths.path AND + "libraryId" = $2 + ); + `; + + return this.repository + .query(rawSql, [paths, libraryId]) + .then((result) => result.map((row: { path: string }) => row.path)); + } + + async getAssetCount(options: AssetSearchOptions = {}): Promise { + let builder = this.repository.createQueryBuilder('asset').leftJoinAndSelect('asset.files', 'files'); + builder = searchAssetBuilder(builder, options); + return builder.getCount(); + } } diff --git a/server/src/repositories/trash.repository.ts b/server/src/repositories/trash.repository.ts index d24f4f709afac..6ca6f4351f48f 100644 --- a/server/src/repositories/trash.repository.ts +++ b/server/src/repositories/trash.repository.ts @@ -14,6 
+14,7 @@ export class TrashRepository implements ITrashRepository { .createQueryBuilder('asset') .select('asset.id') .where({ status: AssetStatus.DELETED }) + .orWhere({ isOffline: true }) .withDeleted(), pagination, ); @@ -34,10 +35,13 @@ export class TrashRepository implements ITrashRepository { } async empty(userId: string): Promise { - const result = await this.assetRepository.update( - { ownerId: userId, status: AssetStatus.TRASHED }, - { status: AssetStatus.DELETED }, - ); + const result = await this.assetRepository + .createQueryBuilder() + .update(AssetEntity) + .set({ status: AssetStatus.DELETED }) + .where({ ownerId: userId, status: AssetStatus.TRASHED }) + .orWhere({ ownerId: userId, isOffline: true }) + .execute(); return result.affected || 0; } diff --git a/server/src/services/asset.service.ts b/server/src/services/asset.service.ts index 87510371192e4..f2bc09c9078e3 100644 --- a/server/src/services/asset.service.ts +++ b/server/src/services/asset.service.ts @@ -249,7 +249,12 @@ export class AssetService extends BaseService { const { thumbnailFile, previewFile } = getAssetFiles(asset.files); const files = [thumbnailFile?.path, previewFile?.path, asset.encodedVideoPath]; - if (deleteOnDisk) { + + if (deleteOnDisk && !asset.isOffline) { + /* We don't want to delete an offline asset because it is either... + ...missing from disk => don't delete the file since it doesn't exist where we expect + ...outside of any import path => don't delete the file since we're not responsible for it + ...matching an exclusion pattern => don't delete the file since it's excluded */ files.push(asset.sidecarPath, asset.originalPath); } diff --git a/server/src/services/job.service.ts b/server/src/services/job.service.ts index 2faed0a51666a..a9a430858e488 100644 --- a/server/src/services/job.service.ts +++ b/server/src/services/job.service.ts @@ -266,7 +266,7 @@ export class JobService extends BaseService { } case JobName.GENERATE_THUMBNAILS: { - if (!item.data.notify && item.data.source !== 'upload') { + if (!item.data.notify && item.data.source !== 'upload' && item.data.source !== 'library-import') { break; } diff --git a/server/src/services/library.service.spec.ts b/server/src/services/library.service.spec.ts index 43d6662d659e5..6441634d43b06 100644 --- a/server/src/services/library.service.spec.ts +++ b/server/src/services/library.service.spec.ts @@ -10,7 +10,7 @@ import { ICronRepository } from 'src/interfaces/cron.interface'; import { IDatabaseRepository } from 'src/interfaces/database.interface'; import { IJobRepository, - ILibraryAssetJob, + ILibraryAssetsJob, ILibraryFileJob, JobName, JOBS_LIBRARY_PAGINATION_SIZE, @@ -179,7 +179,7 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_FILE, + name: JobName.LIBRARY_SYNC_FILES, data: { id: libraryStub.externalLibrary1.id, ownerId: libraryStub.externalLibrary1.owner.id, @@ -231,7 +231,7 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_ASSET, + name: JobName.LIBRARY_SYNC_ASSETS, data: { id: assetStub.external.id, importPaths: libraryStub.externalLibrary1.importPaths, @@ -250,22 +250,22 @@ describe(LibraryService.name, () => { describe('handleSyncAsset', () => { it('should skip missing assets', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; 
assetMock.getById.mockResolvedValue(null); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED); expect(assetMock.remove).not.toHaveBeenCalled(); }); it('should offline assets no longer on disk', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -273,7 +273,7 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.stat.mockRejectedValue(new Error('ENOENT, no such file or directory')); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, @@ -282,15 +282,15 @@ describe(LibraryService.name, () => { }); it('should offline assets matching an exclusion pattern', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: ['**/user1/**'], }; assetMock.getById.mockResolvedValue(assetStub.external); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, deletedAt: expect.any(Date), @@ -298,8 +298,8 @@ describe(LibraryService.name, () => { }); it('should set assets outside of import paths as offline', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/data/user2'], exclusionPatterns: [], }; @@ -307,7 +307,7 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.checkFileExists.mockResolvedValue(true); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { isOffline: true, @@ -316,8 +316,8 @@ describe(LibraryService.name, () => { }); it('should do nothing with online assets', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -325,14 +325,14 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).not.toHaveBeenCalled(); }); it('should un-trash an asset previously marked as offline', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -340,7 +340,7 @@ describe(LibraryService.name, () => { 
assetMock.getById.mockResolvedValue(assetStub.trashedOffline); storageMock.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.trashedOffline.id], { deletedAt: null, @@ -353,8 +353,8 @@ describe(LibraryService.name, () => { }); it('should update file when mtime has changed', async () => { - const mockAssetJob: ILibraryAssetJob = { - id: assetStub.external.id, + const mockAssetJob: ILibraryAssetsJob = { + ids: [assetStub.external.id], importPaths: ['/'], exclusionPatterns: [], }; @@ -363,7 +363,7 @@ describe(LibraryService.name, () => { assetMock.getById.mockResolvedValue(assetStub.external); storageMock.stat.mockResolvedValue({ mtime: newMTime } as Stats); - await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); + await expect(sut.handleSyncAssets(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS); expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], { fileModifiedAt: newMTime, @@ -960,7 +960,7 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_FILE, + name: JobName.LIBRARY_SYNC_FILES, data: { id: libraryStub.externalLibraryWithImportPaths1.id, assetPath: '/foo/photo.jpg', @@ -969,7 +969,7 @@ describe(LibraryService.name, () => { }, ]); expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ ids: [assetStub.image.id] }) }, ]); }); @@ -985,7 +985,7 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_FILE, + name: JobName.LIBRARY_SYNC_FILES, data: { id: libraryStub.externalLibraryWithImportPaths1.id, assetPath: '/foo/photo.jpg', @@ -994,7 +994,7 @@ describe(LibraryService.name, () => { }, ]); expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ ids: [assetStub.image.id] }) }, ]); }); @@ -1009,7 +1009,7 @@ describe(LibraryService.name, () => { await sut.watchAll(); expect(jobMock.queueAll).toHaveBeenCalledWith([ - { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + { name: JobName.LIBRARY_SYNC_ASSETS, data: expect.objectContaining({ ids: [assetStub.image.id] }) }, ]); }); @@ -1166,9 +1166,9 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).toHaveBeenCalledWith([ { - name: JobName.LIBRARY_SYNC_ASSET, + name: JobName.LIBRARY_SYNC_ASSETS, data: { - id: assetStub.image1.id, + ids: [assetStub.image1.id], importPaths: libraryStub.externalLibrary1.importPaths, exclusionPatterns: libraryStub.externalLibrary1.exclusionPatterns, }, diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index c0d24fea9e19d..3c4e7f7a28c8e 100644 --- a/server/src/services/library.service.ts +++ b/server/src/services/library.service.ts @@ -1,4 +1,4 @@ -import { BadRequestException, Injectable } from '@nestjs/common'; +import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common'; import { R_OK } from 'node:constants'; import path, {
basename, isAbsolute, parse } from 'node:path'; import picomatch from 'picomatch'; @@ -16,10 +16,12 @@ import { } from 'src/dtos/library.dto'; import { AssetEntity } from 'src/entities/asset.entity'; import { LibraryEntity } from 'src/entities/library.entity'; -import { AssetType, ImmichWorker } from 'src/enum'; +import { AssetStatus, AssetType, ImmichWorker } from 'src/enum'; +import { AssetCreate } from 'src/interfaces/asset.interface'; import { DatabaseLock } from 'src/interfaces/database.interface'; import { ArgOf } from 'src/interfaces/event.interface'; import { JobName, JobOf, JOBS_LIBRARY_PAGINATION_SIZE, JobStatus, QueueName } from 'src/interfaces/job.interface'; +import { AssetSyncResult } from 'src/interfaces/library.interface'; import { BaseService } from 'src/services/base.service'; import { mimeTypes } from 'src/utils/mime-types'; import { handlePromiseError } from 'src/utils/misc'; @@ -98,6 +100,18 @@ export class LibraryService extends BaseService { let _resolve: () => void; const ready$ = new Promise((resolve) => (_resolve = resolve)); + const handler = async (event: string, path: string) => { + if (matcher(path)) { + this.logger.debug(`File ${event} event received for ${path} in library ${library.id}}`); + await this.jobRepository.queue({ + name: JobName.LIBRARY_SYNC_FILES, + data: { libraryId: library.id, ownerId: library.ownerId, assetPaths: [path] }, + }); + } else { + this.logger.verbose(`Ignoring file ${event} event for ${path} in library ${library.id}`); + } + }; + this.watchers[id] = this.storageRepository.watch( library.importPaths, { @@ -107,43 +121,13 @@ export class LibraryService extends BaseService { { onReady: () => _resolve(), onAdd: (path) => { - const handler = async () => { - this.logger.debug(`File add event received for ${path} in library ${library.id}}`); - if (matcher(path)) { - const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path); - if (asset) { - await this.syncAssets(library, [asset.id]); - } - if (matcher(path)) { - await this.syncFiles(library, [path]); - } - } - }; - return handlePromiseError(handler(), this.logger); + return handlePromiseError(handler('add', path), this.logger); }, onChange: (path) => { - const handler = async () => { - this.logger.debug(`Detected file change for ${path} in library ${library.id}`); - const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path); - if (asset) { - await this.syncAssets(library, [asset.id]); - } - if (matcher(path)) { - // Note: if the changed file was not previously imported, it will be imported now. 
- await this.syncFiles(library, [path]); - } - }; - return handlePromiseError(handler(), this.logger); + return handlePromiseError(handler('change', path), this.logger); }, onUnlink: (path) => { - const handler = async () => { - this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`); - const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path); - if (asset) { - await this.syncAssets(library, [asset.id]); - } - }; - return handlePromiseError(handler(), this.logger); + return handlePromiseError(handler('delete', path), this.logger); }, onError: (error) => { this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`); @@ -190,12 +174,12 @@ export class LibraryService extends BaseService { } } - async getStatistics(id: string): Promise { - const statistics = await this.libraryRepository.getStatistics(id); - if (!statistics) { - throw new BadRequestException(`Library ${id} not found`); + async getAssetCount(id: string): Promise { + const count = await this.assetRepository.getAssetCount({ libraryId: id }); + if (count == undefined) { + throw new InternalServerErrorException(`Failed to get asset count for library ${id}`); } - return statistics; + return count; } async get(id: string): Promise { @@ -228,26 +212,32 @@ export class LibraryService extends BaseService { return mapLibrary(library); } - private async syncFiles({ id, ownerId }: LibraryEntity, assetPaths: string[]) { - await this.jobRepository.queueAll( - assetPaths.map((assetPath) => ({ - name: JobName.LIBRARY_SYNC_FILE, - data: { - id, - assetPath, - ownerId, - }, - })), - ); - } + @OnJob({ name: JobName.LIBRARY_SYNC_FILES, queue: QueueName.LIBRARY }) + async handleSyncFiles(job: JobOf): Promise { + const library = await this.libraryRepository.get(job.libraryId); + if (!library) { + // We need to check if the library still exists as it could have been deleted after the scan was queued + this.logger.debug(`Library ${job.libraryId} not found, skipping file import`); + return JobStatus.FAILED; + } - private async syncAssets({ importPaths, exclusionPatterns }: LibraryEntity, assetIds: string[]) { - await this.jobRepository.queueAll( - assetIds.map((assetId) => ({ - name: JobName.LIBRARY_SYNC_ASSET, - data: { id: assetId, importPaths, exclusionPatterns }, - })), - ); + const assetImports = job.assetPaths.map((assetPath) => this.processEntity(assetPath, job.ownerId, job.libraryId)); + + const assetIds: string[] = []; + + // Due to a typeorm limitation we must batch the inserts + const batchSize = 2000; + for (let i = 0; i < assetImports.length; i += batchSize) { + const batch = assetImports.slice(i, i + batchSize); + const batchIds = await this.assetRepository.createAll(batch).then((assets) => assets.map((asset) => asset.id)); + assetIds.push(...batchIds); + } + + this.logger.log(`Imported ${assetIds.length} file(s) into library ${job.libraryId}`); + + await this.queuePostSyncJobs(assetIds); + + return JobStatus.SUCCESS; } private async validateImportPath(importPath: string): Promise { @@ -361,98 +351,62 @@ export class LibraryService extends BaseService { return JobStatus.SUCCESS; } - @OnJob({ name: JobName.LIBRARY_SYNC_FILE, queue: QueueName.LIBRARY }) - async handleSyncFile(job: JobOf): Promise { - // Only needs to handle new assets - const assetPath = path.normalize(job.assetPath); - - let asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath); - if (asset) { - return JobStatus.SKIPPED; - } - - let stat; - try { - stat = await 
this.storageRepository.stat(assetPath); - } catch (error: any) { - if (error.code === 'ENOENT') { - this.logger.error(`File not found: ${assetPath}`); - return JobStatus.SKIPPED; - } - this.logger.error(`Error reading file: ${assetPath}. Error: ${error}`); - return JobStatus.FAILED; - } + private processEntity(filePath: string, ownerId: string, libraryId: string): AssetCreate { + const assetPath = path.normalize(filePath); - this.logger.log(`Importing new library asset: ${assetPath}`); + // This date will be set until metadata extraction runs + const datePlaceholder = new Date('1900-01-01'); - const library = await this.libraryRepository.get(job.id, true); - if (!library || library.deletedAt) { - this.logger.error('Cannot import asset into deleted library'); - return JobStatus.FAILED; - } - - // TODO: device asset id is deprecated, remove it - const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, ''); - - const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`); - - // TODO: doesn't xmp replace the file extension? Will need investigation - let sidecarPath: string | null = null; - if (await this.storageRepository.checkFileExists(`${assetPath}.xmp`, R_OK)) { - sidecarPath = `${assetPath}.xmp`; - } - - const assetType = mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE; - - const mtime = stat.mtime; - - asset = await this.assetRepository.create({ - ownerId: job.ownerId, - libraryId: job.id, - checksum: pathHash, + return { + ownerId: ownerId, + libraryId: libraryId, + checksum: this.cryptoRepository.hashSha1(`path:${assetPath}`), originalPath: assetPath, - deviceAssetId, + + // TODO: device asset id is deprecated, remove it + deviceAssetId: `${basename(assetPath)}`.replaceAll(/\s+/g, ''), deviceId: 'Library Import', - fileCreatedAt: mtime, - fileModifiedAt: mtime, - localDateTime: mtime, - type: assetType, + fileCreatedAt: datePlaceholder, + fileModifiedAt: datePlaceholder, + localDateTime: datePlaceholder, + type: mimeTypes.isVideo(assetPath) ? 
        AssetType.VIDEO : AssetType.IMAGE,
       originalFileName: parse(assetPath).base,
-
-      sidecarPath,
       isExternal: true,
-    });
-
-    await this.queuePostSyncJobs(asset);
-
-    return JobStatus.SUCCESS;
+    };
   }
 
-  async queuePostSyncJobs(asset: AssetEntity) {
-    this.logger.debug(`Queueing metadata extraction for: ${asset.originalPath}`);
-
-    await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id, source: 'upload' } });
+  async queuePostSyncJobs(assetIds: string[]) {
+    await this.jobRepository.queueAll(
+      assetIds.map((assetId) => ({
+        name: JobName.METADATA_EXTRACTION,
+        data: { id: assetId, source: 'library-import' },
+      })),
+    );
   }
 
   async queueScan(id: string) {
     await this.findOrFail(id);
 
+    this.logger.log(`Starting to scan library ${id}`);
+
     await this.jobRepository.queue({
       name: JobName.LIBRARY_QUEUE_SYNC_FILES,
       data: {
         id,
       },
     });
+    await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } });
   }
 
   @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_ALL, queue: QueueName.LIBRARY })
   async handleQueueSyncAll(): Promise<JobStatus> {
-    this.logger.debug(`Refreshing all external libraries`);
+    this.logger.log(`Initiating scan of all external libraries`);
 
     await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} });
 
     const libraries = await this.libraryRepository.getAll(true);
+
     await this.jobRepository.queueAll(
       libraries.map((library) => ({
         name: JobName.LIBRARY_QUEUE_SYNC_FILES,
@@ -469,63 +423,116 @@ export class LibraryService extends BaseService {
         },
       })),
     );
+
     return JobStatus.SUCCESS;
   }
 
-  @OnJob({ name: JobName.LIBRARY_SYNC_ASSET, queue: QueueName.LIBRARY })
-  async handleSyncAsset(job: JobOf<JobName.LIBRARY_SYNC_ASSET>): Promise<JobStatus> {
-    const asset = await this.assetRepository.getById(job.id);
-    if (!asset) {
-      return JobStatus.SKIPPED;
-    }
+  @OnJob({ name: JobName.LIBRARY_SYNC_ASSETS, queue: QueueName.LIBRARY })
+  async handleSyncAssets(job: JobOf<JobName.LIBRARY_SYNC_ASSETS>): Promise<JobStatus> {
+    const assets = await this.assetRepository.getByIds(job.assetIds);
 
-    const markOffline = async (explanation: string) => {
-      if (!asset.isOffline) {
-        this.logger.debug(`${explanation}, removing: ${asset.originalPath}`);
-        await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() });
+    const assetIdsToOffline: string[] = [];
+    const assetIdsToUpdate: string[] = [];
+
+    this.logger.debug(`Checking batch of ${assets.length} existing asset(s) in library ${job.libraryId}`);
+
+    for (const asset of assets) {
+      const action = await this.handleSyncAsset(asset);
+      switch (action) {
+        case AssetSyncResult.OFFLINE:
+          assetIdsToOffline.push(asset.id);
+          break;
+        case AssetSyncResult.UPDATE:
+          assetIdsToUpdate.push(asset.id);
+          break;
       }
-    };
+    }
 
-    const isInPath = job.importPaths.find((path) => asset.originalPath.startsWith(path));
-    if (!isInPath) {
-      await markOffline('Asset is no longer in an import path');
-      return JobStatus.SUCCESS;
+    if (assetIdsToOffline.length) {
+      await this.assetRepository.updateAll(assetIdsToOffline, {
+        isOffline: true,
+        status: AssetStatus.TRASHED,
+        deletedAt: new Date(),
+      });
     }
 
-    const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern));
+    if (assetIdsToUpdate.length) {
+      //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed
+      await this.assetRepository.updateAll(assetIdsToUpdate, {
+        isOffline: false,
+        status: AssetStatus.ACTIVE,
+        deletedAt: null,
+      });
+      await this.queuePostSyncJobs(assetIdsToUpdate);
+    }
+
+    const remainingCount = assets.length - assetIdsToOffline.length - assetIdsToUpdate.length;
+
+    this.logger.log(
+      `Checked existing asset(s): ${assetIdsToOffline.length} offlined, ${assetIdsToUpdate.length} updated, ${remainingCount} unchanged of batch of ${assets.length} in library ${job.libraryId}.`,
+    );
+
+    return JobStatus.SUCCESS;
+  }
+
+  private async checkOfflineAsset(asset: AssetEntity) {
+    if (!asset.libraryId) {
+      return false;
+    }
+
+    const library = await this.libraryRepository.get(asset.libraryId);
+    if (!library) {
+      return false;
+    }
+
+    const isInImportPath = library.importPaths.find((path) => asset.originalPath.startsWith(path));
+    if (!isInImportPath) {
+      return false;
+    }
+
+    const isExcluded = library.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern));
     if (isExcluded) {
-      await markOffline('Asset is covered by an exclusion pattern');
-      return JobStatus.SUCCESS;
+      return false;
+    }
+
+    return true;
+  }
+
+  private async handleSyncAsset(asset: AssetEntity): Promise<AssetSyncResult> {
+    if (!asset) {
+      return AssetSyncResult.DO_NOTHING;
     }
 
     let stat;
     try {
       stat = await this.storageRepository.stat(asset.originalPath);
     } catch {
-      await markOffline('Asset is no longer on disk or is inaccessible because of permissions');
-      return JobStatus.SUCCESS;
+      if (asset.isOffline) {
+        return AssetSyncResult.DO_NOTHING;
+      }
+
+      this.logger.debug(
+        `Asset is no longer on disk or is inaccessible because of permissions, moving to trash: ${asset.originalPath}`,
+      );
+      return AssetSyncResult.OFFLINE;
     }
 
     const mtime = stat.mtime;
     const isAssetModified = mtime.toISOString() !== asset.fileModifiedAt.toISOString();
+    let shouldAssetGoOnline = false;
 
-    if (asset.isOffline || isAssetModified) {
-      this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`);
-      //TODO: When we have asset status, we need to leave deletedAt as is when status is trashed
-      await this.assetRepository.updateAll([asset.id], {
-        isOffline: false,
-        deletedAt: null,
-        fileCreatedAt: mtime,
-        fileModifiedAt: mtime,
-        originalFileName: parse(asset.originalPath).base,
-      });
+    if (asset.isOffline && asset.status != AssetStatus.DELETED) {
+      // Only perform the expensive check if the asset is offline
+      shouldAssetGoOnline = await this.checkOfflineAsset(asset);
    }
 
-    if (isAssetModified) {
-      this.logger.debug(`Asset was modified, queuing metadata extraction for: ${asset.originalPath}`);
-      await this.queuePostSyncJobs(asset);
+    if (shouldAssetGoOnline || isAssetModified) {
+      this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`);
+
+      return AssetSyncResult.UPDATE;
    }
-    return JobStatus.SUCCESS;
+
+    return AssetSyncResult.DO_NOTHING;
   }
 
   @OnJob({ name: JobName.LIBRARY_QUEUE_SYNC_FILES, queue: QueueName.LIBRARY })
@@ -536,7 +543,7 @@ export class LibraryService extends BaseService {
       return JobStatus.SKIPPED;
     }
 
-    this.logger.log(`Refreshing library ${library.id} for new assets`);
+    this.logger.debug(`Validating import paths for library ${library.id}...`);
 
     const validImportPaths: string[] = [];
 
@@ -551,28 +558,55 @@ export class LibraryService extends BaseService {
     if (validImportPaths.length === 0) {
       this.logger.warn(`No valid import paths found for library ${library.id}`);
+
+      return JobStatus.SKIPPED;
     }
 
-    const assetsOnDisk = this.storageRepository.walk({
+    const pathsOnDisk = this.storageRepository.walk({
       pathsToCrawl: validImportPaths,
       includeHidden: false,
       exclusionPatterns: library.exclusionPatterns,
       take: JOBS_LIBRARY_PAGINATION_SIZE,
     });
 
-    let count = 0;
+    let importCount = 0;
+    let crawlCount = 0;
+
+    this.logger.log(`Starting crawl of ${validImportPaths.length} path(s) for library ${library.id}...`);
 
-    for await (const assetBatch of assetsOnDisk) {
-      count += assetBatch.length;
-      this.logger.debug(`Discovered ${count} asset(s) on disk for library ${library.id}...`);
-      await this.syncFiles(library, assetBatch);
-      this.logger.verbose(`Queued scan of ${assetBatch.length} crawled asset(s) in library ${library.id}...`);
+    for await (const pathBatch of pathsOnDisk) {
+      crawlCount += pathBatch.length;
+      this.logger.debug(
+        `Crawled ${pathBatch.length} file(s) for library ${library.id}, in total ${crawlCount} file(s) crawled so far`,
+      );
+      const newPaths = await this.assetRepository.getNewPaths(library.id, pathBatch);
+      if (newPaths.length > 0) {
+        importCount += newPaths.length;
+
+        await this.jobRepository.queue({
+          name: JobName.LIBRARY_SYNC_FILES,
+          data: { libraryId: library.id, ownerId: library.ownerId, assetPaths: newPaths },
+        });
+        this.logger.log(
+          `Crawled ${crawlCount} file(s) so far: ${newPaths.length} of current batch queued for import for ${library.id}...`,
+        );
+      } else {
+        this.logger.log(
+          `Crawled ${crawlCount} file(s) so far: ${pathBatch.length} of current batch already in library ${library.id}...`,
+        );
+      }
     }
 
-    if (count > 0) {
-      this.logger.debug(`Finished queueing scan of ${count} assets on disk for library ${library.id}`);
-    } else if (validImportPaths.length > 0) {
-      this.logger.debug(`No non-excluded assets found in any import path for library ${library.id}`);
+    if (crawlCount === 0) {
+      this.logger.log(`No files found on disk for library ${library.id}`);
+    } else if (importCount > 0 && importCount === crawlCount) {
+      this.logger.log(`Finished crawling and queueing ${crawlCount} file(s) for import for library ${library.id}`);
+    } else if (importCount > 0) {
+      this.logger.log(
+        `Finished crawling ${crawlCount} file(s) of which ${importCount} file(s) are queued for import for library ${library.id}`,
+      );
+    } else {
+      this.logger.log(`All ${crawlCount} file(s) on disk are already in library ${library.id}`);
    }
 
     await this.libraryRepository.update({ id: job.id, refreshedAt: new Date() });
@@ -587,27 +621,66 @@ export class LibraryService extends BaseService {
       return JobStatus.SKIPPED;
     }
 
-    this.logger.log(`Scanning library ${library.id} for removed assets`);
+    const assetCount = await this.assetRepository.getAssetCount({ libraryId: job.id, withDeleted: true });
+
+    if (!assetCount) {
+      this.logger.log(`Library ${library.id} is empty, no need to check assets`);
+      return JobStatus.SUCCESS;
+    }
 
-    const onlineAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
-      this.assetRepository.getAll(pagination, { libraryId: job.id, withDeleted: true }),
+    this.logger.log(
+      `${assetCount} asset(s) in library ${library.id} will be checked against import paths and exclusion patterns...`,
     );
 
-    let assetCount = 0;
-    for await (const assets of onlineAssets) {
-      assetCount += assets.length;
-      this.logger.debug(`Discovered ${assetCount} asset(s) in library ${library.id}...`);
-      await this.jobRepository.queueAll(
-        assets.map((asset) => ({
-          name: JobName.LIBRARY_SYNC_ASSET,
-          data: { id: asset.id, importPaths: library.importPaths, exclusionPatterns: library.exclusionPatterns },
-        })),
+    const offlineResult = await this.assetRepository.updateOffline(library);
+
+    const affectedAssetCount = offlineResult.affected;
+    if (affectedAssetCount === undefined) {
+      this.logger.error(`Unknown error occurred when updating offline status in ${library.id}`);
+      return JobStatus.FAILED;
+    }
+
+    if (affectedAssetCount === assetCount) {
+      this.logger.log(
+        `All ${assetCount} asset(s) in ${library.id} are outside of import paths and/or match an exclusion pattern, marked as offline`,
+      );
+
+      return JobStatus.SUCCESS;
+    } else if (affectedAssetCount !== assetCount && affectedAssetCount > 0) {
+      this.logger.log(
+        `${offlineResult.affected} asset(s) out of ${assetCount} were marked offline due to import paths and/or exclusion patterns for library ${library.id}`,
+      );
+    } else {
+      this.logger.log(
+        `All ${assetCount} asset(s) in library ${library.id} were in an import path and none matched an exclusion pattern`,
+      );
+    }
+
+    this.logger.log(`Scanning library ${library.id} for assets missing from disk...`);
+
+    const existingAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
+      this.assetRepository.getAllInLibrary(pagination, job.id),
+    );
+
+    let currentAssetCount = 0;
+    for await (const assets of existingAssets) {
+      currentAssetCount += assets.length;
+
+      await this.jobRepository.queue({
+        name: JobName.LIBRARY_SYNC_ASSETS,
+        data: {
+          libraryId: library.id,
+          assetIds: assets.map((asset) => asset.id),
+        },
+      });
+
+      this.logger.log(
+        `Queued check of ${currentAssetCount} of ${assetCount} existing asset(s) so far in library ${library.id}`,
       );
-      this.logger.debug(`Queued check of ${assets.length} asset(s) in library ${library.id}...`);
     }
 
-    if (assetCount) {
-      this.logger.log(`Finished queueing check of ${assetCount} assets for library ${library.id}`);
+    if (currentAssetCount) {
+      this.logger.log(`Finished queuing ${currentAssetCount} asset check(s) for library ${library.id}`);
     }
 
     return JobStatus.SUCCESS;
diff --git a/server/src/services/metadata.service.ts b/server/src/services/metadata.service.ts
index 79a7d519d601e..14dae28da0ccc 100644
--- a/server/src/services/metadata.service.ts
+++ b/server/src/services/metadata.service.ts
@@ -148,13 +148,17 @@ export class MetadataService extends BaseService {
   }
 
   @OnJob({ name: JobName.METADATA_EXTRACTION, queue: QueueName.METADATA_EXTRACTION })
-  async handleMetadataExtraction({ id }: JobOf<JobName.METADATA_EXTRACTION>): Promise<JobStatus> {
+  async handleMetadataExtraction({ id, source }: JobOf<JobName.METADATA_EXTRACTION>): Promise<JobStatus> {
     const { metadata, reverseGeocoding } = await this.getConfig({ withCache: true });
     const [asset] = await this.assetRepository.getByIds([id], { faces: { person: false } });
     if (!asset) {
       return JobStatus.FAILED;
     }
 
+    if (source === 'library-import') {
+      await this.processSidecar(id, false);
+    }
+
     const stats = await this.storageRepository.stat(asset.originalPath);
 
     const exifTags = await this.getExifTags(asset);
diff --git a/server/src/services/trash.service.ts b/server/src/services/trash.service.ts
index 621dee0f8176d..549963772dcbc 100644
--- a/server/src/services/trash.service.ts
+++ b/server/src/services/trash.service.ts
@@ -52,7 +52,7 @@ export class TrashService extends BaseService {
     );
 
     for await (const assetIds of assetPagination) {
-      this.logger.debug(`Queueing ${assetIds.length} assets for deletion from the trash`);
+      this.logger.debug(`Queueing ${assetIds.length} asset(s) for deletion from the trash`);
       count += assetIds.length;
       await this.jobRepository.queueAll(
         assetIds.map((assetId) => ({
diff --git a/server/src/utils/misc.spec.ts b/server/src/utils/misc.spec.ts
index 53be77dc21a58..87ab6d4399bbf 100644
--- a/server/src/utils/misc.spec.ts
+++ b/server/src/utils/misc.spec.ts
@@ -1,4 +1,4 @@
-import { getKeysDeep, unsetDeep } from 'src/utils/misc';
+import { getKeysDeep, globToSqlPattern, unsetDeep } from 'src/utils/misc';
 import { describe, expect, it } from 'vitest';
 
 describe('getKeysDeep', () => {
@@ -51,3 +51,18 @@ describe('unsetDeep', () => {
     expect(unsetDeep({ foo: 'bar', nested: { enabled: true } }, 'nested.enabled')).toEqual({ foo: 'bar' });
   });
 });
+
+describe('globToSqlPattern', () => {
+  const testCases = [
+    ['**/Raw/**', '%/Raw/%'],
+    ['**/abc/*.tif', '%/abc/%.tif'],
+    ['**/*.tif', '%/%.tif'],
+    ['**/*.jp?', '%/%.jp_'],
+    ['**/@eaDir/**', '%/@eaDir/%'],
+    ['**/._*', `%/.\\_%`],
+  ];
+
+  it.each(testCases)('should convert %s to %s', (input, expected) => {
+    expect(globToSqlPattern(input)).toEqual(expected);
+  });
+});
diff --git a/server/src/utils/misc.ts b/server/src/utils/misc.ts
index 6a64923a3bf7b..3543cf20b02f5 100644
--- a/server/src/utils/misc.ts
+++ b/server/src/utils/misc.ts
@@ -10,6 +10,7 @@ import { ReferenceObject, SchemaObject } from '@nestjs/swagger/dist/interfaces/o
 import _ from 'lodash';
 import { writeFileSync } from 'node:fs';
 import path from 'node:path';
+import picomatch from 'picomatch';
 import { SystemConfig } from 'src/config';
 import { CLIP_MODEL_INFO, serverVersion } from 'src/constants';
 import { ImmichCookie, ImmichHeader, MetadataKey } from 'src/enum';
@@ -264,3 +265,55 @@ export const useSwagger = (app: INestApplication, { write }: { write: boolean })
     writeFileSync(outputPath, JSON.stringify(patchOpenAPI(specification), null, 2), { encoding: 'utf8' });
   }
 };
+
+const convertTokenToSqlPattern = (token: any): string => {
+  if (typeof token === 'string') {
+    return token;
+  }
+
+  switch (token.type) {
+    case 'slash': {
+      return '/';
+    }
+    case 'text': {
+      return token.value;
+    }
+    case 'globstar':
+    case 'star': {
+      return '%';
+    }
+    case 'underscore': {
+      return String.raw`\_`;
+    }
+    case 'qmark': {
+      return '_';
+    }
+    case 'dot': {
+      return '.';
+    }
+    case 'bracket': {
+      return `[${token.value}]`;
+    }
+    case 'negate': {
+      return `[^${token.value}]`;
+    }
+    case 'brace': {
+      const options = token.value.split(',');
+      return `(${options.join('|')})`;
+    }
+    default: {
+      return '';
+    }
+  }
+};
+
+export const globToSqlPattern = (glob: string) => {
+  const tokens = picomatch.parse(glob).tokens;
+
+  let result = '';
+  for (const token of tokens) {
+    result += convertTokenToSqlPattern(token);
+  }
+
+  return result;
+};
diff --git a/server/test/repositories/asset.repository.mock.ts b/server/test/repositories/asset.repository.mock.ts
index 928a7956c5f0c..07020c48c2c7e 100644
--- a/server/test/repositories/asset.repository.mock.ts
+++ b/server/test/repositories/asset.repository.mock.ts
@@ -38,5 +38,7 @@ export const newAssetRepositoryMock = (): Mocked => {
     getDuplicates: vitest.fn(),
     upsertFile: vitest.fn(),
     upsertFiles: vitest.fn(),
+    updateOffline: vitest.fn(),
+    getNewPaths: vitest.fn(),
   };
 };
diff --git a/web/src/routes/admin/library-management/+page.svelte b/web/src/routes/admin/library-management/+page.svelte
index b89e81ebf687d..20d35ff76d842 100644
--- a/web/src/routes/admin/library-management/+page.svelte
+++ b/web/src/routes/admin/library-management/+page.svelte
@@ -12,18 +12,16 @@
     notificationController,
     NotificationType,
   } from '$lib/components/shared-components/notification/notification';
-  import { ByteUnit, getBytesWithUnit } from '$lib/utils/byte-units';
   import { handleError } from '$lib/utils/handle-error';
   import {
     createLibrary,
     deleteLibrary,
     getAllLibraries,
-    getLibraryStatistics,
+    getAssetCount,
     getUserAdmin,
     scanLibrary,
     updateLibrary,
     type LibraryResponseDto,
-    type LibraryStatsResponseDto,
     type UserResponseDto,
   } from '@immich/sdk';
   import { mdiDatabase, mdiDotsVertical, mdiPlusBoxOutline, mdiSync } from '@mdi/js';
@@ -44,13 +42,8 @@
   let libraries: LibraryResponseDto[] = $state([]);
 
-  let stats: LibraryStatsResponseDto[] = [];
   let owner: UserResponseDto[] = $state([]);
-  let photos: number[] = [];
-  let videos: number[] = [];
-  let totalCount: number[] = $state([]);
-  let diskUsage: number[] = $state([]);
-  let diskUsageUnit: ByteUnit[] = $state([]);
+  let assetCount: number[] = $state([]);
   let editImportPaths: number | undefined = $state();
   let editScanSettings: number | undefined = $state();
   let renameLibrary: number | undefined = $state();
@@ -74,12 +67,8 @@
   };
 
   const refreshStats = async (listIndex: number) => {
-    stats[listIndex] = await getLibraryStatistics({ id: libraries[listIndex].id });
+    assetCount[listIndex] = await getAssetCount({ id: libraries[listIndex].id });
     owner[listIndex] = await getUserAdmin({ id: libraries[listIndex].ownerId });
-    photos[listIndex] = stats[listIndex].photos;
-    videos[listIndex] = stats[listIndex].videos;
-    totalCount[listIndex] = stats[listIndex].total;
-    [diskUsage[listIndex], diskUsageUnit[listIndex]] = getBytesWithUnit(stats[listIndex].usage, 0);
   };
 
   async function readLibraryList() {
@@ -190,10 +179,10 @@
     }
 
     await refreshStats(index);
-    const assetCount = totalCount[index];
-    if (assetCount > 0) {
+    const count = assetCount[index];
+    if (count > 0) {
       const isConfirmed = await dialogController.show({
-        prompt: $t('admin.confirm_delete_library_assets', { values: { count: assetCount } }),
+        prompt: $t('admin.confirm_delete_library_assets', { values: { count } }),
       });
 
       if (!isConfirmed) {
@@ -242,19 +231,18 @@
-
+
           {$t('type')}
           {$t('name')}
           {$t('owner')}
           {$t('assets')}
-          {$t('size')}
       {#each libraries as library, index (library.id)}
-          {#if totalCount[index] == undefined}
+          {#if assetCount[index] == undefined}
           {:else}
-            {totalCount[index].toLocaleString($locale)}
-          {/if}
-
-
-          {#if diskUsage[index] == undefined}
-
-          {:else}
-            {diskUsage[index]}
-            {diskUsageUnit[index]}
+            {assetCount[index].toLocaleString($locale)}
           {/if}