From 075d8f882e1c454cf438fc149e2f89dabcd9d8ac Mon Sep 17 00:00:00 2001
From: farfromrefug
Date: Thu, 19 Dec 2024 12:14:37 +0100
Subject: [PATCH] fix: delete cached zip after import finished

---
 app/android/activity.android.ts |   2 +-
 app/services/api.ts             | 117 +++-------
 app/services/importservice.ts   |  21 +-
 app/workers/ImportWorker.ts     | 384 ++++++++++++++++----------
 4 files changed, 233 insertions(+), 291 deletions(-)

diff --git a/app/android/activity.android.ts b/app/android/activity.android.ts
index a4d7443..1a5976a 100644
--- a/app/android/activity.android.ts
+++ b/app/android/activity.android.ts
@@ -7,7 +7,7 @@
 let shortPress = false;
 let longpressTimer;
 let audioManager: android.media.AudioManager;
-const SUPPORTED_KEY_CODES = [24, 25]
+const SUPPORTED_KEY_CODES = [24, 25];
 function handleKey(activity: androidx.appcompat.app.AppCompatActivity, keyCode: number, event: android.view.KeyEvent) {
     const instance = getInstance();
     // DEV_LOG && console.warn('handleKey', activity, instance, instance.shouldHandleVolumeButtons, keyCode);
diff --git a/app/services/api.ts b/app/services/api.ts
index 08d9600..360427c 100644
--- a/app/services/api.ts
+++ b/app/services/api.ts
@@ -253,41 +253,11 @@ export async function getHEAD(arg: any) {
 
 export async function downloadStories(story: RemoteContent, folderId?: number) {
     let progressNotificationId;
-    let destinationFilePath;
     try {
         const name = Date.now() + '';
         const destinationFileName = `${name}.zip`;
-        const destinationFolderPath = __IOS__ ? knownFolders.temp().path : documentsService.dataFolder.path;
-        const androidUseContent = __ANDROID__ && destinationFolderPath.startsWith(ANDROID_CONTENT);
-        // let file: File;
-        // if (story.download.startsWith('blob:')) {
-        //     DEV_LOG && console.log('downloadStories', 'blob', story.download);
-        //     const destinationFilePath = path.join(knownFolders.temp().path, destinationFileName);
-        //     if (__IOS__) {
-        //         showSnackMessage({ text: l('downloading'), progress: -1 });
-        //         const url = NSURL.URLWithString(story.download);
-        //         DEV_LOG && console.log('downloadStories', 'url', url);
-        //         const data = NSData.alloc().initWithContentsOfURL(url);
-        //         DEV_LOG && console.log('downloadStories', 'data', data);
-        //         await new Promise((resolve, reject) => {
-        //             try {
-        //                 data.writeToFileAtomicallyCompletion(destinationFilePath, true, () => resolve);
-        //             } catch (error) {
-        //                 reject(error);
-        //             }
-        //         });
-        //         file = File.fromPath(destinationFilePath);
-        //     } else {
-        //         try {
-        //             com.akylas.conty.FileUtils.Companion.writeURLToFile(story.download, destinationFilePath);
-        //         } catch (e) {
-        //             showError(e);
-        //         }
-        //         file = File.fromPath(destinationFilePath);
-        //     }
-        // } else {
         showSnackMessage({ text: l('preparing_download'), progress: -1 });
         const headResult = await getHEAD(story.download);
         const contentLength = headResult['content-length'] || headResult['Content-Length'];
@@ -296,49 +266,9 @@ export async function downloadStories(story: RemoteContent, folderId?: number) {
             size = parseInt(headResult['content-length'] || headResult['Content-Length'], 10);
             DEV_LOG && console.log('downloadStories', size);
         }
-        // const toDownload = await Promise.all(
-        //     stories.map(async (s) => {
-        //         const pageContent = await (await https.request({ method: 'GET', url: stories[0].download })).content.toStringAsync();
-        //         const actualUrl = pageContent.match(//)[1];
-        //         const size = parseInt((await getHEAD(actualUrl))['content-length'], 10);
-        //         return {
-        //             ...s,
-        //             download: actualUrl,
-        //             size
-        //         };
-        //     })
-        // );
-        // DEV_LOG &&
-        //     console.log(
-        //         'toDownload',
-        //         toDownload.map((s) => ({ size: s.size, download: s.download }))
-        //     );
-        // const totalSize = toDownload.reduce((acc, cur) => acc + cur.size, 0);
-        // const confirmed = await confirm({
-        //     title: lc('download_stories'),
-        //     message: lc(
-        //         'confirm_download_stories',
-        //         toDownload.length,
-        //         filesize(
-        //             toDownload.reduce((acc, cur) => acc + cur.size, 0),
-        //             { output: 'string' }
-        //         )
-        //     )
-        // });
-
-        // if (!confirmed) {
-        //     return;
-        // }
         progressNotificationId = 52346 + hashCode(story.download);
-        // const headers = await Promise.all(stories.map(getHEAD));
-
-        // const newContentSize = headers['content-length'];
-        // DEV_LOG && console.log('checkForStoryUpdate', url, storyId, workingDir, newContentSize, typeof newContentSize, lastSize !== newContentSize, Folder.exists(storyDirPath));
-
-        // if (forceReload || lastSize !== newContentSize || !Folder.exists(storyDirPath)) {
         const runningRequestTag: string = story.download;
-        // const name = cleanFilename(story.title);
         const progressNotification = ProgressNotifications.show({
             id: progressNotificationId, //required
@@ -381,7 +311,7 @@ export async function downloadStories(story: RemoteContent, folderId?: number) {
 
         const compressed = documentsService.supportsCompressedData;
-        const downloadFilePath = !compressed || androidUseContent ? path.join(knownFolders.temp().path, destinationFileName) : path.join(destinationFolderPath, destinationFileName);
+        const downloadFilePath = path.join(knownFolders.temp().path, destinationFileName);
         const file = await getFile(
             {
                 url: story.download,
@@ -402,29 +332,38 @@ export async function downloadStories(story: RemoteContent, folderId?: number) {
         DEV_LOG && console.log('downloaded', story.download, File.exists(file.path), file.size);
         if (File.exists(file.path) && file.size > 0) {
             // do it on a background thread
-            importService.importContentFromFiles(
-                [
-                    {
-                        filePath: file.path,
-                        id: name,
-                        extraData: {
-                            age: story.age,
-                            title: story.title,
-                            description: story.description,
-                            createdDate: dayjs(story.created_at).valueOf(),
-                            modifiedDate: dayjs(story.updated_at).valueOf()
-                        }
+            (async () => {
+                try {
+                    DEV_LOG && console.warn('about to importContentFromFiles');
+                    await importService.importContentFromFiles(
+                        [
+                            {
+                                filePath: file.path,
+                                id: name,
+                                extraData: {
+                                    age: story.age,
+                                    title: story.title,
+                                    description: story.description,
+                                    createdDate: dayjs(story.created_at).valueOf(),
+                                    modifiedDate: dayjs(story.updated_at).valueOf()
+                                }
+                            }
+                        ],
+                        folderId,
+                        true
+                    );
+                    DEV_LOG && console.warn('importContentFromFiles done');
+                } finally {
+                    if (file && File.exists(file.path)) {
+                        DEV_LOG && console.log('removing downloaded file', file.path);
+                        file.remove();
                     }
-                ],
-                folderId
-            );
+                }
+            })();
         }
     } catch (error) {
         showError(error);
         hideSnackMessage();
-        if (destinationFilePath && File.exists(destinationFilePath)) {
-            File.fromPath(destinationFilePath).remove();
-        }
    } finally {
         ProgressNotifications.dismiss(progressNotificationId);
     }
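
Note on the api.ts hunk above, which is the heart of this fix: the zip now always lands in the temp folder, and the import runs inside a self-invoking async block that awaits importContentFromFiles (with the new shouldWait flag) and removes the cached archive in a finally clause, so the zip is deleted whether the import succeeds or throws. A minimal sketch of that pattern, assuming a NativeScript-style File API; downloadTo and importContent are hypothetical stand-ins for the app's helpers:

```ts
import { File, knownFolders, path } from '@nativescript/core';

// Hypothetical stand-ins for the app's download and import helpers.
declare function downloadTo(url: string, filePath: string): Promise<File>;
declare function importContent(filePath: string): Promise<void>;

export async function downloadAndImport(url: string) {
    // Cache the archive in the temp folder, never in the data folder.
    const zipPath = path.join(knownFolders.temp().path, `${Date.now()}.zip`);
    const file = await downloadTo(url, zipPath);
    try {
        // Await the full import so the zip is still on disk while the worker reads it.
        await importContent(file.path);
    } finally {
        // Success or failure, the cached zip is no longer needed afterwards.
        if (File.exists(file.path)) {
            await file.remove();
        }
    }
}
```

Because the surrounding code does not await the IIFE, the outer catch/finally still only governs the download and its notifications; ownership of the cached zip moves entirely into the inner finally.
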
diff --git a/app/services/importservice.ts b/app/services/importservice.ts
index 2bfcccf..63f0eb2 100644
--- a/app/services/importservice.ts
+++ b/app/services/importservice.ts
@@ -27,9 +27,9 @@ export class ImportService extends Observable {
             nativeDatas?: { [k: string]: any };
         };
     }) {
-        // DEV_LOG && console.log('onWorkerMessage', event);
         const data = event.data;
         const id = data.id;
+        // DEV_LOG && console.log('onWorkerMessage', event);
         try {
             let messageData = data.messageData;
             if (typeof messageData === 'string') {
@@ -37,8 +37,9 @@ export class ImportService extends Observable {
                     messageData = JSON.parse(messageData);
                 } catch (error) {}
             }
-            // DEV_LOG && console.error(TAG, 'onWorkerMessage', id, data.type, id && this.messagePromises.hasOwnProperty(id), Object.keys(this.messagePromises), messageData);
+            // DEV_LOG && console.info('onWorkerMessage', id, data.type, id && this.messagePromises.hasOwnProperty(id), Object.keys(this.messagePromises), messageData);
             if (id && this.messagePromises.hasOwnProperty(id)) {
+                // DEV_LOG && console.log('worker response to promise', id);
                 this.messagePromises[id].forEach(function (executor) {
                     executor.timeoutTimer && clearTimeout(executor.timeoutTimer);
                     // if (isError) {
@@ -64,14 +65,14 @@ export class ImportService extends Observable {
             const eventData = messageData as any;
             switch (data.type) {
                 case 'event':
-                    DEV_LOG && console.info('worker event', documentsService.id, eventData.eventName, eventData.target, !!eventData.object, Object.keys(eventData), JSON.stringify(eventData.pack));
+                    // DEV_LOG && console.info('worker event', documentsService.id, eventData.eventName, eventData.target, !!eventData.object, Object.keys(eventData), JSON.stringify(eventData.pack));
                     if (eventData.target === 'documentsService') {
                         if (eventData.pack) {
                             eventData.pack = await documentsService.packRepository.get(eventData.pack.id);
                         }
                         documentsService.notify({ ...eventData, object: eventData.object || documentsService });
                     } else {
-                        DEV_LOG && console.info('notifying event from worker', documentsService.id, eventData.eventName, Object.keys(eventData));
+                        // DEV_LOG && console.info('notifying event from worker', documentsService.id, eventData.eventName, Object.keys(eventData));
                         this.notify({ ...eventData });
                     }
                     break;
@@ -191,25 +192,25 @@ export class ImportService extends Observable {
         this.worker = null;
         // this.services.forEach((service) => service.stop());
     }
-    async updateContentFromDataFolder({ showSnack = true }: { showSnack?: boolean } = {}) {
+    async updateContentFromDataFolder({ showSnack = true }: { showSnack?: boolean } = {}, shouldWait = false) {
         this.ensureWorker();
-        await this.sendMessageToWorker('import_data', { showSnack }, undefined, undefined, false, 0, { db: documentsService.db.db.db });
+        await this.sendMessageToWorker('import_data', { showSnack }, shouldWait ? time() : undefined, undefined, false, 0, { db: documentsService.db.db.db });
     }
-    async importContentFromFiles(files: { filePath: string; id?: string; extraData?: Partial<Pack> }[], folderId?: number) {
+    async importContentFromFiles(files: { filePath: string; id?: string; extraData?: Partial<Pack> }[], folderId?: number, shouldWait = false) {
         DEV_LOG && console.log('importContentFromFiles', files, folderId);
         this.ensureWorker();
-        await this.sendMessageToWorker('import_from_files', { files, folderId }, undefined, undefined, false, 0, { db: documentsService.db.db.db });
+        await this.sendMessageToWorker('import_from_files', { files, folderId }, shouldWait ? time() : undefined, undefined, false, 0, { db: documentsService.db.db.db });
     }
     // async importContentFromFile(data: { filePath: string; id?: string; extraData?; folderId?: number }) {
     //     DEV_LOG && console.log('importContentFromFile', JSON.stringify(data));
     //     this.ensureWorker();
     //     await this.sendMessageToWorker('import_from_file', data, undefined, undefined, false, 0, { db: documentsService.db.db.db });
     // }
-    async deletePacks(packs: Pack[]) {
+    async deletePacks(packs: Pack[], shouldWait = false) {
         const data = packs.map((s) => ({ id: s.id, folders: s.folders }));
         DEV_LOG && console.log('deleteDocuments', JSON.stringify(data));
         this.ensureWorker();
-        await this.sendMessageToWorker('delete_packs', data, undefined, undefined, false, 0, { db: documentsService.db.db.db });
+        await this.sendMessageToWorker('delete_packs', data, shouldWait ? time() : undefined, undefined, false, 0, { db: documentsService.db.db.db });
     }
 }
 export const importService = new ImportService();
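
The importservice.ts changes thread an optional shouldWait flag through each call: when set, sendMessageToWorker is handed a message id (time()), and the returned promise settles only when the worker replies with that same id. A condensed sketch of that correlation scheme, assuming a postMessage-style channel; pending, sendRequest and onWorkerReply are illustrative names, not the app's actual helpers:

```ts
// Illustrative id-correlated request/response over a worker channel.
type Executor = { resolve: (value: any) => void; reject: (reason?: any) => void };
const pending = new Map<number, Executor>();

function sendRequest(worker: Worker, type: string, messageData: any, shouldWait: boolean): Promise<any> {
    const id = shouldWait ? Date.now() : undefined; // the patch uses time() the same way
    worker.postMessage({ type, id, messageData });
    if (id === undefined) {
        return Promise.resolve(); // fire-and-forget: the pre-patch behavior
    }
    return new Promise((resolve, reject) => pending.set(id, { resolve, reject }));
}

// Wired to the worker's 'message' event: settle the matching promise, if any.
function onWorkerReply(message: { id?: number; messageData?: { error?: any } }) {
    if (message.id === undefined) {
        return; // unsolicited event, handled elsewhere
    }
    const executor = pending.get(message.id);
    if (!executor) {
        return;
    }
    pending.delete(message.id);
    if (message.messageData?.error) {
        executor.reject(message.messageData.error);
    } else {
        executor.resolve(message.messageData);
    }
}
```
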
diff --git a/app/workers/ImportWorker.ts b/app/workers/ImportWorker.ts
index 6e8b948..72dd605 100644
--- a/app/workers/ImportWorker.ts
+++ b/app/workers/ImportWorker.ts
@@ -143,9 +143,9 @@ export default class ImportWorker extends Observable {
     }
 
     async sendError(error) {
-        const { nativeException, ...realError } = error;
+        const { message, nativeException, stack, ...realError } = error;
         (global as any).postMessage({
-            messageData: JSON.stringify({ error: { message: error.toString(), stack: error.stack, ...error } }),
+            messageData: JSON.stringify({ error: { message: message || error.toString(), nativeException, stack, ...realError } }),
             type: 'error'
         });
     }
@@ -167,7 +167,15 @@
             DEV_LOG && console.warn('ImportWorker', 'handleStart', documentsService.id, event.data.nativeData.db, this.dataFolder.path);
         }
     }
-
+    replyToPromise(data) {
+        if (data.id) {
+            (global as any).postMessage({
+                messageData: JSON.stringify({ eventName: data.type }),
+                type: 'event',
+                id: data.id
+            });
+        }
+    }
     async receivedMessage(event: WorkerEvent) {
         const handled = this.receivedMessageBase(event);
         DEV_LOG && console.log(TAG, 'receivedMessage', handled, event.data.type);
@@ -177,11 +185,12 @@
             switch (data.type) {
                 case 'import_data':
                     await worker.handleStart(event);
-                    this.importFromCurrentDataFolderQueue(event.data.messageData);
+                    await this.importFromCurrentDataFolderQueue(event.data.messageData);
+                    DEV_LOG && console.log('importFromCurrentDataFolderQueue done');
                     break;
                 case 'import_from_files':
                     await worker.handleStart(event);
-                    this.importFromFilesQueue(event.data.messageData);
+                    await this.importFromFilesQueue(event.data.messageData);
                     break;
                 // case 'import_from_file':
                 //     await worker.handleStart(event);
                 //     break;
                 case 'delete_packs':
                     await worker.handleStart(event);
-                    this.deletePacksQueue(event.data.messageData);
+                    await this.deletePacksQueue(event.data.messageData);
+                    DEV_LOG && console.log('deletePacksQueue done');
+                    break;
                 case 'stop':
-                    worker.stop(data.messageData?.error, data.id);
+                    await worker.stop(data.messageData?.error, data.id);
                     break;
             }
         } catch (error) {
             this.sendError(error);
+        } finally {
+            this.replyToPromise(event.data);
         }
     }
     return true;
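
replyToPromise is the worker-side half of the shouldWait contract: receivedMessage now awaits each queue before returning, and answers from a finally block, so any message that carries an id gets exactly one reply even when a handler throws (the error itself still travels separately through sendError). A minimal sketch, with dispatch and postError as hypothetical stand-ins for the real handlers:

```ts
declare const global: any; // worker global scope, as used by the patch

// Hypothetical stand-ins for the worker's real dispatch table and error channel.
declare function dispatch(data: { type: string; messageData?: any }): Promise<void>;
declare function postError(error: unknown): void;

async function receivedMessage(data: { id?: number; type: string; messageData?: any }) {
    try {
        // Awaiting here is what makes the final reply meaningful: the work is done.
        await dispatch(data);
    } catch (error) {
        postError(error);
    } finally {
        // Reply exactly once per id so the caller's promise always settles.
        if (data.id) {
            global.postMessage({ id: data.id, type: 'event', messageData: JSON.stringify({ eventName: data.type }) });
        }
    }
}
```

Note the awaits added to the queue calls above: without them, the finally would fire before the import actually finished, and the api.ts caller would delete the zip while the worker was still reading it.
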
@@ -213,33 +226,30 @@
         return this.queue.add(() => this.deletePacks(data));
     }
     async deletePacks(ids: { id: string; folders: number[] }[]) {
-        DEV_LOG && console.log('deleteDocuments', ids);
+        DEV_LOG && console.log('deletePacks', ids);
         // await this.packRepository.delete(model);
-        try {
-            const database = documentsService.db;
-            if (!database.isOpen()) {
-                return;
-            }
-            this.notify({ eventName: EVENT_IMPORT_STATE, state: 'running', type: 'delete_packs' } as ImportStateEventData);
-            await doInBatch<{ id: string; folders: number[] }, void>(
-                ids,
-                async (d: { id: string; folders: number[] }) => {
-                    const id = d.id;
-                    await documentsService.removePack(id);
-                    const folderPathStr = path.join(documentsService.realDataFolderPath, id);
-                    if (Folder.exists(folderPathStr)) {
-                        const docData = Folder.fromPath(folderPathStr, false);
-                        DEV_LOG && console.log('deleteDocument', folderPathStr);
-                        await docData.remove();
-                    }
-                    // we notify on each delete so that UI updates fast
-                    documentsService.notify({ eventName: EVENT_PACK_DELETED, packIds: [id], folders: d.folders } as PackDeletedEventData);
-                },
-                1
-            );
-        } catch (error) {
-            this.sendError(error);
+        const database = documentsService.db;
+        if (!database.isOpen()) {
+            return;
         }
+        this.notify({ eventName: EVENT_IMPORT_STATE, state: 'running', type: 'delete_packs' } as ImportStateEventData);
+        await doInBatch<{ id: string; folders: number[] }, void>(
+            ids,
+            async (d: { id: string; folders: number[] }) => {
+                const id = d.id;
+                await documentsService.removePack(id);
+                const folderPathStr = path.join(documentsService.realDataFolderPath, id);
+                if (Folder.exists(folderPathStr)) {
+                    const docData = Folder.fromPath(folderPathStr, false);
+                    DEV_LOG && console.log('deleteDocument', folderPathStr);
+                    await docData.remove();
+                }
+                // we notify on each delete so that UI updates fast
+                documentsService.notify({ eventName: EVENT_PACK_DELETED, packIds: [id], folders: d.folders } as PackDeletedEventData);
+            },
+            1
+        );
+        DEV_LOG && console.log('deletePacks done', ids);
     }
     // async importFromFileQueue(data) {
     //     return this.queue.add(() => this.importFromFileInternal(data));
     // }
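
deletePacks itself no longer needs a try/catch: errors now propagate to receivedMessage, which reports them and still resolves the caller's promise from its finally. The doInBatch call with a batch size of 1 removes packs one at a time, notifying after each so the UI updates incrementally. A rough sketch of a doInBatch-style helper under those assumptions (the real utility's signature may differ):

```ts
// Approximation of a doInBatch-style helper: process items in chunks of `batchSize`,
// awaiting each chunk so at most batchSize deletions are ever in flight.
async function doInBatch<T>(items: T[], handler: (item: T) => Promise<void>, batchSize = 1): Promise<void> {
    for (let i = 0; i < items.length; i += batchSize) {
        await Promise.all(items.slice(i, i + batchSize).map((item) => handler(item)));
    }
}

// Usage mirroring deletePacks: notify per pack so listeners can refresh as each one goes.
// await doInBatch(ids, async ({ id, folders }) => {
//     await removePack(id);
//     notify({ eventName: 'EVENT_PACK_DELETED', packIds: [id], folders });
// }, 1);
```
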
@@ -255,150 +265,146 @@
         return entities.findIndex((e) => e.name === LUNII_DATA_FILE || e.name === TELMI_DATA_FILE) !== -1;
     }
     async importFromCurrentDataFolderInternal({ showSnack }: { showSnack?: boolean }) {
-        try {
-            const database = documentsService.db;
-            if (!database.isOpen()) {
-                return;
-            }
-            this.notify({ eventName: EVENT_IMPORT_STATE, state: 'running', type: 'import_data', showSnack } as ImportStateEventData);
-            const supportsCompressedData = documentsService.supportsCompressedData;
-            DEV_LOG && console.log(TAG, 'importFromCurrentDataFolderInternal', this.dataFolder.path);
-            const entities = await this.dataFolder.getEntities();
-            // DEV_LOG &&
-            //     console.log(
-            //         'updateContentFromDataFolder',
-            //         supportsCompressedData,
-            //         entities.map((e) => e.name)
-            //     );
+        const database = documentsService.db;
+        if (!database.isOpen()) {
+            return;
+        }
+        this.notify({ eventName: EVENT_IMPORT_STATE, state: 'running', type: 'import_data', showSnack } as ImportStateEventData);
+        const supportsCompressedData = documentsService.supportsCompressedData;
+        DEV_LOG && console.log(TAG, 'importFromCurrentDataFolderInternal', this.dataFolder.path);
+        const entities = await this.dataFolder.getEntities();
+        // DEV_LOG &&
+        //     console.log(
+        //         'updateContentFromDataFolder',
+        //         supportsCompressedData,
+        //         entities.map((e) => e.name)
+        //     );
-            // we remove duplicates
-            const existToTest = [...new Set(entities.map((e) => '"' + (e['extension'] ? e.name.slice(0, -e['extension'].length) : e.name) + '"'))];
-            // DEV_LOG && console.log('existToTest', existToTest);
-            const r = (await database.query(new SqlQuery([`SELECT id,compressed,externalPath FROM Pack WHERE id IN (${existToTest.join(',')})`]))) as {
-                id: string;
-                externalPath?: string;
-                compressed: 1 | 0;
-            }[];
-            // DEV_LOG && console.log('updateContentFromDataFolder1 in db', r);
-            for (let index = 0; index < entities.length; index++) {
-                const entity = entities[index];
-                try {
-                    if (!this.isFolderValid(entity.path)) {
-                        console.error(`invalid folder : ${entity.path}`);
-                        await Folder.fromPath(entity.path).remove();
-                        continue;
-                    }
-                    let id = entity['extension'] ? entity.name.slice(0, -entity['extension']?.length) : entity.name;
-                    const compressed = entity.name.endsWith('.zip');
-                    const existing = r.find((i) => i.id === id);
-                    let inputFilePath = entity.path;
-                    // DEV_LOG && console.log('updateContentFromDataFolder handling', id, compressed, JSON.stringify(existing));
-                    if (!existing) {
-                        let extraData: PackExtra;
-                        DEV_LOG && console.log('importing from data folder', entity.name, compressed, supportsCompressedData);
-                        // we need to clean up the name because some char will break android ZipFile
-                        const realId = Date.now() + '';
-                        let destinationFolderPath = inputFilePath;
-                        if (compressed && realId !== id && supportsCompressedData) {
-                            inputFilePath = path.join(this.dataFolder.path, `${realId}.zip`);
-                            if (compressed && supportsCompressedData) {
-                                await File.fromPath(destinationFolderPath).rename(inputFilePath);
-                            }
-                            id = realId;
-                            destinationFolderPath = inputFilePath;
-                        }
-                        //TODO: for now we ignore compressed!
-                        if (compressed && !supportsCompressedData) {
-                            // continue;
-                            id = realId;
-                            destinationFolderPath = this.dataFolder.getFolder(id, true).path;
-                            DEV_LOG && console.log('importing from zip', id, inputFilePath, destinationFolderPath, Folder.exists(destinationFolderPath));
-                            await unzip(inputFilePath, destinationFolderPath);
-                            const subPaths = await this.getUnzippedStorySubPaths(destinationFolderPath);
-                            if (subPaths) {
-                                extraData = extraData || {};
-                                extraData.subPaths = subPaths;
-                            }
-                            DEV_LOG && console.log('deleting zip', inputFilePath);
-                            await File.fromPath(inputFilePath).remove();
-                        }
-                        if (!supportsCompressedData) {
-                            DEV_LOG && console.log('sizetest', destinationFolderPath, Folder.exists(destinationFolderPath), Folder.fromPath(destinationFolderPath).getEntitiesSync());
-                            const test1Path = path.join(destinationFolderPath, LUNII_DATA_FILE);
-                            const test2Path = path.join(destinationFolderPath, TELMI_DATA_FILE);
-                            const sizeTest = (test1Path && File.exists(test1Path) && File.fromPath(test1Path).size) || (test2Path && File.exists(test2Path) && File.fromPath(test2Path).size);
-                            if (!sizeTest) {
-                                // broken folder, let s remove it
-                                await Folder.fromPath(destinationFolderPath).remove();
-                                continue;
-                            }
-                        }
-                        await this.prepareAndImportUncompressedPack({
-                            destinationFolderPath,
-                            id,
-                            supportsCompressedData: supportsCompressedData && compressed,
-                            extraData: extraData ? { extra: extraData } : undefined
-                        });
-                    } else if (compressed && !supportsCompressedData) {
-                        // we have an entry in db using a zip. Let s unzip and update the existing pack to use the unzipped Version
-                        const destinationFolderPath = this.dataFolder.getFolder(id, true).path;
-                        DEV_LOG && console.log('we need to unzip existing entry in db', entity.path, destinationFolderPath);
-                        // if (!Folder.exists(destinationFolderPath)) {
-                        await unzip(entity.path, destinationFolderPath);
-                        let extraData: PackExtra;
-                        const subPaths = await this.getUnzippedStorySubPaths(destinationFolderPath);
-                        if (subPaths) {
-                            extraData = extraData || {};
-                            extraData.subPaths = subPaths;
-                        }
-                        const pack = await documentsService.packRepository.get(id);
-
-                        pack.save({
-                            compressed: 0,
-                            extra: Object.assign({}, pack.extra, extraData)
-                        });
-                        // }
-                        await File.fromPath(entity.path).remove();
-                    } else if (supportsCompressedData && compressed && existing.compressed === 0) {
-                        (await documentsService.packRepository.get(id)).save({
-                            compressed: 1
-                        });
-                    }
-                } catch (error) {
-                    await Folder.fromPath(entity.path).remove();
-                    throw error;
-                }
-            }
+        // we remove duplicates
+        const existToTest = [...new Set(entities.map((e) => '"' + (e['extension'] ? e.name.slice(0, -e['extension'].length) : e.name) + '"'))];
+        // DEV_LOG && console.log('existToTest', existToTest);
+        const r = (await database.query(new SqlQuery([`SELECT id,compressed,externalPath FROM Pack WHERE id IN (${existToTest.join(',')})`]))) as {
+            id: string;
+            externalPath?: string;
+            compressed: 1 | 0;
+        }[];
+        // DEV_LOG && console.log('updateContentFromDataFolder1 in db', r);
+        for (let index = 0; index < entities.length; index++) {
+            const entity = entities[index];
+            try {
+                if (!this.isFolderValid(entity.path)) {
+                    console.error(`invalid folder: ${entity.path}`);
+                    await Folder.fromPath(entity.path).remove();
+                    continue;
+                }
+                let id = entity['extension'] ? entity.name.slice(0, -entity['extension']?.length) : entity.name;
+                const compressed = entity.name.endsWith('.zip');
+                const existing = r.find((i) => i.id === id);
+                let inputFilePath = entity.path;
+                // DEV_LOG && console.log('updateContentFromDataFolder handling', id, compressed, JSON.stringify(existing));
+                if (!existing) {
+                    let extraData: PackExtra;
+                    DEV_LOG && console.log('importing from data folder', entity.name, compressed, supportsCompressedData);
+                    // we need to clean up the name because some chars will break android ZipFile
+                    const realId = Date.now() + '';
+                    let destinationFolderPath = inputFilePath;
+                    if (compressed && realId !== id && supportsCompressedData) {
+                        inputFilePath = path.join(this.dataFolder.path, `${realId}.zip`);
+                        if (compressed && supportsCompressedData) {
+                            await File.fromPath(destinationFolderPath).rename(inputFilePath);
+                        }
+                        id = realId;
+                        destinationFolderPath = inputFilePath;
+                    }
+                    //TODO: for now we ignore compressed!
+                    if (compressed && !supportsCompressedData) {
+                        // continue;
+                        id = realId;
+                        destinationFolderPath = this.dataFolder.getFolder(id, true).path;
+                        DEV_LOG && console.log('importing from zip', id, inputFilePath, destinationFolderPath, Folder.exists(destinationFolderPath));
+                        await unzip(inputFilePath, destinationFolderPath);
+                        const subPaths = await this.getUnzippedStorySubPaths(destinationFolderPath);
+                        if (subPaths) {
+                            extraData = extraData || {};
+                            extraData.subPaths = subPaths;
+                        }
+                        DEV_LOG && console.log('deleting zip', inputFilePath);
+                        await File.fromPath(inputFilePath).remove();
+                    }
+                    if (!supportsCompressedData) {
+                        DEV_LOG && console.log('sizetest', destinationFolderPath, Folder.exists(destinationFolderPath), Folder.fromPath(destinationFolderPath).getEntitiesSync());
+                        const test1Path = path.join(destinationFolderPath, LUNII_DATA_FILE);
+                        const test2Path = path.join(destinationFolderPath, TELMI_DATA_FILE);
+                        const sizeTest = (test1Path && File.exists(test1Path) && File.fromPath(test1Path).size) || (test2Path && File.exists(test2Path) && File.fromPath(test2Path).size);
+                        if (!sizeTest) {
+                            // broken folder, let's remove it
+                            await Folder.fromPath(destinationFolderPath).remove();
+                            continue;
+                        }
+                    }
+                    await this.prepareAndImportUncompressedPack({
+                        destinationFolderPath,
+                        id,
+                        supportsCompressedData: supportsCompressedData && compressed,
+                        extraData: extraData ? { extra: extraData } : undefined
+                    });
+                } else if (compressed && !supportsCompressedData) {
+                    // we have an entry in db using a zip. Let's unzip and update the existing pack to use the unzipped version
+                    const destinationFolderPath = this.dataFolder.getFolder(id, true).path;
+                    DEV_LOG && console.log('we need to unzip existing entry in db', entity.path, destinationFolderPath);
+                    // if (!Folder.exists(destinationFolderPath)) {
+                    await unzip(entity.path, destinationFolderPath);
+                    let extraData: PackExtra;
+                    const subPaths = await this.getUnzippedStorySubPaths(destinationFolderPath);
+                    if (subPaths) {
+                        extraData = extraData || {};
+                        extraData.subPaths = subPaths;
+                    }
+                    const pack = await documentsService.packRepository.get(id);
+
+                    pack.save({
+                        compressed: 0,
+                        extra: Object.assign({}, pack.extra, extraData)
+                    });
+                    // }
+                    await File.fromPath(entity.path).remove();
+                } else if (supportsCompressedData && compressed && existing.compressed === 0) {
+                    (await documentsService.packRepository.get(id)).save({
+                        compressed: 1
+                    });
+                }
+            } catch (error) {
+                await Folder.fromPath(entity.path).remove();
+                throw error;
+            }
+        }
-            const externalPaths = JSON.parse(JSON.stringify(ApplicationSettings.getString('external_paths', '[]')));
-            const externalEntities = [];
-            for (let index = 0; index < externalPaths.length; index++) {
-                const pathStr = externalPaths[index];
-                if (Folder.exists(externalPaths[index])) {
-                    externalEntities.push(await Folder.fromPath(pathStr).getEntities());
-                }
-            }
-            for (let index = 0; index < externalEntities.length; index++) {
-                const entity = externalEntities[index];
-                try {
-                    if (!this.isFolderValid(entity.path)) {
-                        console.error(`invalid folder : ${entity.path}`);
-                        continue;
-                    }
-                    const existing = r.find((i) => i.externalPath === entity.path);
-                    if (!existing) {
-                        await this.prepareAndImportUncompressedPack({ destinationFolderPath: entity.path, externalPath: entity.path, id: entity.path, supportsCompressedData: false });
-                    }
-                } catch (error) {
-                    await Folder.fromPath(entity.path).remove();
-                    throw error;
-                }
-            }
+        const externalPaths = JSON.parse(JSON.stringify(ApplicationSettings.getString('external_paths', '[]')));
+        const externalEntities = [];
+        for (let index = 0; index < externalPaths.length; index++) {
+            const pathStr = externalPaths[index];
+            if (Folder.exists(externalPaths[index])) {
+                externalEntities.push(await Folder.fromPath(pathStr).getEntities());
+            }
+        }
+        for (let index = 0; index < externalEntities.length; index++) {
+            const entity = externalEntities[index];
+            try {
+                if (!this.isFolderValid(entity.path)) {
+                    console.error(`invalid folder: ${entity.path}`);
+                    continue;
+                }
+                const existing = r.find((i) => i.externalPath === entity.path);
+                if (!existing) {
+                    await this.prepareAndImportUncompressedPack({ destinationFolderPath: entity.path, externalPath: entity.path, id: entity.path, supportsCompressedData: false });
+                }
+            } catch (error) {
+                await Folder.fromPath(entity.path).remove();
+                throw error;
+            }
+        }
-        } catch (error) {
-            DEV_LOG && console.error(error, error.stack);
-            this.sendError(error);
-        }
+        DEV_LOG && console.log(TAG, 'importFromCurrentDataFolderInternal done', this.dataFolder.path);
     }
 
     async prepareAndImportUncompressedPack({
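
The branch above is the core decision in importFromCurrentDataFolderInternal: when the platform can use compressed packs, the zip is kept and simply renamed under a fresh timestamp id (some characters in user-supplied names can break Android's ZipFile); otherwise it is extracted into a folder and the archive deleted. A condensed sketch of that decision, assuming a NativeScript-like FS API and an unzip(src, dest) helper as in the worker:

```ts
import { File, path } from '@nativescript/core';

// unzip(src, dest) as used by the worker; assumed available from the app's zip helper.
declare function unzip(archive: string, destination: string): Promise<void>;

// Sketch: stage a downloaded pack under a fresh timestamp id and return its final path.
async function stagePack(zipPath: string, dataFolderPath: string, supportsCompressedData: boolean): Promise<string> {
    const id = Date.now() + ''; // timestamp ids avoid characters that can break Android's ZipFile
    if (supportsCompressedData) {
        // Keep the pack compressed: just rename the zip under its new id.
        const target = path.join(dataFolderPath, `${id}.zip`);
        await File.fromPath(zipPath).rename(target); // rename-to-path, as the worker does
        return target;
    }
    // No compressed-pack support: extract, then drop the archive.
    const target = path.join(dataFolderPath, id);
    await unzip(zipPath, target);
    await File.fromPath(zipPath).remove();
    return target;
}
```
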
@@ -544,39 +550,35 @@
     //     }
     // }
     async importFromFilesInternal({ files, folderId, showSnack }: { files: { filePath: string; id?: string; extraData?: Partial<Pack> }[]; folderId?: number; showSnack?: boolean }) {
-        try {
-            const database = documentsService.db;
-            if (!database.isOpen()) {
-                return;
-            }
-            this.notify({ eventName: EVENT_IMPORT_STATE, state: 'running', type: 'import_from_files', showSnack } as ImportStateEventData);
-            DEV_LOG && console.log(TAG, 'importFromFilesInternal', this.dataFolder.path, JSON.stringify(files));
-            const supportsCompressedData = documentsService.supportsCompressedData;
-            for (let index = 0; index < files.length; index++) {
-                const fileData = files[index];
-                const inputFilePath = fileData.filePath;
-                let destinationFolderPath = inputFilePath;
-                let extraData: Partial<Pack> = fileData.extraData;
-                const id = Date.now() + '';
-                destinationFolderPath = path.join(this.dataFolder.path, `${id}.zip`);
-                if (!supportsCompressedData) {
-                    destinationFolderPath = this.dataFolder.getFolder(id, true).path;
-                    await unzip(inputFilePath, destinationFolderPath);
-                    DEV_LOG && console.log('unzip done');
-                    const subPaths = await this.getUnzippedStorySubPaths(destinationFolderPath);
-                    if (subPaths) {
-                        extraData = extraData || {};
-                        extraData.extra = extraData.extra || {};
-                        extraData.extra.subPaths = subPaths;
-                    }
-                } else {
-                    await File.fromPath(inputFilePath).copy(destinationFolderPath);
-                }
-
-                await this.prepareAndImportUncompressedPack({ destinationFolderPath, id, supportsCompressedData, folderId, extraData });
-            }
-        } catch (error) {
-            this.sendError(error);
+        const database = documentsService.db;
+        if (!database.isOpen()) {
+            return;
+        }
+        this.notify({ eventName: EVENT_IMPORT_STATE, state: 'running', type: 'import_from_files', showSnack } as ImportStateEventData);
+        DEV_LOG && console.log(TAG, 'importFromFilesInternal', this.dataFolder.path, JSON.stringify(files));
+        const supportsCompressedData = documentsService.supportsCompressedData;
+        for (let index = 0; index < files.length; index++) {
+            const fileData = files[index];
+            const inputFilePath = fileData.filePath;
+            let destinationFolderPath = inputFilePath;
+            let extraData: Partial<Pack> = fileData.extraData;
+            const id = Date.now() + '';
+            destinationFolderPath = path.join(this.dataFolder.path, `${id}.zip`);
+            if (!supportsCompressedData) {
+                destinationFolderPath = this.dataFolder.getFolder(id, true).path;
+                await unzip(inputFilePath, destinationFolderPath);
+                DEV_LOG && console.log('unzip done');
+                const subPaths = await this.getUnzippedStorySubPaths(destinationFolderPath);
+                if (subPaths) {
+                    extraData = extraData || {};
+                    extraData.extra = extraData.extra || {};
+                    extraData.extra.subPaths = subPaths;
+                }
+            } else {
+                await File.fromPath(inputFilePath).copy(destinationFolderPath);
+            }
+
+            await this.prepareAndImportUncompressedPack({ destinationFolderPath, id, supportsCompressedData, folderId, extraData });
         }
     }
 }