diff --git a/apps/studio/electron/main/code/files.ts b/apps/studio/electron/main/code/files.ts
index 9f7b80486f..6883ab73d6 100644
--- a/apps/studio/electron/main/code/files.ts
+++ b/apps/studio/electron/main/code/files.ts
@@ -1,6 +1,7 @@
 import { existsSync, promises as fs } from 'fs';
 import * as path from 'path';
 import prettier from 'prettier';
+import crypto from 'crypto';
 
 export async function readFile(filePath: string): Promise<string | null> {
     try {
@@ -82,3 +83,27 @@ export async function formatContent(filePath: string, content: string): Promise<
         return content;
     }
 }
+
+export function createHash(content: string): string {
+    return crypto.createHash('sha256').update(content, 'utf8').digest('hex');
+}
+
+export async function checkIfCacheDirectoryExists(projectDir: string): Promise<void> {
+    const cacheDir = path.join(projectDir, '.onlook', 'cache');
+    try {
+        await fs.mkdir(cacheDir, { recursive: true });
+    } catch (error) {
+        console.error(`Failed to create cache directory: ${error}`);
+    }
+}
+
+export async function removeCacheDirectory(projectDir: string): Promise<void> {
+    const cacheDir = path.join(projectDir, '.onlook');
+
+    try {
+        await fs.rm(cacheDir, { recursive: true, force: true });
+        console.log(`Removed cache directory: ${cacheDir}`);
+    } catch (error) {
+        console.error(`Failed to remove cache directory: ${error}`);
+    }
+}
diff --git a/apps/studio/electron/main/run/cleanup.ts b/apps/studio/electron/main/run/cleanup.ts
index c8c250b3ef..51dd710a9f 100644
--- a/apps/studio/electron/main/run/cleanup.ts
+++ b/apps/studio/electron/main/run/cleanup.ts
@@ -2,14 +2,19 @@ import traverse, { NodePath } from '@babel/traverse';
 import * as t from '@babel/types';
 import { EditorAttributes } from '@onlook/models/constants';
 import { generateCode } from '../code/diff/helpers';
-import { formatContent, readFile, writeFile } from '../code/files';
+import { createHash, formatContent, readFile, writeFile } from '../code/files';
 import { parseJsxFile } from '../code/helpers';
 import { GENERATE_CODE_OPTIONS, getValidFiles, isReactFragment } from './helpers';
+import path from 'path';
+import type { HashesJson } from '@onlook/models';
 
 export async function removeIdsFromDirectory(dirPath: string) {
     const filePaths = await getValidFiles(dirPath);
     for (const filePath of filePaths) {
-        await removeIdsFromFile(filePath);
+        const isFileChanged = await checkIfFileChanged(dirPath, filePath);
+        if (isFileChanged) {
+            await removeIdsFromFile(filePath);
+        }
     }
 }
 
@@ -67,3 +72,53 @@ export function removeIdsFromAst(ast: t.File) {
         },
     });
 }
+
+export async function checkIfFileChanged(projectDir: string, filePath: string): Promise<boolean> {
+    if (!filePath) {
+        console.error('No file path provided.');
+        return false;
+    }
+
+    const cacheDir = path.join(projectDir, '.onlook', 'cache');
+    const hashesFilePath = path.join(cacheDir, 'hashes.json');
+
+    let hashesJson: HashesJson = {};
+
+    try {
+        const existing = await readFile(hashesFilePath);
+        if (existing?.trim()) {
+            hashesJson = JSON.parse(existing);
+        }
+    } catch (error) {
+        console.error('Failed to read hashes.json. Proceeding without cache.');
+        return true;
+    }
+
+    const storedEntry = hashesJson[filePath];
+    if (!storedEntry) {
+        console.warn(`No stored hash for file: ${filePath}`);
+        return true;
+    }
+
+    const fileContentWithIds = await readFile(filePath);
+    if (!fileContentWithIds || fileContentWithIds.trim() === '') {
+        console.error(`Failed to get content for file: ${filePath}`);
+        return false;
+    }
+
+    const calculatedHash = createHash(fileContentWithIds);
+
+    if (calculatedHash === storedEntry.hash) {
+        try {
+            const cacheFileContent = await readFile(storedEntry.cache_path);
+            if (cacheFileContent?.trim()) {
+                await writeFile(filePath, cacheFileContent);
+                return false;
+            }
+        } catch (err) {
+            console.error(`Failed to read cached file at ${storedEntry.cache_path}:`, err);
+        }
+    }
+
+    return true;
+}
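
Note: checkIfFileChanged above expects .onlook/cache/hashes.json to map each source file path to the hash recorded at setup time plus the path of its cached snapshot. A minimal sketch of the shape it parses — the project path, snapshot name, and digest below are made up for illustration, not produced by a real run:

    import type { HashesJson } from '@onlook/models';

    const example: HashesJson = {
        '/my-project/app/page.tsx': {
            // sha256 hex of the file as it exists after the ids are written
            // (generateAndStoreHash runs after processFileForMapping); placeholder value here
            hash: '<sha256-of-file-with-ids>',
            // snapshot written by cacheFile before the ids were injected
            cache_path: '/my-project/.onlook/cache/page-1a2b3c4d5e.tsx',
        },
    };

When the current hash still matches the stored one, checkIfFileChanged copies the snapshot back over the file and returns false, so removeIdsFromFile is skipped; a mismatch or missing entry falls through to the existing AST-based cleanup.
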
diff --git a/apps/studio/electron/main/run/helpers.ts b/apps/studio/electron/main/run/helpers.ts
index f69bb32204..b448b4caf9 100644
--- a/apps/studio/electron/main/run/helpers.ts
+++ b/apps/studio/electron/main/run/helpers.ts
@@ -22,6 +22,7 @@ export const IGNORED_DIRECTORIES = [
     'build',
     '.next',
     '.git',
+    '.onlook',
     CUSTOM_OUTPUT_DIR,
 ];
 
diff --git a/apps/studio/electron/main/run/index.ts b/apps/studio/electron/main/run/index.ts
index e76af94978..1d9b64671f 100644
--- a/apps/studio/electron/main/run/index.ts
+++ b/apps/studio/electron/main/run/index.ts
@@ -4,10 +4,15 @@ import { RunState } from '@onlook/models/run';
 import { subscribe, type AsyncSubscription } from '@parcel/watcher';
 import { mainWindow } from '..';
 import { sendAnalytics } from '../analytics';
-import { writeFile } from '../code/files';
+import { removeCacheDirectory, writeFile } from '../code/files';
 import { removeIdsFromDirectory } from './cleanup';
 import { ALLOWED_EXTENSIONS, getValidFiles, IGNORED_DIRECTORIES } from './helpers';
-import { createMappingFromContent, getFileWithIds as getFileContentWithIds } from './setup';
+import {
+    cacheFile,
+    createMappingFromContent,
+    getFileWithIds as getFileContentWithIds,
+    generateAndStoreHash,
+} from './setup';
 import terminal from './terminal';
 
 class RunManager {
@@ -162,7 +167,9 @@
     async addIdsToDirectoryAndCreateMapping(dirPath: string): Promise<string[]> {
         const filePaths = await getValidFiles(dirPath);
         for (const filePath of filePaths) {
+            await cacheFile(filePath, dirPath);
             await this.processFileForMapping(filePath);
+            await generateAndStoreHash(filePath, dirPath);
         }
         return filePaths;
     }
@@ -200,6 +207,7 @@
     async cleanProjectDir(folderPath: string): Promise<void> {
         await removeIdsFromDirectory(folderPath);
+        await removeCacheDirectory(folderPath);
         this.runningDirs.delete(folderPath);
     }
 
diff --git a/apps/studio/electron/main/run/setup.ts b/apps/studio/electron/main/run/setup.ts
index 02c25208ea..25f453495f 100644
--- a/apps/studio/electron/main/run/setup.ts
+++ b/apps/studio/electron/main/run/setup.ts
@@ -1,9 +1,12 @@
+import * as path from 'path';
+import { createHash } from 'crypto';
+import { promises as fs } from 'fs';
 import traverse, { NodePath } from '@babel/traverse';
 import * as t from '@babel/types';
 import { EditorAttributes } from '@onlook/models/constants';
 import type { DynamicType, TemplateNode } from '@onlook/models/element';
 import { generateCode } from '../code/diff/helpers';
-import { formatContent, readFile } from '../code/files';
+import { checkIfCacheDirectoryExists, formatContent, readFile, writeFile } from '../code/files';
 import { parseJsxFile } from '../code/helpers';
 import {
     GENERATE_CODE_OPTIONS,
@@ -14,6 +17,7 @@
     isNodeElementArray,
     isReactFragment,
 } from './helpers';
+import type { HashesJson } from '@onlook/models';
 
 export async function getFileWithIds(filePath: string): Promise<string | null> {
     const content = await readFile(filePath);
@@ -170,3 +174,69 @@ function createMapping(ast: t.File, filename: string): Record<string, TemplateNode> {
+
+export async function cacheFile(filePath: string, projectDir: string): Promise<void> {
+    await checkIfCacheDirectoryExists(projectDir);
+
+    const content = await readFile(filePath);
+
+    if (!content || content.trim() === '') {
+        console.error(`Failed to get content for file: ${filePath}`);
+        return;
+    }
+
+    const cacheDir = path.join(projectDir, '.onlook', 'cache');
+
+    const baseName = path.basename(filePath, path.extname(filePath));
+
+    const ext = path.extname(filePath);
+    const fileNameHash = createHash('sha256').update(filePath).digest('hex').slice(0, 10);
+
+    const cacheFileName = `${baseName}-${fileNameHash}${ext}`;
+
+    const cacheFilePath = path.join(cacheDir, cacheFileName);
+
+    await writeFile(cacheFilePath, content);
+}
+
+export async function generateAndStoreHash(filePath: string, projectDir: string) {
+    await checkIfCacheDirectoryExists(projectDir);
+
+    const cacheDir = path.join(projectDir, '.onlook', 'cache');
+    const hashesFilePath = path.join(cacheDir, 'hashes.json');
+
+    const content = await readFile(filePath);
+
+    if (!content || content.trim() === '') {
+        console.error(`Failed to get content for file: ${filePath}`);
+        return;
+    }
+
+    const hash = createHash('sha256').update(content).digest('hex');
+
+    let hashesJson: HashesJson = {};
+
+    try {
+        const existing = await readFile(hashesFilePath);
+        if (existing) {
+            hashesJson = JSON.parse(existing);
+        }
+    } catch (e) {
+        console.log('No existing hashes.json found, creating new one.');
+    }
+
+    const baseName = path.basename(filePath, path.extname(filePath));
+    const ext = path.extname(filePath);
+    const fileNameHash = createHash('sha256').update(filePath).digest('hex').slice(0, 10);
+
+    const cacheFileName = `${baseName}-${fileNameHash}${ext}`;
+
+    const cacheFilePath = path.join(cacheDir, cacheFileName);
+
+    hashesJson[filePath] = {
+        hash,
+        cache_path: cacheFilePath,
+    };
+
+    await fs.writeFile(hashesFilePath, JSON.stringify(hashesJson, null, 2), 'utf8');
+}
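
Note: cacheFile and generateAndStoreHash derive the snapshot name the same way: the original base name, a dash, the first 10 hex characters of sha256 over the full file path, then the original extension. A standalone sketch of that scheme — the helper name and example path are illustrative, not part of the patch:

    import { createHash } from 'crypto';
    import * as path from 'path';

    // Mirrors the naming used in setup.ts above.
    function cacheFileNameFor(filePath: string): string {
        const baseName = path.basename(filePath, path.extname(filePath));
        const ext = path.extname(filePath);
        const fileNameHash = createHash('sha256').update(filePath).digest('hex').slice(0, 10);
        return `${baseName}-${fileNameHash}${ext}`;
    }

    // cacheFileNameFor('/my-project/app/page.tsx') -> something like 'page-3f9c0a1b2d.tsx'

Hashing the full path keeps snapshots distinct for files that share a base name but live in different directories, since everything lands in the single flat .onlook/cache folder.
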
diff --git a/packages/models/src/cache/index.ts b/packages/models/src/cache/index.ts
new file mode 100644
index 0000000000..fec4dee524
--- /dev/null
+++ b/packages/models/src/cache/index.ts
@@ -0,0 +1,8 @@
+type HashEntry = {
+    hash: string;
+    cache_path: string;
+};
+
+export type HashesJson = {
+    [originalFilePath: string]: HashEntry;
+};
diff --git a/packages/models/src/index.ts b/packages/models/src/index.ts
index b183202bfc..009bb1141c 100644
--- a/packages/models/src/index.ts
+++ b/packages/models/src/index.ts
@@ -13,3 +13,4 @@ export * from './pages/';
 export * from './projects/';
 export * from './run/';
 export * from './settings/';
+export * from './cache/';
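
Note: end to end, the call order this patch wires up looks roughly like the sketch below, assembled from the hunks above rather than introducing any new API:

    // Setup — RunManager.addIdsToDirectoryAndCreateMapping, for each valid file:
    //   await cacheFile(filePath, dirPath);              // snapshot before ids are injected
    //   await this.processFileForMapping(filePath);      // existing step that writes the ids
    //   await generateAndStoreHash(filePath, dirPath);   // record the post-id sha256 in hashes.json
    //
    // Cleanup — removeIdsFromDirectory, for each valid file:
    //   if (await checkIfFileChanged(dirPath, filePath)) {
    //       await removeIdsFromFile(filePath);           // edited since setup: strip ids via the AST
    //   }                                                // otherwise the cached snapshot was restored
    //
    // cleanProjectDir then calls removeCacheDirectory(folderPath), deleting the whole .onlook folder.
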