diff --git a/localtypings/projectheader.d.ts b/localtypings/projectheader.d.ts index 07d8191e85a9..3be70ae37e77 100644 --- a/localtypings/projectheader.d.ts +++ b/localtypings/projectheader.d.ts @@ -28,23 +28,30 @@ declare namespace pxt.workspace { tutorialCompleted?: pxt.tutorial.TutorialCompletionInfo; // workspace guid of the extension under test extensionUnderTest?: string; - cloudSync?: boolean; // Mark a header for syncing with a cloud provider + // id of cloud user who created this project + cloudUserId?: string; } export interface Header extends InstallHeader { id: string; // guid (generated by us) path?: string; // for workspaces that require it - recentUse: number; // seconds since epoch - modificationTime: number; // seconds since epoch + recentUse: number; // seconds since epoch UTC (cloud safe) + modificationTime: number; // seconds since epoch UTC (cloud safe) icon?: string; // icon uri isDeleted: boolean; // mark whether or not a header has been deleted saveId?: any; // used to determine whether a project has been edited while we're saving to cloud + + // TODO @darzu: remove all of these? - // For cloud providers - blobId: string; // id of the cloud blob holding this script - blobVersion: string; // version of the cloud blob - blobCurrent: boolean; // has the current version of the script been pushed to cloud + // For cloud providers -- DEPRECATED + blobId_: string; // id of the cloud blob holding this script + blobVersion_: string; // version of the cloud blob + blobCurrent_: boolean; // has the current version of the script been pushed to cloud + + cloudVersion: string; // The cloud-assigned version number (e.g. etag) + // TODO @darzu: "cloudCurrent" seems very bad. This is a stateful notation and it is hard to reason about whether or not this is true. + cloudCurrent: boolean; // Has the current version of the project been pushed to cloud // Used for Updating projects backupRef?: string; // guid of backed-up project (present if an update was interrupted) diff --git a/localtypings/pxtpackage.d.ts b/localtypings/pxtpackage.d.ts index 4a7e0aac5c65..d940970ef01a 100644 --- a/localtypings/pxtpackage.d.ts +++ b/localtypings/pxtpackage.d.ts @@ -2,6 +2,7 @@ declare namespace pxt { type CodeCardType = "file" | "example" | "codeExample" | "tutorial" | "side" | "template" | "package" | "hw" | "forumUrl" | "forumExample" | "sharedExample"; type CodeCardEditorType = "blocks" | "js" | "py"; + type CodeCardCloudState = "local" | "cloud"; interface Map { [index: string]: T; @@ -159,6 +160,7 @@ declare namespace pxt { cardType?: CodeCardType; editor?: CodeCardEditorType; otherActions?: CodeCardAction[]; + cloudState?: CodeCardCloudState; header?: string; diff --git a/pxteditor/editor.ts b/pxteditor/editor.ts index d28a39751b6f..518d94b71c2b 100644 --- a/pxteditor/editor.ts +++ b/pxteditor/editor.ts @@ -215,12 +215,11 @@ namespace pxt.editor { importExampleAsync(options: ExampleImportOptions): Promise; showScriptManager(): void; importProjectDialog(): void; - cloudSync(): boolean; - cloudSignInDialog(): void; - cloudSignOut(): void; removeProject(): void; editText(): void; + hasCloudSync(): boolean; + getPreferredEditor(): string; saveAndCompile(): void; updateHeaderName(name: string): void; diff --git a/pxteditor/localStorage.ts b/pxteditor/localStorage.ts index 48057dd3a372..5171cff16524 100644 --- a/pxteditor/localStorage.ts +++ b/pxteditor/localStorage.ts @@ -1,4 +1,5 @@ namespace pxt.storage { + // TODO @darzu: why is this different from the WorkspaceProvider api? 
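+    // (Roughly, and as context only: pxt.storage is a synchronous key/value wrapper over
+    // window.localStorage used for small editor settings, e.g.
+    //   pxt.storage.setLocal("workspacesessionid", sessionID);
+    // whereas pxt.workspace.WorkspaceProvider is the asynchronous, versioned, per-project
+    // API (listAsync/getAsync/setAsync/deleteAsync/resetAsync) implemented by the
+    // workspaces under webapp/src/workspaces.)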
interface IStorage { removeItem(key: string): void; getItem(key: string): string; diff --git a/pxteditor/workspace.ts b/pxteditor/workspace.ts index 1202ac5aeaba..88ebcda375eb 100644 --- a/pxteditor/workspace.ts +++ b/pxteditor/workspace.ts @@ -3,6 +3,8 @@ namespace pxt.workspace { export type ScriptText = pxt.Map; + + // TODO @darzu: ugh. why is there a "Project" that is different from a "File". They are nearly identical... export interface Project { header?: Header; text?: ScriptText; @@ -14,17 +16,28 @@ namespace pxt.workspace { url: string; } + // TODO @darzu: why is version "any" ? that's really annoying to reason about. used as: string | ScriptText + // TODO @darzu: _rev is a string; modificationTime is an int export type Version = any; export interface File { header: Header; text: ScriptText; + // This version field is reserved for the storage mechanism. E.g. PouchDB requires a _rev field containing + // the currently stored version. version: Version; } export interface WorkspaceProvider { - listAsync(): Promise; // called from workspace.syncAsync (including upon startup) + listAsync(): Promise; + /* + Tries to get the corrisponding File with the current version if it exists. + If it does not exist, returns undefined. + */ getAsync(h: Header): Promise; + /* + If text is empty, then only update the header. + */ setAsync(h: Header, prevVersion: Version, text?: ScriptText): Promise; deleteAsync?: (h: Header, prevVersion: Version) => Promise; resetAsync(): Promise; @@ -54,9 +67,12 @@ namespace pxt.workspace { id: U.guidGen(), recentUse: modTime, modificationTime: modTime, - blobId: null, - blobVersion: null, - blobCurrent: false, + blobId_: null, + blobVersion_: null, + blobCurrent_: false, + cloudUserId: null, + cloudCurrent: false, + cloudVersion: null, isDeleted: false, } return header diff --git a/pxtlib/util.ts b/pxtlib/util.ts index ddc7ddc674ff..ea97c6b05994 100644 --- a/pxtlib/util.ts +++ b/pxtlib/util.ts @@ -1409,6 +1409,10 @@ namespace ts.pxtc.Util { return res }) } + + export function unreachable(...ns: never[]): never { + throw new Error("Type error: this code should be unreachable"); + } } namespace ts.pxtc.BrowserImpl { diff --git a/webapp/src/app.tsx b/webapp/src/app.tsx index ab9da57404d7..3022822b6418 100644 --- a/webapp/src/app.tsx +++ b/webapp/src/app.tsx @@ -6,8 +6,8 @@ import * as React from "react"; import * as ReactDOM from "react-dom"; -import * as workspace from "./workspace"; -import * as cloudsync from "./cloudsync"; +import * as workspace from "./workspaces/workspace"; +import * as cloudsync from "./workspaces/cloudsync"; import * as data from "./data"; import * as pkg from "./package"; import * as core from "./core"; @@ -95,7 +95,7 @@ function setEditor(editor: ProjectView) { } export class ProjectView - extends data.Component + extends auth.Component implements IProjectView { editor: srceditor.Editor; editorFile: pkg.File; @@ -173,7 +173,6 @@ export class ProjectView this.openDeviceSerial = this.openDeviceSerial.bind(this); this.toggleGreenScreen = this.toggleGreenScreen.bind(this); this.toggleSimulatorFullscreen = this.toggleSimulatorFullscreen.bind(this); - this.cloudSignInComplete = this.cloudSignInComplete.bind(this); this.toggleSimulatorCollapse = this.toggleSimulatorCollapse.bind(this); this.showKeymap = this.showKeymap.bind(this); this.toggleKeymap = this.toggleKeymap.bind(this); @@ -264,8 +263,13 @@ export class ProjectView } this.saveFileAsync().done(); } else if (active) { + // TODO @darzu: new code path maybe: + // 
workspace.syncAsync().then(changed => this.reloadAsync()) + // reloadAsync: this.loadHeaderAsync() + // OR: subscribe to data api, on change, reload + data.invalidate("header:*") - if (workspace.isHeadersSessionOutdated() + if (workspace.isHeadersSessionOutdated() // TODO @darzu: sync check point || workspace.isHeaderSessionOutdated(this.state.header)) { pxt.debug('workspace: changed, reloading...') let id = this.state.header ? this.state.header.id : ''; @@ -1300,6 +1304,7 @@ export class ProjectView return checkAsync.then(() => this.openHome()); let p = Promise.resolve(); + // TODO @darzu: sync checkpoint if (workspace.isHeadersSessionOutdated()) { // reload header before loading pxt.log(`sync before load`) p = p.then(() => workspace.syncAsync().then(() => { })) @@ -1335,7 +1340,11 @@ export class ProjectView if (editorState.searchBar === undefined) editorState.searchBar = oldEditorState.searchBar; } - if (!h.cloudSync && this.cloudSync()) h.cloudSync = true; + // If user is signed in, sync this project to the cloud. + if (this.hasCloudSync()) { + // TODO @darzu: this might not be where we want to attach the user to the project + h.cloudUserId = this.getUser()?.id; + } return compiler.newProjectAsync() .then(() => h.backupRef ? workspace.restoreFromBackupAsync(h) : Promise.resolve()) @@ -1442,6 +1451,7 @@ export class ProjectView } // update recentUse on the header + // TODO @darzu: this is saving hte project without text... return workspace.saveAsync(h) }).then(() => this.loadTutorialFiltersAsync()) .finally(() => { @@ -2006,62 +2016,6 @@ export class ProjectView }) } - /////////////////////////////////////////////////////////// - //////////// Cloud //////////// - /////////////////////////////////////////////////////////// - - cloudSync() { - return this.hasSync(); - } - - cloudSignInDialog() { - const providers = cloudsync.providers(); - if (providers.length == 0) - return; - if (providers.length == 1) - providers[0].loginAsync().then(() => { - this.cloudSignInComplete(); - }) - else { - // TODO: Revisit in new cloud sync - //this.signInDialog.show(); - } - } - - cloudSignOut() { - core.confirmAsync({ - header: lf("Sign out"), - body: lf("You are signing out. 
Make sure that you commited all your changes, local projects will be deleted."), - agreeClass: "red", - agreeIcon: "sign out", - agreeLbl: lf("Sign out"), - }).then(r => { - if (r) { - const inEditor = !!this.state.header; - // Reset the cloud workspace - return workspace.resetCloudAsync() - .then(() => { - if (inEditor) { - this.openHome(); - } - if (this.home) { - this.home.forceUpdate(); - } - }) - } - return Promise.resolve(); - }); - } - - cloudSignInComplete() { - pxt.log('cloud sign in complete'); - initLogin(); - cloudsync.syncAsync() - .then(() => { - this.forceUpdate(); - }).done(); - } - /////////////////////////////////////////////////////////// //////////// Home ///////////// /////////////////////////////////////////////////////////// @@ -2251,7 +2205,7 @@ export class ProjectView pubCurrent: false, target: pxt.appTarget.id, targetVersion: pxt.appTarget.versions.target, - cloudSync: this.cloudSync(), + cloudUserId: this.getUser()?.id, temporary: options.temporary, tutorial: options.tutorial, extensionUnderTest: options.extensionUnderTest @@ -3090,6 +3044,10 @@ export class ProjectView } } + hasCloudSync() { + return this.isLoggedIn(); + } + showScriptManager() { this.scriptManagerDialog.show(); } @@ -3327,6 +3285,7 @@ export class ProjectView } showResetDialog() { + console.log("showResetDialog (1)") dialogs.showResetDialogAsync().done(r => { if (!r) return Promise.resolve(); return Promise.resolve() @@ -3334,6 +3293,7 @@ export class ProjectView return pxt.winrt.releaseAllDevicesAsync(); }) .then(() => { + console.log("showResetDialog (2)") return this.resetWorkspace(); }); }); @@ -4540,12 +4500,19 @@ document.addEventListener("DOMContentLoaded", () => { const isSandbox = pxt.shell.isSandboxMode() || pxt.shell.isReadOnly(); const isController = pxt.shell.isControllerMode(); const theme = pxt.appTarget.appTheme; - if (query["ws"]) workspace.setupWorkspace(query["ws"]); + // TODO @darzu: this is bad. we shouldn't be choosing a workspace in two places (see "chooseWorkspace") + if (query["ws"]) { + workspace.setupWorkspace(query["ws"] as workspace.WorkspaceKind) + } else if ((theme.allowParentController || isController) && pxt.BrowserUtils.isIFrame()) workspace.setupWorkspace("iframe"); else if (isSandbox) workspace.setupWorkspace("mem"); else if (pxt.winrt.isWinRT()) workspace.setupWorkspace("uwp"); else if (pxt.BrowserUtils.isIpcRenderer()) workspace.setupWorkspace("idb"); - else if (pxt.BrowserUtils.isLocalHost() || pxt.BrowserUtils.isPxtElectron()) workspace.setupWorkspace("fs"); + // TODO @darzu: uncomment. this disables filesystem workspace + //else if (pxt.BrowserUtils.isLocalHost() || pxt.BrowserUtils.isPxtElectron()) workspace.setupWorkspace("fs"); + else { + workspace.setupWorkspace("browser"); + } Promise.resolve() .then(async () => { const href = window.location.href; diff --git a/webapp/src/auth.ts b/webapp/src/auth.ts index d562faa9640c..7e2bfdd7030e 100644 --- a/webapp/src/auth.ts +++ b/webapp/src/auth.ts @@ -465,12 +465,18 @@ export async function initialUserPreferences(): Promise { const state = getState(); @@ -637,5 +643,5 @@ data.mountVirtualApi(MODULE, { getSync: authApiHandler }); // ClouddWorkspace must be included after we mount our virtual APIs. 
-import * as cloudWorkspace from "./cloudworkspace"; +import * as cloudWorkspace from "./cloud"; cloudWorkspace.init(); diff --git a/webapp/src/blocks.tsx b/webapp/src/blocks.tsx index a03b96b35491..d6efd69d78b3 100644 --- a/webapp/src/blocks.tsx +++ b/webapp/src/blocks.tsx @@ -8,7 +8,7 @@ import * as toolboxeditor from "./toolboxeditor" import * as compiler from "./compiler" import * as toolbox from "./toolbox"; import * as snippets from "./blocksSnippets"; -import * as workspace from "./workspace"; +import * as workspace from "./workspaces/workspace"; import * as simulator from "./simulator"; import * as dialogs from "./dialogs"; import * as blocklyFieldView from "./blocklyFieldView"; diff --git a/webapp/src/cloud.ts b/webapp/src/cloud.ts new file mode 100644 index 000000000000..8f6ff44f2020 --- /dev/null +++ b/webapp/src/cloud.ts @@ -0,0 +1,208 @@ +import * as core from "./core"; +import * as auth from "./auth"; +import * as ws from "./workspaces/workspace"; +import * as data from "./data"; +import * as workspace from "./workspaces/workspace"; + +type Version = pxt.workspace.Version; +type File = pxt.workspace.File; +type Header = pxt.workspace.Header; +type ScriptText = pxt.workspace.ScriptText; + +import U = pxt.Util; +import { toDbg } from "./workspaces/cloudsyncworkspace"; + +const state = { + uploadCount: 0, + downloadCount: 0 +}; + +type CloudProject = { + id: string; + header: string; + text: string; + version: string; +}; + +export async function listAsync(): Promise { + return new Promise(async (resolve, reject) => { + // TODO @darzu: this is causing errors? + const result = await auth.apiAsync("/api/user/project"); + console.log("cloud.ts:listAsync"); // TODO @darzu: dbg + if (result.success) { + const userId = auth.user()?.id; + const headers: Header[] = result.resp.map(proj => { + const header = JSON.parse(proj.header); + header.cloudUserId = userId; + header.cloudVersion = proj.version; + header.cloudCurrent = true; + return header; + }); + // TODO @darzu: dbg + console.dir(headers.map(toDbg)) + resolve(headers); + } else { + reject(new Error(result.errmsg)); + } + }); +} + +export function getAsync(h: Header): Promise { + console.log(`cloud.ts:getAsync ${h.id}`); // TODO @darzu: + return new Promise(async (resolve, reject) => { + const result = await auth.apiAsync(`/api/user/project/${h.id}`); + if (result.success) { + const userId = auth.user()?.id; + const project = result.resp; + const header = JSON.parse(project.header); + const text = JSON.parse(project.text); + const version = project.version; + const file: File = { + header, + text, + version + }; + file.header.cloudCurrent = true; + file.header.cloudVersion = file.version; + file.header.cloudUserId = userId; + resolve(file); + } else if (result.statusCode === 404) { + resolve(null); + } else { + reject(new Error(result.errmsg)); + } + }); +} + +// TODO @darzu: is it okay to export this? +export function setAsync(h: Header, prevVersion: Version, text?: ScriptText): Promise { + console.log(`cloud.ts:setAsync ${h.id}`); // TODO @darzu: + return new Promise(async (resolve, reject) => { + const userId = auth.user()?.id; + h.cloudUserId = userId; + h.cloudCurrent = false; + h.cloudVersion = prevVersion; + const project: CloudProject = { + id: h.id, + header: JSON.stringify(h), + text: text ? 
JSON.stringify(text) : undefined, + version: prevVersion + } + const result = await auth.apiAsync('/api/user/project', project); + if (result.success) { + h.cloudCurrent = true; + h.cloudVersion = result.resp; + resolve(result.resp); + } else { + // TODO: Handle reject due to version conflict + reject(new Error(result.errmsg)); + } + }); +} + +export function deleteAsync(h: Header, prevVersion: Version, text?: ScriptText): Promise { + console.log(`cloud.ts:deleteAsync ${h.id}`); // TODO @darzu: + return Promise.resolve(); +} + +export function resetAsync(): Promise { + console.log(`cloud.ts:resetAsync`); // TODO @darzu: + return Promise.resolve(); +} + +export async function syncAsync(): Promise { + if (!auth.hasIdentity()) { return; } + if (!await auth.loggedIn()) { return; } + console.log(`cloud.ts:syncAsync`); // TODO @darzu: + try { + const userId = auth.user()?.id; + // Filter to cloud-synced headers owned by the current user. + const localCloudHeaders = workspace.getHeaders(true) + .filter(h => h.cloudUserId && h.cloudUserId === userId); + const remoteHeaders = await listAsync(); + const remoteHeaderMap = U.toDictionary(remoteHeaders, h => h.id); + const tasks = localCloudHeaders.map(async (local) => { + const remote = remoteHeaderMap[local.id]; + delete remoteHeaderMap[local.id]; + if (remote) { + if (local.cloudVersion !== remote.cloudVersion) { + if (local.cloudCurrent) { + // No local changes, download latest. + const file = await getAsync(local); + workspace.saveAsync(file.header, file.text, true); + } else { + // Conflict. + // TODO: Figure out how to register these. + return Promise.resolve(); + } + } else { + if (local.isDeleted) { + // Delete remote copy. + //return deleteAsync(local, local.cloudVersion); + // Mark remote copy as deleted. + remote.isDeleted = true; + return setAsync(remote, null, {}); + } + if (remote.isDeleted) { + // Delete local copy. + local.isDeleted = true; + return workspace.forceSaveAsync(local, {}) + .then(() => { data.clearCache(); }) + } + if (!local.cloudCurrent) { + // Local changes need to be synced up. + const text = await workspace.getTextAsync(local.id); + return setAsync(local, local.cloudVersion, text); + } + // Nothing to do. We're up to date locally. + return Promise.resolve(); + } + } else { + // Anomaly. Local cloud synced project exists, but no record of + // it on remote. We cannot know if there's a conflict. Convert + // to a local project. + delete local.cloudUserId; + delete local.cloudVersion; + delete local.cloudCurrent; + return workspace.saveAsync(local); + } + }); + remoteHeaders.forEach(async (remote) => { + if (remoteHeaderMap[remote.id]) { + // Project exists remotely and not locally, download it. 
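+                    // (Entries that also exist locally were removed from remoteHeaderMap in
+                    // the loop above, so anything still present here is remote-only.)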
+ const file = await getAsync(remote); + tasks.push(workspace.importAsync(file.header, file.text)); + } + }) + await Promise.all(tasks); + } + catch (e) { + pxt.reportException(e); + } +} + +/** + * Virtual API + */ + +const MODULE = "cloud"; +const FIELD_UPLOADING = "uploading"; +const FIELD_DOWNLOADING = "downloading"; +const FIELD_WORKING = "working"; +export const UPLOADING = `${MODULE}:${FIELD_UPLOADING}`; +export const DOWNLOADING = `${MODULE}:${FIELD_DOWNLOADING}`; +export const WORKING = `${MODULE}:${FIELD_WORKING}`; + +function cloudApiHandler(p: string): any { + switch (data.stripProtocol(p)) { + case FIELD_UPLOADING: return state.uploadCount > 0; + case FIELD_DOWNLOADING: return state.downloadCount > 0; + case WORKING: return cloudApiHandler(UPLOADING) || cloudApiHandler(DOWNLOADING); + } + return null; +} + +export function init() { + // 'cloudws' because 'cloud' protocol is already taken. + data.mountVirtualApi("cloudws", { getSync: cloudApiHandler }); +} diff --git a/webapp/src/cloudworkspace.ts b/webapp/src/cloudworkspace.ts deleted file mode 100644 index 2e436c7c770e..000000000000 --- a/webapp/src/cloudworkspace.ts +++ /dev/null @@ -1,141 +0,0 @@ -import * as core from "./core"; -import * as auth from "./auth"; -import * as ws from "./workspace"; -import * as data from "./data"; -import * as workspace from "./workspace"; - -type Version = pxt.workspace.Version; -type File = pxt.workspace.File; -type Header = pxt.workspace.Header; -type Project = pxt.workspace.Project; -type ScriptText = pxt.workspace.ScriptText; -type WorkspaceProvider = pxt.workspace.WorkspaceProvider; - -import U = pxt.Util; - -const state = { - uploadCount: 0, - downloadCount: 0 -}; - -type CloudProject = { - id: string; - header: string; - text: string; - version: string; -}; - -function listAsync(): Promise { - return new Promise(async (resolve, reject) => { - const result = await auth.apiAsync("/api/user/project"); - if (result.success) { - const headers = result.resp.map(proj => JSON.parse(proj.header)); - resolve(headers); - } else { - reject(new Error(result.errmsg)); - } - }); -} - -function getAsync(h: Header): Promise { - return new Promise(async (resolve, reject) => { - const result = await auth.apiAsync(`/api/user/project/${h.id}`); - if (result.success) { - const project = result.resp; - const header = JSON.parse(project.header); - const text = JSON.parse(project.text); - const version = project.version; - const file: File = { - header, - text, - version - }; - resolve(file); - } else { - reject(new Error(result.errmsg)); - } - }); -} - -function setAsync(h: Header, prevVersion: Version, text?: ScriptText): Promise { - return new Promise(async (resolve, reject) => { - const project: CloudProject = { - id: h.id, - header: JSON.stringify(h), - text: text ? 
JSON.stringify(text) : undefined, - version: prevVersion - } - const result = await auth.apiAsync('/api/user/project', project); - if (result.success) { - resolve(result.resp); - } else { - // TODO: Handle reject due to version conflict - reject(new Error(result.errmsg)); - } - }); -} - -function deleteAsync(h: Header, prevVersion: Version, text?: ScriptText): Promise { - return Promise.resolve(); -} - -function resetAsync(): Promise { - return Promise.resolve(); -} - -export const provider: WorkspaceProvider = { - getAsync, - setAsync, - deleteAsync, - listAsync, - resetAsync, -} - -/** - * Virtual API - */ - -const MODULE = "cloud"; -const FIELD_UPLOADING = "uploading"; -const FIELD_DOWNLOADING = "downloading"; -const FIELD_WORKING = "working"; -export const UPLOADING = `${MODULE}:${FIELD_UPLOADING}`; -export const DOWNLOADING = `${MODULE}:${FIELD_DOWNLOADING}`; -export const WORKING = `${MODULE}:${FIELD_WORKING}`; - -function cloudApiHandler(p: string): any { - switch (data.stripProtocol(p)) { - case FIELD_UPLOADING: return state.uploadCount > 0; - case FIELD_DOWNLOADING: return state.downloadCount > 0; - case WORKING: return cloudApiHandler(UPLOADING) || cloudApiHandler(DOWNLOADING); - } - return null; -} - -export function init() { - // 'cloudws' because 'cloud' protocol is already taken. - data.mountVirtualApi("cloudws", { getSync: cloudApiHandler }); - data.subscribe(userSubscriber, auth.LOGGED_IN); -} - -let prevWorkspaceType: string; - -async function updateWorkspace() { - const loggedIn = await auth.loggedIn(); - if (loggedIn) { - // TODO: Handling of 'prev' is pretty hacky. Need to improve it. - let prev = workspace.switchToCloudWorkspace(); - if (prev !== "cloud") { - prevWorkspaceType = prev; - } - await workspace.syncAsync(); - } else if (prevWorkspaceType) { - workspace.switchToWorkspace(prevWorkspaceType); - await workspace.syncAsync(); - } -} - -const userSubscriber: data.DataSubscriber = { - subscriptions: [], - onDataChanged: async () => updateWorkspace() -}; diff --git a/webapp/src/codecard.tsx b/webapp/src/codecard.tsx index 82f2465ab154..0b667ea9254b 100644 --- a/webapp/src/codecard.tsx +++ b/webapp/src/codecard.tsx @@ -118,6 +118,9 @@ export class CodeCardView extends data.Component { {card.time ?
{card.tutorialLength ?  {lf("{0}/{1}", (card.tutorialStep || 0) + 1, card.tutorialLength)} : undefined} {card.time ? {pxt.Util.timeSince(card.time)} : null} + {card.cloudState === "cloud" && + + }
: undefined} {card.extracontent || card.learnMoreUrl || card.buyUrl || card.feedbackUrl ?
diff --git a/webapp/src/compiler.ts b/webapp/src/compiler.ts index 09a6bf1e5f1d..964e4f5db7ee 100644 --- a/webapp/src/compiler.ts +++ b/webapp/src/compiler.ts @@ -1,6 +1,6 @@ import * as pkg from "./package"; import * as core from "./core"; -import * as workspace from "./workspace"; +import * as workspace from "./workspaces/workspace"; import U = pxt.Util; diff --git a/webapp/src/container.tsx b/webapp/src/container.tsx index 2822387703df..4a08640a325f 100644 --- a/webapp/src/container.tsx +++ b/webapp/src/container.tsx @@ -8,7 +8,7 @@ import * as container from "./container"; import * as core from "./core"; import * as auth from "./auth"; import * as identity from "./identity"; -import * as cloudsync from "./cloudsync"; +import * as cloudsync from "./workspaces/cloudsync"; import * as pkg from "./package"; type ISettingsProps = pxt.editor.ISettingsProps; diff --git a/webapp/src/db.ts b/webapp/src/db.ts index 77b0a90bb7c7..b67be4489ef3 100644 --- a/webapp/src/db.ts +++ b/webapp/src/db.ts @@ -14,7 +14,10 @@ const PouchDB = require("pouchdb") let _db: Promise = undefined; export function getDbAsync(): Promise { - if (_db) return _db; + if (_db) { + (window as any).db = _db + return _db; + } return _db = Promise.resolve() .then(() => { @@ -30,9 +33,17 @@ export function getDbAsync(): Promise { } export function destroyAsync(): Promise { + console.log("destroying db! (1)") // TODO @darzu: dbg return !_db ? Promise.resolve() : _db.then((db: any) => { - db.destroy(); + console.log("destroying db! (2)") // TODO @darzu: dbg + const res: Promise = db.destroy() + res.then(_ => console.log("db destroyed")) // TODO @darzu: _db = undefined; + return res + }).then(r => { + console.log("destroy res") + console.dir(r) + return r }); } @@ -40,10 +51,16 @@ export class Table { constructor(public name: string) { } getAsync(id: string): Promise { - return getDbAsync().then(db => db.get(this.name + "--" + id)).then((v: any) => { - v.id = id - return v - }) + return getDbAsync().then(db => db.get(this.name + "--" + id)) + .then((v: any) => { + v.id = id + return v + }) + .catch(e => { + // not found + // TODO @darzu: trace users to see if this new behavior breaks assumptions + return undefined + }) } getAllAsync(): Promise { @@ -75,9 +92,15 @@ export class Table { .catch(e => { if (e.status == 409) { // conflict while writing key, ignore. - pxt.debug(`table: set conflict (409)`); + pxt.debug(`table: set conflict (409) for ${obj._id}#${obj._rev}`); return undefined; } + if (e.status == 400) { + // bad request; likely _rev format was wrong or something similiar + console.log(`table: bad request ${e.name}:${e.message} for ${obj._id}#${obj._rev}`); // TODO @darzu: + // TODO @darzu: what's the right behavior here? Do we ever expect a 400 in normal operation? 
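+                    // (CouchDB/PouchDB reports 400 for a malformed request, e.g. an invalid
+                    // _rev format, whereas a genuine write conflict surfaces as 409 above.)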
+ // return undefined; + } pxt.reportException(e); pxt.log(`table: set failed, cleaning translation db`) // clean up translation and try again diff --git a/webapp/src/dialogs.tsx b/webapp/src/dialogs.tsx index 9e723800e976..d859123998fa 100644 --- a/webapp/src/dialogs.tsx +++ b/webapp/src/dialogs.tsx @@ -5,7 +5,7 @@ import * as sui from "./sui"; import * as core from "./core"; import * as coretsx from "./coretsx"; import * as pkg from "./package"; -import * as cloudsync from "./cloudsync"; +import * as cloudsync from "./workspaces/cloudsync"; import Cloud = pxt.Cloud; import Util = pxt.Util; diff --git a/webapp/src/githubbutton.tsx b/webapp/src/githubbutton.tsx index de00121fd9d8..797c6695589c 100644 --- a/webapp/src/githubbutton.tsx +++ b/webapp/src/githubbutton.tsx @@ -1,8 +1,8 @@ import * as React from "react"; import * as sui from "./sui"; import * as pkg from "./package"; -import * as cloudsync from "./cloudsync"; -import * as workspace from "./workspace"; +import * as cloudsync from "./workspaces/cloudsync"; +import * as workspace from "./workspaces/workspace"; interface GithubButtonProps extends pxt.editor.ISettingsProps { className?: string; diff --git a/webapp/src/gitjson.tsx b/webapp/src/gitjson.tsx index a582cc90eb9f..327b1b7599d3 100644 --- a/webapp/src/gitjson.tsx +++ b/webapp/src/gitjson.tsx @@ -3,13 +3,13 @@ import * as pkg from "./package" import * as core from "./core" import * as srceditor from "./srceditor" import * as sui from "./sui" -import * as workspace from "./workspace"; +import * as workspace from "./workspaces/workspace"; import * as dialogs from "./dialogs"; import * as coretsx from "./coretsx"; import * as data from "./data"; import * as markedui from "./marked"; import * as compiler from "./compiler"; -import * as cloudsync from "./cloudsync"; +import * as cloudsync from "./workspaces/cloudsync"; import * as tutorial from "./tutorial"; import * as _package from "./package"; diff --git a/webapp/src/identity.tsx b/webapp/src/identity.tsx index 82a69d9a006d..6e2737c7693d 100644 --- a/webapp/src/identity.tsx +++ b/webapp/src/identity.tsx @@ -4,7 +4,7 @@ import * as core from "./core"; import * as auth from "./auth"; import * as data from "./data"; import * as codecard from "./codecard"; -import * as cloudsync from "./cloudsync"; +import * as cloudsync from "./workspaces/cloudsync"; type ISettingsProps = pxt.editor.ISettingsProps; diff --git a/webapp/src/mkcdProvider.tsx b/webapp/src/mkcdProvider.tsx deleted file mode 100644 index 2332a6568efd..000000000000 --- a/webapp/src/mkcdProvider.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import * as React from "react"; -import * as sui from "./sui"; -import * as core from "./core"; -import * as auth from "./auth"; -import * as data from "./data"; -import * as codecard from "./codecard"; -import * as cloudsync from "./cloudsync"; - -// TODO: We need to do auth and cloud sync through this class. 
- -export class Provider extends cloudsync.ProviderBase implements cloudsync.Provider { - - constructor() { - super("mkcd", lf("MakeCode"), "xicon makecode", "https://www.makecode.com"); - } - - listAsync(): Promise { - throw new Error("Method not implemented."); - } - downloadAsync(id: string): Promise { - throw new Error("Method not implemented."); - } - uploadAsync(id: string, baseVersion: string, files: pxt.Map): Promise { - throw new Error("Method not implemented."); - } - deleteAsync(id: string): Promise { - throw new Error("Method not implemented."); - } - updateAsync(id: string, newName: string): Promise { - throw new Error("Method not implemented."); - } - getUserInfoAsync(): Promise { - throw new Error("Method not implemented."); - } -} diff --git a/webapp/src/monaco.tsx b/webapp/src/monaco.tsx index e69804ba948b..1d14d72ae434 100644 --- a/webapp/src/monaco.tsx +++ b/webapp/src/monaco.tsx @@ -12,7 +12,7 @@ import * as snippets from "./monacoSnippets" import * as pyhelper from "./monacopyhelper"; import * as simulator from "./simulator"; import * as toolbox from "./toolbox"; -import * as workspace from "./workspace"; +import * as workspace from "./workspaces/workspace"; import * as blocklyFieldView from "./blocklyFieldView"; import { ViewZoneEditorHost, ModalEditorHost, FieldEditorManager } from "./monacoFieldEditorHost"; import * as data from "./data"; diff --git a/webapp/src/monacoFlyout.tsx b/webapp/src/monacoFlyout.tsx index ee3c87a1bbb6..980b777d416e 100644 --- a/webapp/src/monacoFlyout.tsx +++ b/webapp/src/monacoFlyout.tsx @@ -2,7 +2,7 @@ import * as React from "react"; import * as compiler from "./compiler" import * as core from "./core"; import * as toolbox from "./toolbox"; -import * as workspace from "./workspace"; +import * as workspace from "./workspaces/workspace"; import * as data from "./data"; import * as auth from "./auth"; diff --git a/webapp/src/package.ts b/webapp/src/package.ts index b0d22a169284..3a540bbaafda 100644 --- a/webapp/src/package.ts +++ b/webapp/src/package.ts @@ -1,4 +1,4 @@ -import * as workspace from "./workspace"; +import * as workspace from "./workspaces/workspace"; import * as data from "./data"; import * as core from "./core"; import * as db from "./db"; @@ -405,14 +405,17 @@ export class EditorPackage { } savePkgAsync() { - if (this.header.blobCurrent) return Promise.resolve(); + console.log("savePkgAsync") // TODO @darzu: + return Promise.resolve() + // TODO @darzu: ensure none of this is needed; move or del + if (this.header.cloudCurrent) return Promise.resolve(); this.savingNow++; this.updateStatus(); return workspace.saveToCloudAsync(this.header) .then(() => { this.savingNow--; this.updateStatus(); - if (!this.header.blobCurrent) + if (!this.header.cloudCurrent) this.scheduleSave(); }) } @@ -626,6 +629,8 @@ class Host } downloadPackageAsync(pkg: pxt.Package): Promise { + console.log("downloadPackageAsync") // TODO @darzu: dbg + // TODO @darzu: what is the package abstraction for and why is it different than a workspace? let proto = pkg.verProtocol() let epkg = getEditorPkg(pkg) @@ -637,9 +642,10 @@ class Host return Promise.resolve(); } if (!scr) // this should not happen; + // TODO @darzu: this is happening. 
return Promise.reject(new Error(`Cannot find text for package '${arg}' in the workspace.`)); if (epkg.isTopLevel() && epkg.header) - return workspace.recomputeHeaderFlagsAsync(epkg.header, scr) + return workspace.recomputeHeaderGitFlagsAsync(epkg.header, scr) .then(() => epkg.setFiles(scr)) else { epkg.setFiles(scr) diff --git a/webapp/src/projects.tsx b/webapp/src/projects.tsx index e3425395c858..c0145ecf7f8c 100644 --- a/webapp/src/projects.tsx +++ b/webapp/src/projects.tsx @@ -5,8 +5,9 @@ import * as ReactDOM from "react-dom"; import * as data from "./data"; import * as sui from "./sui"; import * as core from "./core"; -import * as cloudsync from "./cloudsync"; +import * as cloudsync from "./workspaces/cloudsync"; import * as auth from "./auth"; +import * as workspace from "./workspaces/workspace"; import * as identity from "./identity"; import * as codecard from "./codecard" import * as carousel from "./carousel"; @@ -21,7 +22,7 @@ interface ProjectsState { selectedIndex?: number; } -export class Projects extends data.Component { +export class Projects extends auth.Component { constructor(props: ISettingsProps) { super(props) @@ -36,7 +37,6 @@ export class Projects extends data.Component { this.chgCode = this.chgCode.bind(this); this.importProject = this.importProject.bind(this); this.showScriptManager = this.showScriptManager.bind(this); - this.cloudSignIn = this.cloudSignIn.bind(this); this.setSelected = this.setSelected.bind(this); } @@ -162,11 +162,6 @@ export class Projects extends data.Component { this.props.parent.showScriptManager(); } - cloudSignIn() { - pxt.tickEvent("projects.signin", undefined, { interactiveConsent: true }); - this.props.parent.cloudSignInDialog(); - } - renderCore() { const { selectedCategory, selectedIndex } = this.state; @@ -480,7 +475,7 @@ export class ProjectsCarousel extends data.Component { if (index === 1) this.latestProject = view }} @@ -647,6 +644,7 @@ export class ProjectsCarousel extends data.Component; })} {showScriptManagerCard ?
workspace.duplicateAsync(header, text, res)) .then(clonedHeader => { // If we're cloud synced, update the cloudSync flag - if (this.props.parent.cloudSync()) clonedHeader.cloudSync = true; - - delete clonedHeader.blobId - delete clonedHeader.blobVersion - delete clonedHeader.blobCurrent + if (this.props.parent.hasCloudSync()) { + // TODO @darzu: revisit this + clonedHeader.cloudUserId = auth.user()?.id; + } + + delete clonedHeader.blobId_ + delete clonedHeader.blobVersion_ + delete clonedHeader.blobCurrent_ return workspace.saveAsync(clonedHeader); }) diff --git a/webapp/src/scriptsearch.tsx b/webapp/src/scriptsearch.tsx index 3dc8186d71d2..3559fbcb81f4 100644 --- a/webapp/src/scriptsearch.tsx +++ b/webapp/src/scriptsearch.tsx @@ -8,7 +8,7 @@ import * as pkg from "./package"; import * as core from "./core"; import * as codecard from "./codecard"; import * as electron from "./electron"; -import * as workspace from "./workspace"; +import * as workspace from "./workspaces/workspace"; import { SearchInput } from "./components/searchInput"; type ISettingsProps = pxt.editor.ISettingsProps; diff --git a/webapp/src/user.tsx b/webapp/src/user.tsx index 36be1bf71cba..d29b446922ad 100644 --- a/webapp/src/user.tsx +++ b/webapp/src/user.tsx @@ -2,7 +2,7 @@ import * as React from "react"; import * as sui from "./sui"; import * as core from "./core"; import * as auth from "./auth"; -import * as cloudsync from "./cloudsync"; +import * as cloudsync from "./workspaces/cloudsync"; type ISettingsProps = pxt.editor.ISettingsProps; diff --git a/webapp/src/workspaces/browserdbworkspace.ts b/webapp/src/workspaces/browserdbworkspace.ts new file mode 100644 index 000000000000..1a3e42882c62 --- /dev/null +++ b/webapp/src/workspaces/browserdbworkspace.ts @@ -0,0 +1,143 @@ +import * as db from "../db"; +import { toDbg } from "./cloudsyncworkspace"; + +type Header = pxt.workspace.Header; +type ScriptText = pxt.workspace.ScriptText; + +type TextDbEntry = { + files?: ScriptText, + // These are required by PouchDB/CouchDB + id: string, + _rev: any // This must be set to the return value of the last PouchDB/CouchDB +} + +export interface BrowserDbWorkspaceProvider extends pxt.workspace.WorkspaceProvider { + prefix: string; +} + +// TODO @darzu: very important for _rev and _id +export function createBrowserDbWorkspace(namespace: string): BrowserDbWorkspaceProvider { + if (!namespace) { + console.log("BAD default namespace created") + console.trace(); + } + const prefix = namespace ? 
namespace + "-" : "" + const headerDb = new db.Table(`${prefix}header`); + const textDb = new db.Table(`${prefix}text`); + + // TODO @darzu: dz: + // return pxt.storage.getLocal('workspacesessionid') != sessionID; + // pxt.storage.setLocal('workspacesessionid', sessionID); + + // TODO @darzu: + const printDbg = async () => { + const hdrs: pxt.workspace.Header[] = await headerDb.getAllAsync(); + // const txts: TextDbEntry[] = await textDb.getAllAsync(); + console.log(`dbg ${prefix}-headers:`); + console.dir(hdrs.map(toDbg)) + } + // TODO @darzu: dbg + printDbg(); + + async function listAsync(): Promise { + const hdrs: pxt.workspace.Header[] = await headerDb.getAllAsync() + // // TODO @darzu: debug logging + // console.log(`browser db headers ${prefix}:`) + // console.dir(hdrs.map(h => h.id)) + return hdrs + } + async function getAsync(h: Header): Promise { + const hdrProm = headerDb.getAsync(h.id) + const textProm = textDb.getAsync(h.id) + let [hdrResp, textResp] = await Promise.all([hdrProm, textProm]) as [Header, TextDbEntry] + if (!hdrResp || !textResp) + // TODO @darzu: distinguish these for the caller somehow? + return undefined + return { + header: hdrResp, + text: textResp.files, + version: textResp._rev + } + } + async function setAsync(h: Header, prevVer: any, text?: ScriptText): Promise { + // TODO @darzu: dbg + console.log(`setAsync ${namespace || "default"}:(${h.id}, ${h.modificationTime}, ${prevVer}) :)`) + + let textVer: string = undefined; + if (text) { + const textEnt: TextDbEntry = { + files: text, + id: h.id, + _rev: prevVer + } + + // if we get a 400, we need to fetch the old then do a new + // TODO @darzu: no we shouldn't; this isn't the right layer to handle storage conflicts + try { + textVer = await textDb.setAsync(textEnt) + } catch (e) {} + + if (!textVer) { + console.log(`! failed to set text for id:${h.id},pv:${prevVer}`); // TODO @darzu: dbg logging + const oldTxt = await textDb.getAsync(h.id) + console.dir(`! text ${h.id} actually is: ${oldTxt._rev}`) + console.dir(oldTxt) + } + } + + let hdrVer: string; + try { + hdrVer = await headerDb.setAsync(h) + } catch (e) {} + + if (!hdrVer) { + console.log(`! failed to set hdr for id:${h.id},pv:${prevVer}`); // TODO @darzu: dbg logging + let oldHdr: Header + try { + oldHdr = await headerDb.getAsync(h.id) as Header + } catch (e) {} + if (oldHdr) { + h._rev = oldHdr._rev + } else { + delete h._rev + } + // TODO @darzu: need to rethink error handling here + // TODO @darzu: we shouldn't auto-retry on conflict failure + try { + hdrVer = await headerDb.setAsync(h) + } catch (e) {} + if (!hdrVer) { + console.log(`!!! failed AGAIN to set hdr for id:${h.id},old:${JSON.stringify(oldHdr)}`); // TODO @darzu: dbg logging + } + } + h._rev = hdrVer + + await printDbg(); // TODO @darzu: dbg + + // TODO @darzu: notice undefined means either: "version conflict when setting text" and "no text sent" + return textVer + } + async function deleteAsync(h: Header, prevVer: any): Promise { + await headerDb.deleteAsync(h) + const textEnt: TextDbEntry = { + id: h.id, + _rev: prevVer + } + await textDb.deleteAsync(textEnt); + } + async function resetAsync() { + // workspace.resetAsync already clears all tables + // TODO @darzu: I don't like that worksapce reset does that.... 
+ return Promise.resolve(); + } + + const provider: BrowserDbWorkspaceProvider = { + prefix, + getAsync, + setAsync, + deleteAsync, + listAsync, + resetAsync, + } + return provider; +} \ No newline at end of file diff --git a/webapp/src/workspaces/browserworkspace.ts b/webapp/src/workspaces/browserworkspace.ts new file mode 100644 index 000000000000..58abb3a5c32e --- /dev/null +++ b/webapp/src/workspaces/browserworkspace.ts @@ -0,0 +1,114 @@ +import { BrowserDbWorkspaceProvider, createBrowserDbWorkspace } from "./browserdbworkspace"; + +type Header = pxt.workspace.Header; +type ScriptText = pxt.workspace.ScriptText; +type WorkspaceProvider = pxt.workspace.WorkspaceProvider; + +let currentDb: BrowserDbWorkspaceProvider; +async function init() { + if (!currentDb) { + currentDb = await createAndMigrateBrowserDb(); + } +} + +async function migrateProject(fromWs: WorkspaceProvider, newWs: WorkspaceProvider, h: pxt.workspace.Header): Promise { + const old = await fromWs.getAsync(h) + // Ignore metadata of the previous script so they get re-generated for the new copy + delete (h)._id; + delete (h)._rev; + return await newWs.setAsync(h, undefined, old.text) +}; + +const getVersionedDbPrefix = (majorVersion: number) => { + return pxt.appTarget.appTheme.browserDbPrefixes && pxt.appTarget.appTheme.browserDbPrefixes[majorVersion]; +} +const getCurrentDbPrefix = () => { + const currentVersion = pxt.semver.parse(pxt.appTarget.versions.target); + const currentMajor = currentVersion.major; + const currentDbPrefix = getVersionedDbPrefix(currentMajor); + return currentDbPrefix +} +const getPreviousDbPrefix = () => { + // No headers using this prefix yet, attempt to migrate headers from previous major version (or default tables) + const currentVersion = pxt.semver.parse(pxt.appTarget.versions.target); + const currentMajor = currentVersion.major; + const previousMajor = currentMajor - 1; + const previousDbPrefix = previousMajor < 0 ? "" : getVersionedDbPrefix(previousMajor); + return previousDbPrefix +} + +async function createAndMigrateBrowserDb(): Promise { + console.log("BAD createAndMigrateBrowserDb") // TODO @darzu: trace + const currentDbPrefix = getCurrentDbPrefix(); + let currDb: BrowserDbWorkspaceProvider; + if (currentDbPrefix) { + currDb = createBrowserDbWorkspace(currentDbPrefix); + } else { + // This version does not use a prefix for storing projects, so just use default tables + currDb = createBrowserDbWorkspace(""); + return currDb; + } + + const currHeaders = await currDb.listAsync() + if (currHeaders.length) { + // There are already scripts using the prefix, so a migration has already happened + return currDb; + } + + // Do a migration + const prevDbPrefix = getPreviousDbPrefix(); + let prevDb: BrowserDbWorkspaceProvider; + if (prevDbPrefix) { + prevDb = createBrowserDbWorkspace(prevDbPrefix); + } else { + prevDb = createBrowserDbWorkspace(""); + } + const prevHeaders = await prevDb.listAsync() + prevHeaders.forEach(h => migrateProject(prevDb, currDb, h)); + + return currDb; +} + +export async function copyProjectToLegacyEditor(h: Header, majorVersion: number): Promise
{ + console.log("BAD copyProjectToLegacyEditor") // TODO @darzu: trace + await init(); + + const prefix = getVersionedDbPrefix(majorVersion); + const oldDb = createBrowserDbWorkspace(prefix || ""); + + // clone header + const header = pxt.Util.clone(h); + delete (header as any)._id; + delete header._rev; + header.id = pxt.Util.guidGen(); + + const resp = await currentDb.getAsync(h) + const rev = await oldDb.setAsync(header, undefined, resp.text) + return header +} + +// TODO @darzu: might be a better way to provide this wrapping and handle the migration +// TODO @darzu: export +const provider: WorkspaceProvider = { + listAsync: async () => { + await init(); + return currentDb.listAsync(); + }, + getAsync: async (h: Header) => { + await init(); + return currentDb.getAsync(h); + }, + setAsync: async (h: Header, prevVersion: pxt.workspace.Version, text?: ScriptText) => { + await init(); + console.log("BAD setAsync") // TODO @darzu: tracing usage + return currentDb.setAsync(h, prevVersion, text); + }, + deleteAsync: async (h: Header, prevVersion: pxt.workspace.Version) => { + await init(); + return currentDb.deleteAsync(h, prevVersion); + }, + resetAsync: async () => { + await init(); + return currentDb.resetAsync(); + } +} \ No newline at end of file diff --git a/webapp/src/cloudsync.ts b/webapp/src/workspaces/cloudsync.ts similarity index 93% rename from webapp/src/cloudsync.ts rename to webapp/src/workspaces/cloudsync.ts index 6813b67550cf..03fe7bc18f6a 100644 --- a/webapp/src/cloudsync.ts +++ b/webapp/src/workspaces/cloudsync.ts @@ -1,11 +1,13 @@ // TODO cloud save indication in the editor somewhere -import * as core from "./core"; -import * as pkg from "./package"; +import * as core from "../core"; +import * as pkg from "../package"; import * as ws from "./workspace"; -import * as data from "./data"; +import * as data from "../data"; +import * as cloud from "../cloud"; type Header = pxt.workspace.Header; +type File = pxt.workspace.File; import U = pxt.Util; const lf = U.lf @@ -376,7 +378,7 @@ export async function ensureGitHubTokenAsync() { } // this is generally called by the provier's loginCheck() function -export function setProvider(impl: IdentityProvider) { +function setProvider(impl: IdentityProvider) { if (impl !== currentProvider) { currentProvider = impl invalidateData(); @@ -391,12 +393,12 @@ async function syncOneUpAsync(provider: Provider, h: Header) { text = U.flatClone(text) text[HEADER_JSON] = JSON.stringify(h, null, 4) - let firstTime = h.blobId == null + let firstTime = h.blobId_ == null let info: FileInfo try { - info = await provider.uploadAsync(h.blobId, h.blobVersion, text) + info = await provider.uploadAsync(h.blobId_, h.blobVersion_, text) } catch (e) { if (e.statusCode == 409) { core.warningNotification(lf("Conflict saving {0}; please do a full cloud sync", h.name)) @@ -408,20 +410,20 @@ async function syncOneUpAsync(provider: Provider, h: Header) { pxt.debug(`synced up ${info.id}`) if (firstTime) { - h.blobId = info.id + h.blobId_ = info.id } else { - U.assert(h.blobId == info.id) + U.assert(h.blobId_ == info.id) } - h.blobVersion = info.version + h.blobVersion_ = info.version if (h.saveId === saveId) - h.blobCurrent = true + h.blobCurrent_ = true await ws.saveAsync(h, null, true) } export async function renameAsync(h: Header, newName: string) { const provider = currentProvider && currentProvider.hasSync() && currentProvider as Provider; try { - await provider.updateAsync(h.blobId, newName) + await provider.updateAsync(h.blobId_, newName) } catch (e) { } @@ -501,7 
+503,12 @@ export function refreshToken() { } export function syncAsync(): Promise { - return Promise.all([githubSyncAsync(), cloudSyncAsync()]) + return Promise.all([ + githubSyncAsync(), + // TODO @darzu: + // cloud.syncAsync() + cloudSyncAsync() + ]) .then(() => { }); } @@ -511,6 +518,7 @@ function githubSyncAsync(): Promise { } function cloudSyncAsync(): Promise { + // TODO @darzu: delete? if (!currentProvider) return Promise.resolve(undefined) if (!currentProvider.hasSync()) @@ -522,10 +530,10 @@ function cloudSyncAsync(): Promise { let updated: pxt.Map = {} function uninstallAsync(h: Header) { - pxt.debug(`uninstall local ${h.blobId}`) + pxt.debug(`uninstall local ${h.blobId_}`) h.isDeleted = true - h.blobVersion = "DELETED" - h.blobCurrent = false + h.blobVersion_ = "DELETED" + h.blobCurrent_ = false return ws.saveAsync(h, null, true) } @@ -533,9 +541,9 @@ function cloudSyncAsync(): Promise { // rename current script let text = await ws.getTextAsync(header.id) let newHd = await ws.duplicateAsync(header, text) - header.blobId = null - header.blobVersion = null - header.blobCurrent = false + header.blobId_ = null + header.blobVersion_ = null + header.blobCurrent_ = false await ws.saveAsync(header, text) // get the cloud version await syncDownAsync(newHd, cloudHeader) @@ -551,20 +559,20 @@ function cloudSyncAsync(): Promise { } numDown++ - U.assert(header.blobId == cloudHeader.id) + U.assert(header.blobId_ == cloudHeader.id) let blobId = cloudHeader.version - pxt.debug(`sync down ${header.blobId} - ${blobId}`) + pxt.debug(`sync down ${header.blobId_} - ${blobId}`) return provider.downloadAsync(cloudHeader.id) .catch(core.handleNetworkError) .then((resp: FileInfo) => { - U.assert(resp.id == header.blobId) + U.assert(resp.id == header.blobId_) let files = resp.content let hd = JSON.parse(files[HEADER_JSON] || "{}") as Header delete files[HEADER_JSON] - header.cloudSync = true - header.blobCurrent = true - header.blobVersion = resp.version + header.cloudUserId = '1234' + header.blobCurrent_ = true + header.blobVersion_ = resp.version // TODO copy anything else from the cloud? header.name = hd.name || header.name || "???" 
header.id = header.id || hd.id || U.guidGen() @@ -579,7 +587,7 @@ function cloudSyncAsync(): Promise { header.modificationTime = resp.updatedAt || U.nowSeconds() if (!header.recentUse) header.recentUse = header.modificationTime - updated[header.blobId] = 1; + updated[header.blobId_] = 1; if (!header0) return ws.importAsync(header, files, true) @@ -613,7 +621,7 @@ function cloudSyncAsync(): Promise { } function syncDeleteAsync(h: Header) { - return provider.deleteAsync(h.blobId) + return provider.deleteAsync(h.blobId_) .then(() => uninstallAsync(h)) } @@ -625,15 +633,15 @@ function cloudSyncAsync(): Promise { // Get all local headers including those that had been deleted const allScripts = ws.getHeaders(true) const cloudHeaders = U.toDictionary(entries, e => e.id) - const existingHeaders = U.toDictionary(allScripts.filter(h => h.blobId), h => h.blobId) + const existingHeaders = U.toDictionary(allScripts.filter(h => h.blobId_), h => h.blobId_) //console.log('all', allScripts); //console.log('cloud', cloudHeaders); //console.log('existing', existingHeaders); //console.log('syncthese', allScripts.filter(hd => hd.cloudSync)); // Only syncronize those that have been marked with cloudSync - let waitFor = allScripts.filter(hd => hd.cloudSync).map(hd => { - if (cloudHeaders.hasOwnProperty(hd.blobId)) { - let chd = cloudHeaders[hd.blobId] + let waitFor = allScripts.filter(hd => hd.cloudUserId).map(hd => { + if (cloudHeaders.hasOwnProperty(hd.blobId_)) { + let chd = cloudHeaders[hd.blobId_] // The script was deleted locally, delete on cloud if (hd.isDeleted) { @@ -641,8 +649,8 @@ function cloudSyncAsync(): Promise { return syncDeleteAsync(hd) } - if (chd.version == hd.blobVersion) { - if (hd.blobCurrent) { + if (chd.version == hd.blobVersion_) { + if (hd.blobCurrent_) { // nothing to do return Promise.resolve() } else { @@ -650,7 +658,7 @@ function cloudSyncAsync(): Promise { return syncUpAsync(hd) } } else { - if (hd.blobCurrent) { + if (hd.blobCurrent_) { console.log('might have synced down: ', hd.name); return syncDownAsync(hd, chd) } else { @@ -659,7 +667,7 @@ function cloudSyncAsync(): Promise { } } } else { - if (hd.blobVersion) + if (hd.blobVersion_) // this has been pushed once to the cloud - uninstall wins return uninstallAsync(hd) else { @@ -740,7 +748,8 @@ export function loginCheck() { impl.loginCheck(); } -export function saveToCloudAsync(h: Header) { +export async function saveToCloudAsync(h: Header) { + // TODO @darzu: why is this null when saving a new local project while logged in? 
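+    // (Possibly because currentProvider is only set via setProvider(), which the identity
+    // providers call from loginCheck(); the new auth/cloud.ts sign-in path does not
+    // register a cloudsync provider, so currentProvider can stay null here.)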
if (!currentProvider || !currentProvider.hasSync()) return Promise.resolve(); diff --git a/webapp/src/workspaces/cloudsyncworkspace.ts b/webapp/src/workspaces/cloudsyncworkspace.ts new file mode 100644 index 000000000000..15e4dac66fd8 --- /dev/null +++ b/webapp/src/workspaces/cloudsyncworkspace.ts @@ -0,0 +1,491 @@ +import { ConflictStrategy, DisjointSetsStrategy, Strategy } from "./workspacebehavior"; +import U = pxt.Util; + +type Header = pxt.workspace.Header; +type ScriptText = pxt.workspace.ScriptText; +type WsFile = pxt.workspace.File; +type Version = pxt.workspace.Version; +type WorkspaceProvider = pxt.workspace.WorkspaceProvider; + +// TODO @darzu: BIG TODOs +// [ ] cache invalidation via header sessions +// [ ] enforce soft-delete +// pouchdb uses _delete for soft delete +// [ ] error handling: conflicts returned as "undefined"; other errors propegate as exceptions + +export interface CachedWorkspaceProvider extends WorkspaceProvider { + getHeadersHash(): string, + synchronize(reason: SynchronizationReason): Promise, // TODO @darzu: name syncAsync? + pendingSync(): Promise, + firstSync(): Promise, + listSync(): Header[], + getHeaderSync(id: string): Header, + tryGetSync(h: Header): WsFile +} + +// TODO @darzu: \/ \/ \/ thread through +export interface SynchronizationReason { + expectedHeadersHash?: string, + pollStorage?: boolean, +} + +function computeHeadersHash(hdrs: Header[]): string { + // TODO @darzu: should we just do an actual hash? + // [ ] measure perf difference + // [ ] maybe there are some fields we want to ignore; if so, these should likely be moved out of Header interface + return hdrs.length + ' ' + hdrs // TODO @darzu: [ ] use the length component in the workspace internals? + .map(h => h.modificationTime) + .reduce((l, r) => Math.max(l, r), 0) +} + +function hasChanged(a: Header, b: Header): boolean { + // TODO @darzu: use e-tag, _rev, version uuid, or hash instead? + return (!!a !== !!b) || a?.modificationTime !== b?.modificationTime +} + +// TODO @darzu: use cases: multi-tab and cloud +export function createCachedWorkspace(ws: WorkspaceProvider): CachedWorkspaceProvider { + let cacheHdrs: Header[] = [] + let cacheHdrsMap: {[id: string]: Header} = {}; + let cacheProjs: {[id: string]: WsFile} = {}; + + // TODO @darzu: thinking through workspace sessions + // per header locks? + // for all headers? + // const workspaceID: string = pxt.Util.guidGen(); + // pxt.storage.setLocal('workspaceheadersessionid:' + h.id, workspaceID); + // pxt.storage.removeLocal('workspaceheadersessionid:' + h.id); + // const sid = pxt.storage.getLocal('workspaceheadersessionid:' + h.id); + + let cacheHdrsHash: string = ""; + function getHeadersHash(): string { + return cacheHdrsHash; + } + + // TODO @darzu: do we want to kick off the first sync at construction? Side-effects at construction are usually bad.. 
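+    // (One lazy alternative sketch: leave firstUpdate undefined here and have
+    // pendingSync()/firstSync()/listAsync() call synchronize({ pollStorage: true })
+    // on first use, keeping construction side-effect free.)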
+ const firstUpdate = synchronizeInternal({ pollStorage: true }); + let pendingUpdate = firstUpdate; + async function synchronize(reason: SynchronizationReason): Promise { + if (pendingUpdate.isPending()) + return pendingUpdate + pendingUpdate = synchronizeInternal(reason) + return pendingUpdate + } + + function eraseCache() { + console.log("cachedworkspace: eraseCache") // TODO @darzu: dbg + cacheHdrs = [] + cacheHdrsMap = {} + cacheProjs = {} + cacheHdrsHash = "" + } + + async function synchronizeInternal(reason: SynchronizationReason): Promise { + // remember our old cache, we might keep items from it later + const oldHdrs = cacheHdrs + const oldHdrsMap = cacheHdrsMap + const oldProjs = cacheProjs + const oldHdrsHash = cacheHdrsHash + + const hashDesync = reason.expectedHeadersHash && reason.expectedHeadersHash !== cacheHdrsHash + const needSync = !cacheHdrsHash || hashDesync || reason.pollStorage; + if (hashDesync) { + // TODO @darzu: does this buy us anything? + eraseCache() + } else if (!needSync) { + return [] + } + + const newHdrs = await ws.listAsync() + const newHdrsHash = computeHeadersHash(newHdrs); + if (newHdrsHash === oldHdrsHash) { + // no change, keep the old cache + cacheHdrs = oldHdrs + cacheHdrsMap = oldHdrsMap + cacheProjs = oldProjs + return [] + } + console.log("cachedworkspace: synchronizeInternal (1)") // TODO @darzu: dbg + + // compute header differences and clear old cache entries + const newHdrsMap = U.toDictionary(newHdrs, h => h.id) + const changedHdrIds = U.unique([...oldHdrs, ...newHdrs], h => h.id).map(h => h.id) + .filter(id => hasChanged(oldHdrsMap[id], newHdrsMap[id])) + const newProjs = oldProjs // TODO @darzu: is there any point in copying here? + for (let id of changedHdrIds) { + if (id in newProjs) { + console.log(`cache invalidating ${id} because:`) // TODO @darzu: dbg + console.dir({ old: (oldHdrsMap[id]), new: (newHdrsMap[id]) }) + delete newProjs[id] + } + } + + // save results + cacheHdrsHash = newHdrsHash + cacheProjs = newProjs + cacheHdrs = newHdrs + cacheHdrsMap = newHdrsMap + return changedHdrIds.map(i => newHdrsMap[i]); + } + + async function listAsync(): Promise { + await pendingUpdate; + return cacheHdrs + } + async function getAsync(h: Header): Promise { + // TODO @darzu: should the semantics of this check the header version? + await pendingUpdate; + if (!cacheProjs[h.id]) { + // fetch + // TODO @darzu: use / cooperate with worklist? + const proj = await ws.getAsync(h) + cacheProjs[h.id] = proj + } + return cacheProjs[h.id]; + } + async function setAsync(h: Header, prevVer: Version, text?: ScriptText): Promise { + await pendingUpdate; + if (text) { + // update cached projects + cacheProjs[h.id] = { + header: h, + text, + version: prevVer + } + } + // update headers list, map and hash + if (!cacheHdrsMap[h.id]) { + cacheHdrs.push(h) + } + cacheHdrsMap[h.id] = h + cacheHdrsHash = computeHeadersHash(cacheHdrs) + // send update to backing storage + const res = await ws.setAsync(h, prevVer, text) + if (res) { + if (text) { + // update cached project + cacheProjs[h.id] = { + header: h, + text, + version: res + } + } + } else { + // conflict; delete cache + delete cacheProjs[h.id] + // TODO @darzu: fix header(s) after conflict ? 
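+                // (One option sketch: call synchronize({ pollStorage: true }) here so the
+                // cached header list and headers hash get refreshed from backing storage
+                // after the conflicting write.)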
+ } + return res; + } + async function deleteAsync(h: Header, prevVer: Version): Promise { + await pendingUpdate; + // update cached projects + delete cacheProjs[h.id]; + // update headers list, map and hash + delete cacheHdrsMap[h.id]; + cacheHdrs = cacheHdrs.filter(r => r.id !== h.id); + cacheHdrsHash = computeHeadersHash(cacheHdrs); + // send update to backing storage + await ws.deleteAsync(h, prevVer) + // TODO @darzu: fix header(s) after conflict ? + } + async function resetAsync() { + await pendingUpdate; + eraseCache(); + await ws.resetAsync() + } + + const provider: CachedWorkspaceProvider = { + // cache + getHeadersHash, + synchronize, + pendingSync: () => pendingUpdate, + firstSync: () => firstUpdate, + listSync: () => cacheHdrs, + tryGetSync: h => cacheProjs[h.id], + getHeaderSync: id => cacheHdrsMap[id], + // workspace + getAsync, + setAsync, + deleteAsync, + listAsync, + resetAsync, + } + + return provider; +} + +// TODO @darzu: debugging helper +export function toDbg(h: Header) { + return {n: h.name, t: h.modificationTime, del: h.isDeleted, id: h.id} +} + +export interface CloudSyncWorkspace extends CachedWorkspaceProvider { +} + +export function createCloudSyncWorkspace(cloud: WorkspaceProvider, cloudLocal: WorkspaceProvider): CloudSyncWorkspace { + const cloudCache = createCachedWorkspace(cloud); + const localCache = createCachedWorkspace(cloudLocal); + + const firstCachePull = Promise.all([cloudCache.firstSync(), localCache.firstSync()]) + const pendingCacheSync = () => Promise.all([cloudCache.pendingSync(), localCache.pendingSync()]) + const getHeadersHash = () => localCache.getHeadersHash() + const needsSync = () => cloudCache.getHeadersHash() !== localCache.getHeadersHash() + + // TODO @darzu: we could frequently check the last mod times to see if a sync is in order? + + // TODO @darzu: multi-tab safety for cloudLocal + // TODO @darzu: when two workspaces disagree on last mod time, we should sync? + + const firstSync = synchronizeInternal({pollStorage: true}); + let pendingSync = firstSync; + + async function synchronize(reason: SynchronizationReason): Promise { + if (pendingSync.isPending()) + return pendingSync + pendingSync = synchronizeInternal(reason) + return pendingSync + } + + function resolveConflict(a: Header, b: Header, strat: ConflictStrategy): Header { + // TODO @darzu: involve the user + // TODO @darzu: consider lineage + // TODO @darzu: consider diff + if (strat === ConflictStrategy.LastWriteWins) + return a.modificationTime > b.modificationTime ? a : b; + U.unreachable(strat); + } + async function transfer(fromH: Header, toH: Header, fromWs: WorkspaceProvider, toWs: WorkspaceProvider): Promise
{ + // TODO @darzu: worklist this? + // TODO @darzu: track pending saves + + // TODO @darzu: dbg + console.log(`transfer ${fromH.id}(${fromH.modificationTime},${fromH._rev}) => (${toH?.modificationTime},${toH?._rev})`) + + const newPrj = await fromWs.getAsync(fromH) + + // we need the old project if any exists so we know what prevVersion to pass + // TODO @darzu: keep project text version in the header + let prevVer = undefined + if (toH) { + const oldPrj = await toWs.getAsync(toH) + if (oldPrj) + prevVer = oldPrj.version + } + + // create a new header + // TODO @darzu: how do we do this in an abstraction preserving way? + const newH = {...fromH, _rev: toH?._rev ?? undefined} + delete (newH as any)["_id"] + + const newVer = await toWs.setAsync(newH, prevVer, newPrj.text) + + return newH; + } + async function synchronizeInternal(reason: SynchronizationReason): Promise { + console.log("cloudsyncworkspace: synchronizeInternal") + console.dir(reason) + + // TODO @darzu: review these cases: + // case 1: everything should be synced up, we're just polling the server + // expectedLastModTime = 0 + // we definitely want cloudCache to synchronize + // we don't need localCache to synchronize + // we want to wait on localCache.pendingSync + // case 2: we suspect localCache is out of date (from other tab changes) + // expectedLastModTime = someValueFromOtherTab + // we don't need cloudCache to synchronize + // we definitely want localCache to synchronize + // we want to wait on cloudCache.pendingSync + // case 3: createCloudSyncWorkspace is first called (first sync) + // expectedLastModTime = 0 + // we don't need cloudCache to synchronize + // we don't need localCache to synchronize + // we want to wait on localCache.pendingSync + // TODO @darzu: need to think through and compare how this would work with git + + // wait for each side to sync + await Promise.all([cloudCache.synchronize(reason), localCache.synchronize(reason)]) + + // TODO @darzu: re-generalize? + const left = cloudCache; + const right = localCache; + const strat = { + conflict: ConflictStrategy.LastWriteWins, + disjointSets: DisjointSetsStrategy.Synchronize + } + + const lHdrsList = left.listSync() + const rHdrsList = right.listSync() + + const lHdrs = U.toDictionary(lHdrsList, h => h.id) + const rHdrs = U.toDictionary(rHdrsList, h => h.id) + const allHdrsList = [...lHdrsList, ...rHdrsList] + + // determine left-only, overlap, and right-only sets + const overlap = allHdrsList.reduce( + (p: {[key: string]: Header}, n) => lHdrs[n.id] && rHdrs[n.id] ? (p[n.id] = n) && p : p, {}) + const lOnly = allHdrsList.reduce( + (p: {[key: string]: Header}, n) => lHdrs[n.id] && !rHdrs[n.id] ? (p[n.id] = n) && p : p, {}) + const rOnly = allHdrsList.reduce( + (p: {[key: string]: Header}, n) => !lHdrs[n.id] && rHdrs[n.id] ? (p[n.id] = n) && p : p, {}) + + // resolve conflicts + const conflictResults = U.values(overlap).map(h => resolveConflict(lHdrs[h.id], rHdrs[h.id], strat.conflict)) + + // update left + const lChanges = conflictResults.reduce((p: Header[], n) => hasChanged(n, lHdrs[n.id]) ? [...p, n] : p, []) + let lToPush = lChanges + if (strat.disjointSets === DisjointSetsStrategy.Synchronize) + lToPush = [...lToPush, ...U.values(rOnly)] + const lPushPromises = lToPush.map(h => transfer(rHdrs[h.id], lHdrs[h.id], right, left)) + + // update right + const rChanges = conflictResults.reduce((p: Header[], n) => hasChanged(n, rHdrs[n.id]) ? 
[...p, n] : p, []) + let rToPush = rChanges + if (strat.disjointSets === DisjointSetsStrategy.Synchronize) + rToPush = [...rToPush, ...U.values(lOnly)] + const rPushPromises = rToPush.map(h => transfer(lHdrs[h.id], rHdrs[h.id], left, right)) + + // wait + // TODO @darzu: worklist? batching? throttling? incremental? + const allPushes = await Promise.all([...lPushPromises, ...rPushPromises]) + const changes = U.unique(allPushes, h => h.id) + + // TODO @darzu: what about mod time changes? + return changes + } + + async function listAsync(): Promise { + await pendingSync + return localCache.listAsync() + } + async function getAsync(h: Header): Promise { + await pendingSync + return localCache.getAsync(h) + } + async function setAsync(h: Header, prevVer: any, text?: ScriptText): Promise { + await pendingSync + + // TODO @darzu: cannot pass prevVer to both of these.. they have different meanings on the different platforms + // TODO @darzu: use a queue to sync to backend and make sure this promise is part of the pending sync set + async function cloudSet() { + const prevCloudProj = await cloudCache.getAsync(h) + const newCloudVer = await cloudCache.setAsync(h, prevCloudProj?.version, text) + } + cloudSet() + + // TODO @darzu: also what to do with the return value ? + return await localCache.setAsync(h, prevVer, text) + } + async function deleteAsync(h: Header, prevVer: any): Promise { + await pendingSync + // TODO @darzu: use a queue to sync to backend + const cloudPromise = cloudCache.deleteAsync(h, prevVer) + await localCache.deleteAsync(h, prevVer) + } + async function resetAsync() { + await pendingSync + // TODO @darzu: do we really want to reset the cloud ever? + // await Promise.all([cloudCache.resetAsync(), localCache.resetAsync()]) + return Promise.resolve(); + } + + + // TODO @darzu: debug logging + firstSync.then(c => { + console.log("cloudSyncWS first update:") + console.dir(localCache.listSync().map(toDbg)) + }) + + const provider: CloudSyncWorkspace = { + // cache + getHeadersHash, + synchronize, + pendingSync: () => pendingSync, + firstSync: () => firstSync, + listSync: () => localCache.listSync(), + tryGetSync: h => localCache.tryGetSync(h), + getHeaderSync: id => localCache.getHeaderSync(id), + // workspace + getAsync, + setAsync, + deleteAsync, + listAsync, + resetAsync, + } + + return provider; +} + +// TODO @darzu: below is the code for multi-tab synchronizing + +// // this key is the max modificationTime value of the allHeaders +// // it is used to track if allHeaders need to be refreshed (syncAsync) +// let sessionID: string = ""; +// export function isHeadersSessionOutdated() { +// return pxt.storage.getLocal('workspacesessionid') != sessionID; +// } +// function maybeSyncHeadersAsync(): Promise { +// if (isHeadersSessionOutdated()) // another tab took control +// return syncAsync().then(() => { }) +// return Promise.resolve(); +// } +// function refreshHeadersSession() { +// // TODO @darzu: carefully handle this +// // use # of scripts + time of last mod as key +// sessionID = allScripts.length + ' ' + allScripts +// .map(h => h.header.modificationTime) +// .reduce((l, r) => Math.max(l, r), 0) +// .toString() +// if (isHeadersSessionOutdated()) { +// pxt.storage.setLocal('workspacesessionid', sessionID); +// pxt.debug(`workspace: refreshed headers session to ${sessionID}`); +// data.invalidate("header:*"); +// data.invalidate("text:*"); +// } +// } +// // this is an identifier for the current frame +// // in order to lock headers for editing +// const workspaceID: string = 
pxt.Util.guidGen(); +// export function acquireHeaderSession(h: Header) { +// if (h) +// pxt.storage.setLocal('workspaceheadersessionid:' + h.id, workspaceID); +// } +// function clearHeaderSession(h: Header) { +// if (h) +// pxt.storage.removeLocal('workspaceheadersessionid:' + h.id); +// } +// export function isHeaderSessionOutdated(h: Header): boolean { +// if (!h) return false; +// const sid = pxt.storage.getLocal('workspaceheadersessionid:' + h.id); +// return sid && sid != workspaceID; +// } +// function checkHeaderSession(h: Header): void { +// if (isHeaderSessionOutdated(h)) { +// pxt.tickEvent(`workspace.conflict.header`); +// core.errorNotification(lf("This project is already opened elsewhere.")) +// pxt.Util.assert(false, "trying to access outdated session") +// } +// } + +// TODO @darzu: from webapp: +// loadHeaderAsync(h: pxt.workspace.Header, editorState?: pxt.editor.EditorState): Promise { +// if (!h) +// return Promise.resolve() + +// const checkAsync = this.tryCheckTargetVersionAsync(h.targetVersion); +// if (checkAsync) +// return checkAsync.then(() => this.openHome()); + +// let p = Promise.resolve(); +// if (workspace.isHeadersSessionOutdated()) { // reload header before loading +// pxt.log(`sync before load`) +// p = p.then(() => workspace.syncAsync().then(() => { })) +// } +// return p.then(() => { +// workspace.acquireHeaderSession(h); +// if (!h) return Promise.resolve(); +// else return this.internalLoadHeaderAsync(h, editorState); +// }) +// } \ No newline at end of file diff --git a/webapp/src/workspaces/cloudworkspace.ts b/webapp/src/workspaces/cloudworkspace.ts new file mode 100644 index 000000000000..cbfe515f0922 --- /dev/null +++ b/webapp/src/workspaces/cloudworkspace.ts @@ -0,0 +1,40 @@ +import * as cloud from "../cloud"; + +type WorkspaceProvider = pxt.workspace.WorkspaceProvider; + +export const provider: WorkspaceProvider = { + getAsync: cloud.getAsync, + setAsync: cloud.setAsync, + deleteAsync: cloud.deleteAsync, + listAsync: cloud.listAsync, + resetAsync: cloud.resetAsync, +} + +// TODO @darzu: throttled workspace ?? + +// TODO @darzu: do we need a subscription here? +// export function init() { +// data.subscribe(userSubscriber, auth.LOGGED_IN); +// } + +// let prevWorkspaceType: string; + +// async function updateWorkspace() { +// const loggedIn = await auth.loggedIn(); +// if (loggedIn) { +// // TODO: Handling of 'prev' is pretty hacky. Need to improve it. 
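+// NOTE: the flow sketched below remembers which workspace kind was active before
+// sign-in so it can be restored (and re-synced) after sign-out.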
+// let prev = workspace.switchToCloudWorkspace(); +// if (prev !== "cloud") { +// prevWorkspaceType = prev; +// } +// await workspace.syncAsync(); +// } else if (prevWorkspaceType) { +// workspace.switchToWorkspace(prevWorkspaceType); +// await workspace.syncAsync(); +// } +// } + +// const userSubscriber: data.DataSubscriber = { +// subscriptions: [], +// onDataChanged: async () => updateWorkspace() +// }; \ No newline at end of file diff --git a/webapp/src/fileworkspace.ts b/webapp/src/workspaces/fileworkspace.ts similarity index 98% rename from webapp/src/fileworkspace.ts rename to webapp/src/workspaces/fileworkspace.ts index 0230ba2fdce1..7214eac34b08 100644 --- a/webapp/src/fileworkspace.ts +++ b/webapp/src/workspaces/fileworkspace.ts @@ -1,5 +1,5 @@ -import * as core from "./core"; -import * as electron from "./electron"; +import * as core from "../core"; +import * as electron from "../electron"; import U = pxt.Util; import Cloud = pxt.Cloud; diff --git a/webapp/src/githubprovider.tsx b/webapp/src/workspaces/githubprovider.tsx similarity index 98% rename from webapp/src/githubprovider.tsx rename to webapp/src/workspaces/githubprovider.tsx index 3fd265d5dc51..f98215340563 100644 --- a/webapp/src/githubprovider.tsx +++ b/webapp/src/workspaces/githubprovider.tsx @@ -1,8 +1,8 @@ import * as React from "react"; -import * as sui from "./sui"; -import * as core from "./core"; +import * as sui from "../sui"; +import * as core from "../core"; import * as cloudsync from "./cloudsync"; -import * as dialogs from "./dialogs"; +import * as dialogs from "../dialogs"; import * as workspace from "./workspace"; export const PROVIDER_NAME = "github"; diff --git a/webapp/src/googledrive.ts b/webapp/src/workspaces/googledrive.ts similarity index 99% rename from webapp/src/googledrive.ts rename to webapp/src/workspaces/googledrive.ts index 4151a6867148..c39358b799a3 100644 --- a/webapp/src/googledrive.ts +++ b/webapp/src/workspaces/googledrive.ts @@ -1,4 +1,4 @@ -import * as core from "./core"; +import * as core from "../core"; import * as cloudsync from "./cloudsync"; import U = pxt.U diff --git a/webapp/src/idbworkspace.ts b/webapp/src/workspaces/idbworkspace.ts similarity index 92% rename from webapp/src/idbworkspace.ts rename to webapp/src/workspaces/idbworkspace.ts index 1890063fc156..74ebd9e80267 100644 --- a/webapp/src/idbworkspace.ts +++ b/webapp/src/workspaces/idbworkspace.ts @@ -21,6 +21,7 @@ const KEYPATH = "id"; // This function migrates existing projectes in pouchDb to indexDb // From browserworkspace to idbworkspace async function migrateBrowserWorkspaceAsync(): Promise { + console.log("BAD migrateBrowserWorkspaceAsync called") // TODO @darzu: this shouldn't be needed const db = await getDbAsync(); const allDbHeaders = await db.getAllAsync(HEADERS_TABLE); if (allDbHeaders.length) { @@ -28,8 +29,10 @@ async function migrateBrowserWorkspaceAsync(): Promise { return; } + const ws: WorkspaceProvider = null; // TODO @darzu: browserworkspace.provider + const copyProject = async (h: pxt.workspace.Header): Promise => { - const resp = await browserworkspace.provider.getAsync(h); + const resp = await ws.getAsync(h); // Ignore metadata of the previous script so they get re-generated for the new copy delete (resp as any)._id; @@ -38,7 +41,7 @@ async function migrateBrowserWorkspaceAsync(): Promise { await setAsync(h, undefined, resp.text); }; - const previousHeaders = await browserworkspace.provider.listAsync(); + const previousHeaders = await ws.listAsync(); await Promise.all(previousHeaders.map(h => 
copyProject(h))); } @@ -75,7 +78,9 @@ async function getDbAsync(): Promise { } async function listAsync(): Promise { - await migrateBrowserWorkspaceAsync(); + // TODO @darzu: + console.log("idbworkspace:listAsync") + // await migrateBrowserWorkspaceAsync(); const db = await getDbAsync(); return db.getAllAsync(HEADERS_TABLE); } diff --git a/webapp/src/iframeworkspace.ts b/webapp/src/workspaces/iframeworkspace.ts similarity index 100% rename from webapp/src/iframeworkspace.ts rename to webapp/src/workspaces/iframeworkspace.ts diff --git a/webapp/src/workspaces/jointworkspace.ts b/webapp/src/workspaces/jointworkspace.ts new file mode 100644 index 000000000000..3499cfadce1d --- /dev/null +++ b/webapp/src/workspaces/jointworkspace.ts @@ -0,0 +1,157 @@ +import { CachedWorkspaceProvider, SynchronizationReason } from "./cloudsyncworkspace"; + +type Header = pxt.workspace.Header; +type ScriptText = pxt.workspace.ScriptText; +type File = pxt.workspace.File; +type WorkspaceProvider = pxt.workspace.WorkspaceProvider; +import U = pxt.Util; + +async function unique(...listFns: (() => Promise)[]) { + const allHdrs = (await Promise.all(listFns.map(ls => ls()))) + .reduce((p, n) => [...p, ...n], []) + const seenHdrs: { [key: string]: boolean } = {} + // de-duplicate headers (prefering earlier ones) + const res = allHdrs.reduce((p, n) => { + if (seenHdrs[n.id]) + return p; + seenHdrs[n.id] = true; + return [...p, n] + }, []) + return res; +} + +// TODO @darzu: still useful? else cull +export function createJointWorkspace2(primary: WorkspaceProvider, ...others: WorkspaceProvider[]): WorkspaceProvider { + const all: WorkspaceProvider[] = [primary, ...others]; + + // TODO @darzu: debug logging + console.log(`createJointWorkspace2`); + + async function listAsync(): Promise { + return unique(...all.map(ws => ws.listAsync)) + } + async function getAsync(h: Header): Promise { + // chose the first matching one + return all.reduce(async (p: Promise, n) => await p ?? n.getAsync(h), null) + } + async function getWorkspaceForAsync(h: Header): Promise { + return await all.reduce( + async (p: Promise, n) => await p ?? n.getAsync(h).then(f => f ? n : null), null) + } + async function setAsync(h: Header, prevVer: any, text?: ScriptText): Promise { + const matchingWorkspace = await getWorkspaceForAsync(h) + const ws = matchingWorkspace ?? primary + return ws.setAsync(h, prevVer, text) + } + async function deleteAsync(h: Header, prevVer: any): Promise { + const matchingWorkspace = await getWorkspaceForAsync(h) + return matchingWorkspace?.deleteAsync(h, prevVer) + } + async function resetAsync() { + await Promise.all(all.map(ws => ws.resetAsync())) + } + + const provider: WorkspaceProvider = { + getAsync, + setAsync, + deleteAsync, + listAsync, + resetAsync, + } + return provider; +} + +// note: these won't work recursively, but as of now there's no forseen use +// case beyond 1 level. If needed, we could use a hash tree/merkle tree. +function joinHdrsHash(...hashes: string[]): string { + return hashes?.join("|") ?? "" +} +function splitHdrsHash(hash: string): string[] { + return hash?.split("|") ?? 
[] +} + +export function createJointWorkspace(...all: CachedWorkspaceProvider[]): CachedWorkspaceProvider { + // TODO @darzu: we're assuming they are disjoint for now + + // TODO @darzu: debug logging + console.log(`createJointWorkspace`); + + const flattenAndUniqueHdrs = (hs: Header[][]) => U.unique(hs.reduce((p, n) => [...p, ...n], []), h => h.id) + + const firstSync = async () => flattenAndUniqueHdrs(await Promise.all(all.map(w => w.firstSync()))) + const pendingSync = async () => flattenAndUniqueHdrs(await Promise.all(all.map(w => w.pendingSync()))) + // TODO @darzu: is this too expensive? + const getHeadersHash = () => joinHdrsHash(...all.map(w => w.getHeadersHash())) + + async function synchronize(reason: SynchronizationReason): Promise { + const expectedHashes = splitHdrsHash(reason.expectedHeadersHash) + const changes = await Promise.all(all.map((w, i) => w.synchronize({ + ...reason, + expectedHeadersHash: expectedHashes[i] + }))) + return flattenAndUniqueHdrs(changes) + } + function listSync(): Header[] { + // return all (assuming disjoint) + return all.map(w => w.listSync()) + .reduce((p, n) => [...p, ...n], []) + } + async function listAsync(): Promise { + await pendingSync() + // return all (assuming disjoint) + return (await Promise.all(all.map(w => w.listAsync()))) + .reduce((p, n) => [...p, ...n], []) + } + function getWorkspaceFor(h: Header): CachedWorkspaceProvider { + return all.reduce((p, n) => p || (n.getHeaderSync(h?.id) ? n : null), null) + } + async function getAsync(h: Header): Promise { + await pendingSync() + // choose the first matching one + const ws = getWorkspaceFor(h) + return ws?.getAsync(h) ?? undefined + } + function tryGetSync(h: Header): File { + // choose the first matching one + const ws = getWorkspaceFor(h) + return ws?.tryGetSync(h) ?? undefined + } + function getHeaderSync(id: string): Header { + return all.reduce((p, n) => p || n.getHeaderSync(id), null as Header) + } + async function setAsync(h: Header, prevVer: any, text?: ScriptText): Promise { + await pendingSync() + // TODO @darzu: dbg logging + console.log("joint:setAsync") + console.dir(all.map(w => w.getHeaderSync(h.id))) + const ws = getWorkspaceFor(h) ?? 
all[0] + return ws.setAsync(h, prevVer, text) + } + async function deleteAsync(h: Header, prevVer: any): Promise { + await pendingSync() + const ws = getWorkspaceFor(h) + return ws?.deleteAsync(h, prevVer) + } + async function resetAsync() { + await pendingSync() + await Promise.all(all.map(ws => ws.resetAsync())) + } + + const provider: CachedWorkspaceProvider = { + // cache + getHeadersHash, + synchronize, + pendingSync, + firstSync, + listSync, + tryGetSync, + getHeaderSync, + // workspace + getAsync, + setAsync, + deleteAsync, + listAsync, + resetAsync, + } + return provider; +} \ No newline at end of file diff --git a/webapp/src/memoryworkspace.ts b/webapp/src/workspaces/memoryworkspace.ts similarity index 100% rename from webapp/src/memoryworkspace.ts rename to webapp/src/workspaces/memoryworkspace.ts diff --git a/webapp/src/workspaces/memworkspace.ts b/webapp/src/workspaces/memworkspace.ts new file mode 100644 index 000000000000..bc67260da070 --- /dev/null +++ b/webapp/src/workspaces/memworkspace.ts @@ -0,0 +1,73 @@ +type Header = pxt.workspace.Header; +type ScriptText = pxt.workspace.ScriptText; +type File = pxt.workspace.File; +type Project = pxt.workspace.File; +type WorkspaceProvider = pxt.workspace.WorkspaceProvider; +type Version = pxt.workspace.Version; +type Asset = pxt.workspace.Version; + +// TODO @darzu: is this the abstraction we want? +// TODO @darzu: replace memory workspace? +export interface SyncWorkspaceProvider { + listSync(): Header[]; + getSync(h: Header): File; + setSync(h: Header, prevVersion: Version, text?: ScriptText): Version; + deleteSync?: (h: Header, prevVersion: Version) => void; + resetSync(): void; +} + +export interface MemWorkspaceProvider extends WorkspaceProvider, SyncWorkspaceProvider { } + +// TODO @darzu: de-duplicate with memoryworkspace +export function createMemWorkspace() { + // TODO @darzu: debug logging + console.log(`MemWorkspaceProvider`); + + // TODO @darzu: Project or File ?? + const projects: { [key: string]: File } = {} + + // TODO @darzu: useful? + + const syncProv: SyncWorkspaceProvider = { + listSync: (): Header[] => { + return Object.keys(projects).map(k => projects[k].header); + }, + getSync: (h: Header): File => { + return projects[h.id]; + }, + setSync: (h: Header, prevVersion: Version, text?: ScriptText): Version => { + // TODO @darzu: don't do this if text is null? that's what memoryworkspace does... but db.Table workspace doesn't? + projects[h.id] = { + header: h, + text: text, + // TODO @darzu: version??? 
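+                // appending "*" guarantees the stored version differs from prevVersion on
+                // every write, so callers comparing version tokens will observe the change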
+ version: prevVersion + "*" + }; + }, + deleteSync: (h: Header, prevVersion: Version): void => { + delete projects[h.id]; + }, + resetSync: (): void => { + Object.keys(projects).forEach(k => delete projects[k]) + }, + } + const provider: MemWorkspaceProvider = { + ...syncProv, + getAsync: (h: Header): Promise => { + return Promise.resolve(syncProv.getSync(h)) + }, + setAsync: (h: Header, prevVer: any, text?: ScriptText): Promise => { + return Promise.resolve(syncProv.setSync(h, prevVer, text)) + }, + deleteAsync: (h: Header, prevVer: any): Promise => { + return Promise.resolve(syncProv.deleteSync(h, prevVer)) + }, + listAsync: (): Promise => { + return Promise.resolve(syncProv.listSync()) + }, + resetAsync: (): Promise => { + return Promise.resolve(syncProv.resetSync()) + }, + } + return provider; +} \ No newline at end of file diff --git a/webapp/src/browserworkspace.ts b/webapp/src/workspaces/oldbrowserdbworkspace.ts similarity index 99% rename from webapp/src/browserworkspace.ts rename to webapp/src/workspaces/oldbrowserdbworkspace.ts index cb5daef51cf2..d7d39645e1fd 100644 --- a/webapp/src/browserworkspace.ts +++ b/webapp/src/workspaces/oldbrowserdbworkspace.ts @@ -1,4 +1,4 @@ -import * as db from "./db"; +import * as db from "../db"; let headers: db.Table; let texts: db.Table; diff --git a/webapp/src/onedrive.ts b/webapp/src/workspaces/onedrive.ts similarity index 99% rename from webapp/src/onedrive.ts rename to webapp/src/workspaces/onedrive.ts index 49cf6e068a39..186fcb1e92cd 100644 --- a/webapp/src/onedrive.ts +++ b/webapp/src/workspaces/onedrive.ts @@ -1,6 +1,6 @@ -import * as core from "./core"; +import * as core from "../core"; import * as cloudsync from "./cloudsync"; -import * as data from "./data"; +import * as data from "../data"; const rootdir = "/me/drive/special/approot" diff --git a/webapp/src/workspace.ts b/webapp/src/workspaces/workspace.ts similarity index 65% rename from webapp/src/workspace.ts rename to webapp/src/workspaces/workspace.ts index c932a2251fe6..2f9ca8b64091 100644 --- a/webapp/src/workspace.ts +++ b/webapp/src/workspaces/workspace.ts @@ -1,21 +1,29 @@ -/// -/// -/// +/// +/// +/// -import * as db from "./db"; -import * as core from "./core"; -import * as data from "./data"; +import * as db from "../db"; +import * as core from "../core"; +import * as data from "../data"; import * as browserworkspace from "./browserworkspace" import * as fileworkspace from "./fileworkspace" import * as memoryworkspace from "./memoryworkspace" import * as iframeworkspace from "./iframeworkspace" import * as cloudsync from "./cloudsync" import * as indexedDBWorkspace from "./idbworkspace"; -import * as cloudWorkspace from "./cloudworkspace"; -import * as compiler from "./compiler" +import * as compiler from "../compiler" +import * as auth from "../auth" +import * as cloud from "../cloud" +import * as cloudWorkspace from "./cloudworkspace" +import * as oldbrowserdbworkspace from "./oldbrowserdbworkspace" // TODO @darzu: dbg + import U = pxt.Util; import Cloud = pxt.Cloud; +import { createJointWorkspace, createJointWorkspace2 } from "./jointworkspace"; +import { createBrowserDbWorkspace } from "./browserdbworkspace"; +import { createMemWorkspace, SyncWorkspaceProvider } from "./memworkspace"; +import { CachedWorkspaceProvider, createCachedWorkspace, createCloudSyncWorkspace, toDbg } from "./cloudsyncworkspace"; // Avoid importing entire crypto-js /* tslint:disable:no-submodule-imports */ @@ -25,22 +33,105 @@ type Header = pxt.workspace.Header; type ScriptText = 
pxt.workspace.ScriptText; type WorkspaceProvider = pxt.workspace.WorkspaceProvider; type InstallHeader = pxt.workspace.InstallHeader; +type File = pxt.workspace.File; -interface HeaderWithScript { - header: Header; - text: ScriptText; - version: pxt.workspace.Version; -} +// TODO @darzu: cloud specific: +/* +MIN BAR: +[ ] UI around sync time +[ ] keeping cloud work in the background +NICE TO HAVE: +[ ] UI conflict resolution +[ ] UI around "manual refresh" +*/ -let allScripts: HeaderWithScript[] = []; +// TODO @darzu: todo list: +// [ ] remove / fix header session methods +// [ ] refreshHeadersSession +// [ ] invalidate virtual api: + // data.invalidateHeader("header", hd); + // data.invalidateHeader("text", hd); + // data.invalidateHeader("pkg-git-status", hd); + // data.invalidate("gh-commits:*"); // invalidate commits just in case +// [x] understand commitAsync +// [ ] remove forceSaveAsync +// [x] remove allScripts +// [ ] remove headerQ +// [x] remove syncAsync +// [ ] ensure we don't regress https://github.com/microsoft/pxt/issues/7520 +// [ ] add analytics +// [ ] hueristics for detecting project loss +// [ ] handle switchToMemoryWorkspace +// [ ] soft delete, ensure we are prefering +// [ ] don't block on network +// [ ] 1st load +// [ ] ever +// [ ] 1st time migrate local -> online +// [ ] multi-user seperation +// [ ] client can't change/delete other user content +// [ ] background work: +// [ ] queueing +// [ ] updating in the queue +// [ ] thorttle, debounce +// [ ] batch +// [ ] don't sync when tab idle, or sync exp backoff +// [ ] cloud state UX +// [ ] project list +// [ ] conflict resolution dialog +// [ ] in editor +// [ ] refactor out git / github stuff +// [ ] on first save: investigate conflicting save +// [ ] on first save: investigate NULL save +// [ ] clean up code +// [ ] handle all TODO @darzu's +// [ ] renames +// [ ] synchronize -> syncAsync +// [ ] cloudsyncworkspace, +// [ ] cloudsync, +// [ ] oldbrowserdbworkspace, +// [ ] synchronizedworkspace, +// [ ] workspacebehavior +// TESTING: +// for each: +// [ ] create new prj +// [ ] delete prj +// [ ] mod prj +// [ ] reset +// do: +// [ ] online +// [ ] offline, signed in +// [ ] offline, signed out +// [ ] multi-tab +// [ ] multi-browser + + +// TODO @darzu: remove. redudant w/ implCache +// let allScripts: File[] = []; // TODO @darzu: del let headerQ = new U.PromiseQueue(); -let impl: WorkspaceProvider; -let implType: string; +let impl: CachedWorkspaceProvider; +// TODO @darzu: del +// let implCache: CachedWorkspaceProvider; +let implType: WorkspaceKind; -function lookup(id: string) { - return allScripts.find(x => x.header.id == id || x.header.path == id); +// TODO @darzu: del +function lookup(id: string): File { + if (!id) { + console.log(`! looking up null id`) // TODO @darzu: dbg + } + + // TODO @darzu: what is .path used for? + const hdr = impl.getHeaderSync(id) + const resp = impl.tryGetSync(hdr) + + if (!resp) { + console.log(`! lookup for ${id} failed!`) // TODO @darzu: dbg + } + return resp + // TODO @darzu: del + // implCache.getSync(); + // return allScripts.find(x => x.header.id == id || x.header.path == id); } export function gitsha(data: string, encoding: "utf-8" | "base64" = "utf-8") { @@ -58,50 +149,131 @@ export function copyProjectToLegacyEditor(header: Header, majorVersion: number): return browserworkspace.copyProjectToLegacyEditor(header, majorVersion); } -export function setupWorkspace(id: string) { - U.assert(!impl, "workspace set twice"); - pxt.log(`workspace: ${id}`); - implType = id ?? 
"browser"; - switch (id) { +export type WorkspaceKind = "browser" | "fs" | "file" | "mem" | "memory" | "iframe" | "uwp" | "idb" | "cloud"; + +function chooseWorkspace(kind: WorkspaceKind = "browser"): pxt.workspace.WorkspaceProvider { + switch (kind) { case "fs": case "file": // Local file workspace, serializes data under target/projects/ - impl = fileworkspace.provider; - break; + return fileworkspace.provider; case "mem": case "memory": - impl = memoryworkspace.provider; - break; + return memoryworkspace.provider; case "iframe": // Iframe workspace, the editor relays sync messages back and forth when hosted in an iframe - impl = iframeworkspace.provider; - break; + return iframeworkspace.provider; case "uwp": fileworkspace.setApiAsync(pxt.winrt.workspace.fileApiAsync); - impl = pxt.winrt.workspace.getProvider(fileworkspace.provider); - break; + return pxt.winrt.workspace.getProvider(fileworkspace.provider); case "idb": - impl = indexedDBWorkspace.provider; - break; + return indexedDBWorkspace.provider; case "cloud": - impl = cloudWorkspace.provider; - break; + // TODO @darzu: + console.log("CHOOSING CLOUD WORKSPACE"); + return cloudWorkspace.provider; case "browser": default: - impl = browserworkspace.provider - break; + // TODO @darzu: + console.log("chooseWorkspace browser") + return createBrowserDbWorkspace("") + // return browserworkspace.provider } } -export function switchToCloudWorkspace(): string { - U.assert(implType !== "cloud", "workspace already cloud"); - const prevType = implType; - impl = cloudWorkspace.provider; - implType = "cloud"; - return prevType; -} +export function setupWorkspace(kind: WorkspaceKind): void { + U.assert(!impl, "workspace set twice"); + pxt.log(`workspace: ${kind}`); + // TODO @darzu: + console.log(`choosing workspace: ${kind}`); + implType = kind ?? "browser"; + const localChoice = chooseWorkspace(implType); + // TODO @darzu: + if (auth.loggedInSync()) { + console.log("logged in") // TODO @darzu: dbg + // TODO @darzu: need per-user cloud-local + const cloudApis = cloudWorkspace.provider + const localCloud = createBrowserDbWorkspace("cloud-local"); // TODO @darzu: use user choice for this too? + // const localCloud = createBrowserDbWorkspace(""); // TODO @darzu: undo dbg + // TODO @darzu: + // const cachedCloud = createSynchronizedWorkspace(cloudWorkspace.provider, localCloud, { + // conflict: ConflictStrategy.LastWriteWins, + // disjointSets: DisjointSetsStrategy.Synchronize + // }); + const cloudCache = createCloudSyncWorkspace(cloudApis, localCloud); + + const localChoiceCached = createCachedWorkspace(localChoice) + + // TODO @darzu: do one-time overlap migration + // migrateOverlap(localChoiceCached, cloudCache) + + // TODO @darzu: dbg: + // const old = oldbrowserdbworkspace.provider; + // old.listAsync().then(hs => { + // console.log("OLD browser db:") + // console.dir(hs.map(h => ({id: h.id, t: h.modificationTime}))) + // }) + + const joint = createJointWorkspace(cloudCache, localChoiceCached) + impl = joint + // impl = cloudCache // TODO @darzu: undo dbg + // implCache = joint + + // TODO @darzu: improve this + const msPerMin = 1000 * 60 + const afterSync = (changed: Header[]) => { + console.log(`...changes synced! 
# of changes ${changed.length}`) + onExternalChangesToHeaders(changed) + } + const doSync = async () => { + console.log("synchronizing with the cloud..."); + console.log("before:") + console.dir(joint.listSync().map(toDbg)) + const changed = await joint.synchronize({pollStorage: true}) + if (changed) { + console.log("after:") + console.dir(joint.listSync().map(toDbg)) + } + afterSync(changed) + } + setInterval(doSync, 5 * msPerMin) + // TODO @darzu: + joint.firstSync().then(afterSync) + + // TODO @darzu: when synchronization causes changes + // data.invalidate("header:*"); + // data.invalidate("text:*"); + + // TODO @darzu: we are assuming these workspaces don't overlapp... + // impl = createJointWorkspace2(cachedCloud, localChoice) + } + else { + console.log("logged out") // TODO @darzu: + // TODO @darzu: review + const localWs = localChoice + impl = createCachedWorkspace(localWs) + } -export function switchToWorkspace(id: string) { + // TODO @darzu: + // if (changes.length) { + // data.invalidate("header:*"); + // data.invalidate("text:*"); + // } +} + +// TODO @darzu: needed? +// export function switchToCloudWorkspace(): string { +// U.assert(implType !== "cloud", "workspace already cloud"); +// const prevType = implType; +// // TODO @darzu: +// console.log("switchToCloudWorkspace") +// impl = cloudWorkspace.provider; +// implType = "cloud"; +// return prevType; +// } + +// TODO @darzu: needed? +export function switchToWorkspace(id: WorkspaceKind) { impl = null; setupWorkspace(id); } @@ -133,18 +305,32 @@ async function switchToMemoryWorkspace(reason: string): Promise { }); } - impl = memoryworkspace.provider; + impl = createCachedWorkspace(memoryworkspace.provider); // TODO @darzu: use our new mem workspace implType = "mem"; } export function getHeaders(withDeleted = false) { + // TODO @darzu: include other stuff... + // return await impl.listAsync(); + + // TODO @darzu: we need to consolidate this to one Workspace impl maybeSyncHeadersAsync().done(); - let r = allScripts.map(e => e.header).filter(h => (withDeleted || !h.isDeleted) && !h.isBackup) + const cloudUserId = auth.user()?.id; + // TODO @darzu: use allScripts still? + // let r = allScripts.map(e => e.header) + let r = impl.listSync() + .filter(h => + (withDeleted || !h.isDeleted) + && !h.isBackup + // TODO @darzu: + // && (!h.cloudUserId || h.cloudUserId === cloudUserId) + ) r.sort((a, b) => b.recentUse - a.recentUse) return r } export function makeBackupAsync(h: Header, text: ScriptText): Promise
{ + // TODO @darzu: check mechanism & policy backup system let h2 = U.flatClone(h) h2.id = U.guidGen() @@ -182,7 +368,9 @@ export function restoreFromBackupAsync(h: Header) { } function cleanupBackupsAsync() { - const allHeaders = allScripts.map(e => e.header); + const allHeaders = impl.listSync(); + // TODO @darzu: del + // const allHeaders = allScripts.map(e => e.header); const refMap: pxt.Map = {}; // Figure out which scripts have backups @@ -195,38 +383,108 @@ function cleanupBackupsAsync() { })); } -export function getHeader(id: string) { - maybeSyncHeadersAsync().done(); - let e = lookup(id) - if (e && !e.header.isDeleted) - return e.header +export function getHeader(id: string): Header { + maybeSyncHeadersAsync().done(); // TODO @darzu: handle properly + const hdr = impl.getHeaderSync(id) + if (hdr && !hdr.isDeleted) // TODO @darzu: ensure we're treating soft delete consistently + return hdr + console.log(`! cannot find header: ${id}`) // TODO @darzu: dbg return null } +// TODO @darzu: delete +// export function getHeader2(id: string) { +// maybeSyncHeadersAsync().done(); +// let e = lookup(id) +// if (e && !e.header.isDeleted) +// return e.header +// return null +// } +// TODO @darzu: about workspacesessionid +/* + This represents the last known version of the headers + Any mutation should update the session hash. + The individual mutation will fail if we're out of sync + After we update, we should see if it is what we expected, if not, do a sync + We should regularly poll to see if there have been external changes. + Strategic points can check to see if there have been external changes. + + if we detect an external change, do sync: +*/ // this key is the max modificationTime value of the allHeaders // it is used to track if allHeaders need to be refreshed (syncAsync) -let sessionID: string = ""; +let _allHeadersSessionHash: string = ""; +// TODO @darzu: delete this (unneeded) +// useful because it is synchronous even though we always do the same thing if it is out of date export function isHeadersSessionOutdated() { - return pxt.storage.getLocal('workspacesessionid') != sessionID; -} -function maybeSyncHeadersAsync(): Promise { - if (isHeadersSessionOutdated()) // another tab took control - return syncAsync().then(() => { }) - return Promise.resolve(); + return pxt.storage.getLocal('workspacesessionid') != _allHeadersSessionHash; +} +// careful! only set the headers session after you know we were in sync before the mutation. 
+async function refreshHeadersSessionAfterMutation() { + await syncAsync() + const newHash = impl.getHeadersHash() + if (_allHeadersSessionHash !== newHash) { + _allHeadersSessionHash = newHash; + pxt.storage.setLocal('workspacesessionid', newHash); + } } -function refreshHeadersSession() { - // use # of scripts + time of last mod as key - sessionID = allScripts.length + ' ' + allScripts - .map(h => h.header.modificationTime) - .reduce((l, r) => Math.max(l, r), 0) - .toString() - if (isHeadersSessionOutdated()) { - pxt.storage.setLocal('workspacesessionid', sessionID); - pxt.debug(`workspace: refreshed headers session to ${sessionID}`); - data.invalidate("header:*"); - data.invalidate("text:*"); +// TODO @darzu: delete this (unneeded) +async function maybeSyncHeadersAsync() { + return syncAsync() + // if (isHeadersSessionOutdated()) { // another tab made changes + // return syncAsync() // ensure we know what those changes were + // } + // return Promise.resolve(); +} +// TODO @darzu: delete this (unused) +async function refreshHeadersSession() { + return await syncAsync() + + // TODO @darzu: del + // // use # of scripts + time of last mod as key + // _allHeadersSessionHash = impl.getHeadersHash(); + + // if (isHeadersSessionOutdated()) { + // pxt.storage.setLocal('workspacesessionid', _allHeadersSessionHash); + // pxt.debug(`workspace: refreshed headers session to ${_allHeadersSessionHash}`); + // console.log(`workspace: refreshed headers session to ${_allHeadersSessionHash}`); // TODO @darzu: dbg + // data.invalidate("header:*"); + // data.invalidate("text:*"); + // } +} +// contract post condition: the headers session will be up to date +export async function syncAsync(): Promise { + console.log("workspace:syncAsync"); + // TODO @darzu: ... and re-acquires headers ? + // TODO @darzu: clean up naming, layering + const expectedHeadersHash = pxt.storage.getLocal('workspacesessionid') + if (expectedHeadersHash !== _allHeadersSessionHash) { + const changedHdrs = await impl.synchronize({ + expectedHeadersHash, + }) + const newHash = impl.getHeadersHash() + _allHeadersSessionHash = newHash; + pxt.storage.setLocal('workspacesessionid', newHash); + onExternalChangesToHeaders(changedHdrs); } + // TODO @darzu: handle: + // filters?: pxt.editor.ProjectFilters; + // searchBar?: boolean; + + // TODO @darzu: \/ + /* + // force reload + ex.text = undefined + ex.version = undefined + + + impl.getSyncState() + */ + return {} } + +// TODO @darzu: check the usage of these three... we need to be really disciplined and ensure this fits +// with the all headers session hash usage. 
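+// One way the cross-tab detection could be wired up (a sketch only, assuming pxt.storage
+// is backed by window.localStorage): the browser fires the 'storage' event in the *other*
+// tabs whenever 'workspacesessionid' changes, so they could re-sync eagerly instead of
+// waiting for the next maybeSyncHeadersAsync() call:
+//
+//     window.addEventListener("storage", e => {
+//         if (e.key === "workspacesessionid" && e.newValue !== _allHeadersSessionHash)
+//             syncAsync().done()
+//     })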
// this is an identifier for the current frame // in order to lock headers for editing const workspaceID: string = pxt.Util.guidGen(); @@ -253,8 +511,11 @@ function checkHeaderSession(h: Header): void { export function initAsync() { if (!impl) { - impl = browserworkspace.provider; - implType = "browser"; + // TODO @darzu: hmmmm we should be use setupWorkspace + console.log("BAD init browser workspace") // TODO @darzu: + // TODO @darzu: + // impl = createCachedWorkspace(browserworkspace.provider); + // implType = "browser"; } return syncAsync() @@ -265,25 +526,60 @@ export function initAsync() { }) } -export function getTextAsync(id: string): Promise { - return maybeSyncHeadersAsync() - .then(() => { - let e = lookup(id) - if (!e) - return Promise.resolve(null as ScriptText) - if (e.text) - return Promise.resolve(e.text) - return headerQ.enqueue(id, () => impl.getAsync(e.header) - .then(resp => { - if (!e.text) { - // otherwise we were beaten to it - e.text = fixupFileNames(resp.text); - } - e.version = resp.version; - return e.text - })) - }) -} +export async function getTextAsync(id: string): Promise { + await maybeSyncHeadersAsync(); + const hdr = impl.getHeaderSync(id); + if (!hdr) { + console.log(`! Lookup failed for ${id}`); // TODO @darzu: dbg + return null + } + const proj = await impl.getAsync(hdr) + if (!proj) { + // TODO @darzu: this is a bad scenario. we should probably purge the header + console.log(`!!! FOUND HEADER BUT NOT PROJECT TEXT FOR: ${id}`); // TODO @darzu: dbg + console.dir(hdr) + return null + } + return proj.text + // TODO @darzu: incorperate: + // return + // .then(() => { + // let e = lookup(id) + // if (!e) + // return Promise.resolve(null as ScriptText) + // if (e.text) + // return Promise.resolve(e.text) + // return headerQ.enqueue(id, () => impl.getAsync(e.header) + // .then(resp => { + // if (!e.text) { + // // otherwise we were beaten to it + // e.text = fixupFileNames(resp.text); + // } + // e.version = resp.version; + // return e.text + // })) + // }) +} +// TODO @darzu: delete +// export function getTextAsync2(id: string): Promise { +// return maybeSyncHeadersAsync() +// .then(() => { +// let e = lookup(id) +// if (!e) +// return Promise.resolve(null as ScriptText) +// if (e.text) +// return Promise.resolve(e.text) +// return headerQ.enqueue(id, () => impl.getAsync(e.header) +// .then(resp => { +// if (!e.text) { +// // otherwise we were beaten to it +// e.text = fixupFileNames(resp.text); +// } +// e.version = resp.version; +// return e.text +// })) +// }) +// } export interface ScriptMeta { description: string; @@ -335,7 +631,8 @@ export function anonymousPublishAsync(h: Header, text: ScriptText, meta: ScriptM }) } -function fixupVersionAsync(e: HeaderWithScript) { +function fixupVersionAsync(e: File) { + // TODO @darzu: need to handle one-off tasks like this if (e.version !== undefined) return Promise.resolve() return impl.getAsync(e.header) @@ -345,20 +642,140 @@ function fixupVersionAsync(e: HeaderWithScript) { } export function forceSaveAsync(h: Header, text?: ScriptText, isCloud?: boolean): Promise { - clearHeaderSession(h); + clearHeaderSession(h); // TODO @darzu: why do we conservatively call clearHeaderSession everywhere? 
return saveAsync(h, text, isCloud); } -export function saveAsync(h: Header, text?: ScriptText, isCloud?: boolean): Promise { +// TODO @darzu: for debugging +function computeDiff(a: {header: Header, text: ScriptText}, b: {header: Header, text: ScriptText}): string { + const indent = (s: string) => '\t' + s + let res = '' + + if (!a.header || !a.text || !b.header || !b.text) { + res += `FULL: a.header:${!!a.header}, a.text:${!!a.text}, b.header:${!!b.header}, b.text:${!!b.text}` + return res; + } + + // headers + type HeaderK = keyof Header + const hdrKeys = U.unique([...Object.keys(a.header), ...Object.keys(b.header)], s => s) as HeaderK[] + const hasObjChanged = (a: any, b: any) => JSON.stringify(a) !== JSON.stringify(b) + const hasHdrChanged = (k: HeaderK) => hasObjChanged(a.header[k], b.header[k]) + const hdrChanges = hdrKeys.filter(hasHdrChanged) + const hdrDels = hdrChanges.filter(k => (k in a.header) && !(k in b.header)) + const hdrAdds = hdrChanges.filter(k => !(k in a.header) && (k in b.header)) + const hdrMods = hdrChanges.filter(k => (k in a.header) && (k in b.header)) + + res += `HEADER (+${hdrAdds.length}-${hdrDels.length}~${hdrMods.length})` + res += '\n' + const hdrDelStrs = hdrDels.map(k => `DEL ${k}`) + const hdrAddStrs = hdrAdds.map(k => `ADD ${k}: ${JSON.stringify(b.header[k])}`) + const hdrModStrs = hdrMods.map(k => `MOD ${k}: ${JSON.stringify(a.header[k])} => ${JSON.stringify(b.header[k])}`) + res += [...hdrDelStrs, ...hdrAddStrs, ...hdrModStrs].map(indent).join("\n") + res += '\n' + + // files + const filenames = U.unique([...Object.keys(a.text ?? {}), ...Object.keys(b.text ?? {})], s => s) + const hasFileChanged = (filename: string) => a.text[filename] !== b.text[filename] + const fileChanges = filenames.filter(hasFileChanged) + const fileDels = fileChanges.filter(k => (k in a.text) && !(k in b.text)) + const fileAdds = fileChanges.filter(k => !(k in a.text) && (k in b.text)) + const fileMods = fileChanges.filter(k => (k in a.text) && (k in b.text)) + + res += `FILES (+${fileAdds.length}-${fileDels.length}~${fileMods.length})` + res += '\n' + const fileDelStrs = fileDels.map(k => `DEL ${k}`) + const fileAddStrs = fileAdds.map(k => `ADD ${k}`) + const fileModStrs = fileMods.map(k => `MOD ${k}: ${a.text[k].length} => ${b.text[k].length}`) + res += [...fileDelStrs, ...fileAddStrs, ...fileModStrs].map(indent).join("\n") + res += '\n' + + return res; +} + +export async function saveAsync(header: Header, text?: ScriptText, isCloud?: boolean): Promise { + console.log(`workspace:saveAsync ${header.id}`) + if (!text) { + console.log("BAD blank save!") + // debugger; // TODO @darzu: dbg + // TODO @darzu: just return. 
that's what old browser workspace and saveAsync2 do in combo + } + + // TODO @darzu: port over from saveAsync2 + let newProj: File; + if (text) { + // header & text insert/update + let prevProj = await impl.getAsync(header) // TODO @darzu: dbg + if (prevProj) { + // update + newProj = { + ...prevProj, + header, + text + } + } else { + // new project + newProj = { + header, + text, + version: null, + } + console.log(`first save: ${header.id}`) // TODO @darzu: dbg + } + + // TODO @darzu: dbg + if (prevProj) { + // TODO @darzu: dbg: + const diff = computeDiff(prevProj, { + header, + text, + }) + console.log(`changes to ${header.id}:`) + console.log(diff) + } + } else { + // header only update + newProj = { + header, + text: null, + version: null, + } + } + + try { + const res = await impl.setAsync(newProj.header, newProj.version, newProj.text); + if (!res) { + // conflict occured + console.log(`conflict occured for ${header.id} at ${newProj.version}`) // TODO @darzu: dbg + // TODO @darzu: what to do? probably nothing + } + } catch (e) { + // Write failed; use in memory db. + // TODO @darzu: POLICY + console.log("switchToMemoryWorkspace (1)") // TODO @darzu: + console.dir(e) + // await switchToMemoryWorkspace("write failed"); + // await impl.setAsync(header, prj.version, text); + } + + await refreshHeadersSessionAfterMutation(); + + return; +} + +export function saveAsync2(h: Header, text?: ScriptText, isCloud?: boolean): Promise { + // TODO @darzu: rebuild this pxt.debug(`workspace: save ${h.id}`) if (h.isDeleted) clearHeaderSession(h); - checkHeaderSession(h); + checkHeaderSession(h); // TODO @darzu: what is header session... U.assert(h.target == pxt.appTarget.id); - if (h.temporary) + if (h.temporary) { + // TODO @darzu: sigh. what is "temporary" mean return Promise.resolve() + } let e = lookup(h.id) //U.assert(e.header === h) @@ -371,7 +788,7 @@ export function saveAsync(h: Header, text?: ScriptText, isCloud?: boolean): Prom e.text = text if (!isCloud) { h.pubCurrent = false - h.blobCurrent = false + h.blobCurrent_ = false h.modificationTime = U.nowSeconds(); h.targetVersion = h.targetVersion || "0.0.0"; } @@ -380,24 +797,28 @@ export function saveAsync(h: Header, text?: ScriptText, isCloud?: boolean): Prom } // perma-delete - if (h.isDeleted && h.blobVersion == "DELETED") { - let idx = allScripts.indexOf(e) - U.assert(idx >= 0) - allScripts.splice(idx, 1) + if (h.isDeleted && h.blobVersion_ == "DELETED") { + // TODO @darzu: "isDelete" is a command flag????? argh.. + // TODO @darzu: del: + // let idx = allScripts.indexOf(e) + // U.assert(idx >= 0) + // allScripts.splice(idx, 1) return headerQ.enqueue(h.id, () => fixupVersionAsync(e).then(() => impl.deleteAsync ? impl.deleteAsync(h, e.version) : impl.setAsync(h, e.version, {}))) - .finally(() => refreshHeadersSession()) + .finally(() => refreshHeadersSessionAfterMutation()) } // check if we have dynamic boards, store board info for home page rendering if (text && pxt.appTarget.simulator && pxt.appTarget.simulator.dynamicBoardDefinition) { + // TODO @darzu: what does this mean policy-wise... const pxtjson = pxt.Package.parseAndValidConfig(text[pxt.CONFIG_NAME]); if (pxtjson && pxtjson.dependencies) h.board = Object.keys(pxtjson.dependencies) .filter(p => !!pxt.bundledSvg(p))[0]; } + // TODO @darzu: what is this "headerQ" and why does it exist... 
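+    // headerQ is a per-id promise queue: enqueueing on h.id serializes the async save
+    // below with any other in-flight save/delete of the same project, so their
+    // read-then-write of the stored version cannot interleave.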
return headerQ.enqueue(h.id, async () => { await fixupVersionAsync(e); let ver: any; @@ -408,6 +829,8 @@ export function saveAsync(h: Header, text?: ScriptText, isCloud?: boolean): Prom ver = await impl.setAsync(h, e.version, toWrite); } catch (e) { // Write failed; use in memory db. + // TODO @darzu: POLICY + console.log("switchToMemoryWorkspace (1)") // TODO @darzu: await switchToMemoryWorkspace("write failed"); ver = await impl.setAsync(h, e.version, toWrite); } @@ -418,17 +841,19 @@ export function saveAsync(h: Header, text?: ScriptText, isCloud?: boolean): Prom if ((text && !isCloud) || h.isDeleted) { h.pubCurrent = false; - h.blobCurrent = false; + h.blobCurrent_ = false; h.saveId = null; + // TODO @darzu: we shouldn't need these invalidates; double check data.invalidate("text:" + h.id); data.invalidate("pkg-git-status:" + h.id); } - refreshHeadersSession(); + refreshHeadersSessionAfterMutation(); }); } function computePath(h: Header) { + // TODO @darzu: what's the deal with this path? let path = h.name.replace(/[^a-zA-Z0-9]+/g, " ").trim().replace(/ /g, "-") if (!path) path = "Untitled"; // do not translate @@ -443,17 +868,22 @@ function computePath(h: Header) { } export function importAsync(h: Header, text: ScriptText, isCloud = false) { + // TODO @darzu: why does import bypass workspaces or does it? + console.log(`importAsync: ${h.id}`); // TODO @darzu: dbg h.path = computePath(h) - const e: HeaderWithScript = { + const e: File = { header: h, text: text, version: null } - allScripts.push(e) + // TODO @darzu: del + // allScripts.push(e) return forceSaveAsync(h, text, isCloud) } export function installAsync(h0: InstallHeader, text: ScriptText) { + console.log(`workspace:installAsync ${h0.pubId}`) // TODO @darzu: dbg + // TODO @darzu: why do we "install" here? how does that relate to "import"? This is 5 years old... U.assert(h0.target == pxt.appTarget.id); const h =
h0 @@ -505,7 +935,7 @@ export function duplicateAsync(h: Header, text: ScriptText, newName?: string): P export function createDuplicateName(h: Header) { let reducedName = h.name.indexOf("#") > -1 ? h.name.substring(0, h.name.lastIndexOf('#')).trim() : h.name; - let names = U.toDictionary(allScripts.filter(e => !e.header.isDeleted), e => e.header.name) + let names = U.toDictionary(impl.listSync().filter(h => !h.isDeleted), h => h.name) let n = 2 while (names.hasOwnProperty(reducedName + " #" + n)) n++ @@ -531,6 +961,7 @@ export function fixupFileNames(txt: ScriptText) { } +// TODO @darzu: do we need this raw table? might not have a browser db even const scriptDlQ = new U.PromiseQueue(); const scripts = new db.Table("script"); // cache for published scripts export async function getPublishedScriptAsync(id: string) { @@ -571,8 +1002,9 @@ export async function hasPullAsync(hd: Header) { } export async function pullAsync(hd: Header, checkOnly = false) { + console.log("pullAsync") // TODO @darzu: dbg let files = await getTextAsync(hd.id) - await recomputeHeaderFlagsAsync(hd, files) + await recomputeHeaderGitFlagsAsync(hd, files) let gitjsontext = files[GIT_JSON] if (!gitjsontext) return PullStatus.NoSourceControl @@ -636,6 +1068,7 @@ export async function hasMergeConflictMarkersAsync(hd: Header): Promise } export async function prAsync(hd: Header, commitId: string, msg: string) { + // TODO @darzu: this gh stuff should be moved elsewhere probably.. let parsed = pxt.github.parseRepoId(hd.githubId) // merge conflict - create a Pull Request const branchName = await pxt.github.getNewBranchNameAsync(parsed.fullName, "merge-") @@ -659,6 +1092,7 @@ export function bumpedVersion(cfg: pxt.PackageConfig) { } export async function bumpAsync(hd: Header, newVer = "") { + console.log("bumpAsync") // TODO @darzu: dbg checkHeaderSession(hd); let files = await getTextAsync(hd.id) @@ -689,6 +1123,7 @@ const BLOCKSDIFF_PREVIEW_PATH = ".github/makecode/blocksdiff.png"; const BINARY_JS_PATH = "assets/js/binary.js"; const VERSION_TXT_PATH = "assets/version.txt"; export async function commitAsync(hd: Header, options: CommitOptions = {}) { + // TODO @darzu: learn how this works await cloudsync.ensureGitHubTokenAsync(); let files = await getTextAsync(hd.id) @@ -1061,9 +1496,12 @@ export async function exportToGithubAsync(hd: Header, repoid: string) { // to be called after loading header in a editor -export async function recomputeHeaderFlagsAsync(h: Header, files: ScriptText) { +export async function recomputeHeaderGitFlagsAsync(h: Header, files: ScriptText) { checkHeaderSession(h); + // TODO @darzu: dbg + console.log("recomputeHeaderFlagsAsync") + h.githubCurrent = false const gitjson: GitJson = JSON.parse(files[GIT_JSON] || "{}") @@ -1187,6 +1625,10 @@ export function prepareConfigForGithub(content: string, createRelease?: boolean) } export async function initializeGithubRepoAsync(hd: Header, repoid: string, forceTemplateFiles: boolean, binaryJs: boolean) { + // TODO @darzu: dbg + console.log("initializeGithubRepoAsync") + // TODO @darzu: understand this function + await cloudsync.ensureGitHubTokenAsync(); let parsed = pxt.github.parseRepoId(repoid) @@ -1332,6 +1774,7 @@ export function downloadFilesByIdAsync(id: string): Promise> { } export function installByIdAsync(id: string) { + // TODO @darzu: what is install? 
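+    // roughly: fetch the published script record and its files by share id, then
+    // installAsync() turns them into a fresh local header + project text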
return Cloud.privateGetAsync(id, /* forceLiveEndpoint */ true) .then((scr: Cloud.JsonScript) => getPublishedScriptAsync(scr.id) @@ -1347,50 +1790,68 @@ export function installByIdAsync(id: string) { }, files))) } -export function saveToCloudAsync(h: Header) { +// TODO @darzu: no one should call this +export async function saveToCloudAsync(h: Header) { checkHeaderSession(h); - return cloudsync.saveToCloudAsync(h) + if (!await auth.loggedIn()) + return; + // TODO @darzu: bypass cloudsync ? + // TODO @darzu: maybe rely on "syncAsync" instead? + const text = await getTextAsync(h.id); + // TODO @darzu: debug logging + console.log("saveToCloudAsync") + return cloud.setAsync(h, h.cloudVersion, text); + + // return cloudsync.saveToCloudAsync(h) +} + +// called when external changes happen to our headers (e.g. multi-tab +// scenarios, cloud sync, etc.) +function onExternalChangesToHeaders(newHdrs: Header[]) { + newHdrs.forEach(hd => { + data.invalidateHeader("header", hd); + data.invalidateHeader("text", hd); + data.invalidateHeader("pkg-git-status", hd); + }) + if (newHdrs.length) { + // TODO @darzu: can we make this more fine grain? + data.invalidate("gh-commits:*"); // invalidate commits just in case + console.log(`onExternalHeaderChanges:`) + console.dir(newHdrs.map(toDbg)) // TODO @darzu: dbg + } } -export function resetCloudAsync(): Promise { - // always sync local scripts before resetting - // remove all cloudsync or github repositories - return syncAsync().catch(e => { }) - .then(() => cloudsync.resetAsync()) - .then(() => Promise.all(allScripts.map(e => e.header).filter(h => h.cloudSync || h.githubId).map(h => { - // Remove cloud sync'ed project - h.isDeleted = true; - h.blobVersion = "DELETED"; - return forceSaveAsync(h, null, true); - }))) - .then(() => syncAsync()) - .then(() => { }); -} // this promise is set while a sync is in progress // cleared when sync is done. let syncAsyncPromise: Promise; -export function syncAsync(): Promise { +export function syncAsync2(): Promise { + // TODO @darzu: this function shouldn't be needed ideally pxt.debug("workspace: sync") if (syncAsyncPromise) return syncAsyncPromise; return syncAsyncPromise = impl.listAsync() .catch((e) => { // There might be a problem with the native databases. Switch to memory for this session so the user can at // least use the editor. + // TODO @darzu: POLICY + console.log("switchToMemoryWorkspace (2)") // TODO @darzu: + console.dir(e) return switchToMemoryWorkspace("sync failed") .then(() => impl.listAsync()); }) .then(headers => { - const existing = U.toDictionary(allScripts || [], h => h.header.id) // this is an in-place update the header instances - allScripts = headers.map(hd => { - let ex = existing[hd.id] + // TODO @darzu: del "let" + let allScripts = headers.map(hd => { + let ex = impl.tryGetSync(hd) if (ex) { if (JSON.stringify(ex.header) !== JSON.stringify(hd)) { U.jsonCopyFrom(ex.header, hd) // force reload ex.text = undefined ex.version = undefined + // TODO @darzu: handle data API subscriptions on header changed + console.log(`INVALIDATING header ${hd.id}`) // TODO @darzu: data.invalidateHeader("header", hd); data.invalidateHeader("text", hd); data.invalidateHeader("pkg-git-status", hd); @@ -1408,7 +1869,7 @@ export function syncAsync(): Promise { cloudsync.syncAsync().done() // sync in background }) .then(() => { - refreshHeadersSession(); + // TODO @darzu: what does refreshHeadersSession do? return impl.getSyncState ? 
impl.getSyncState() : null }) .finally(() => { @@ -1416,23 +1877,32 @@ export function syncAsync(): Promise { }); } -export function resetAsync() { - allScripts = [] - return impl.resetAsync() - .then(cloudsync.resetAsync) - .then(db.destroyAsync) - .then(pxt.BrowserUtils.clearTranslationDbAsync) - .then(pxt.BrowserUtils.clearTutorialInfoDbAsync) - .then(compiler.clearApiInfoDbAsync) - .then(() => { - pxt.storage.clearLocal(); - data.clearCache(); - // keep local token (localhost and electron) on reset - if (Cloud.localToken) - pxt.storage.setLocal("local_token", Cloud.localToken); - }) - .then(() => syncAsync()) // sync again to notify other tabs - .then(() => { }); +export async function resetAsync() { + // TODO @darzu: this should just pass through to workspace impl + console.log("resetAsync (1)") // TODO @darzu: + // TODO @darzu: del + // allScripts = [] + await impl.resetAsync(); + console.log("resetAsync (2)") // TODO @darzu: + await cloudsync.resetAsync(); + console.log("resetAsync (3)") // TODO @darzu: + await db.destroyAsync(); + console.log("resetAsync (4)") // TODO @darzu: + await pxt.BrowserUtils.clearTranslationDbAsync(); + console.log("resetAsync (5)") // TODO @darzu: + await pxt.BrowserUtils.clearTutorialInfoDbAsync(); + console.log("resetAsync (6)") // TODO @darzu: + await compiler.clearApiInfoDbAsync(); + console.log("resetAsync (7)") // TODO @darzu: + pxt.storage.clearLocal(); + console.log("resetAsync (8)") // TODO @darzu: + data.clearCache(); + console.log("resetAsync (9)") // TODO @darzu: + // keep local token (localhost and electron) on reset + if (Cloud.localToken) + pxt.storage.setLocal("local_token", Cloud.localToken); + await syncAsync() // sync again to notify other tab; + console.log("resetAsync (10)") // TODO @darzu: } export function loadedAsync() { @@ -1455,7 +1925,10 @@ export function listAssetsAsync(id: string): Promise { } export function isBrowserWorkspace() { - return impl === browserworkspace.provider; + // TODO @darzu: trace all uses. this shouldn't be needed + console.log("workspace.ts:isBrowserWorkspace") + return false + // return impl === browserworkspace.provider; } export function fireEvent(ev: pxt.editor.events.Event) { @@ -1486,6 +1959,7 @@ data.mountVirtualApi("headers", { p = data.stripProtocol(p) const headers = getHeaders() if (!p) return Promise.resolve(headers) + console.log(`data SEARCH headers:${p}`) // TODO @darzu: return compiler.projectSearchAsync({ term: p, headers }) .then((searchResults: pxtc.service.ProjectSearchInfo[]) => searchResults) .then(searchResults => { @@ -1506,6 +1980,7 @@ data.mountVirtualApi("headers", { data.mountVirtualApi("text", { getAsync: p => { const m = /^[\w\-]+:([^\/]+)(\/(.*))?/.exec(p) + // TODO @darzu: thin layer over workspace impl? 
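        // (editor note) the regex above splits a virtual-API path of the form
        // "text:<headerId>/<fileName>": m[1] captures the header id and m[3] the optional
        // file name inside that project. For example (hypothetical path):
        //
        //   const m = /^[\w\-]+:([^\/]+)(\/(.*))?/.exec("text:3a30f274-9612-4184-d9ad-e14c99cf81e7/main.ts");
        //   // m[1] === "3a30f274-9612-4184-d9ad-e14c99cf81e7"
        //   // m[3] === "main.ts"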
        return getTextAsync(m[1])
            .then(files => {
                if (m[3])
diff --git a/webapp/src/workspaces/workspacebehavior.ts b/webapp/src/workspaces/workspacebehavior.ts
new file mode 100644
index 000000000000..4789812e9bf5
--- /dev/null
+++ b/webapp/src/workspaces/workspacebehavior.ts
@@ -0,0 +1,147 @@
+import { createMemWorkspace, SyncWorkspaceProvider } from "./memworkspace";
+import U = pxt.Util;
+
+type WorkspaceProvider = pxt.workspace.WorkspaceProvider;
+type Header = pxt.workspace.Header;
+type Version = pxt.workspace.Version;
+
+// TODO @darzu: what I need from a header: modificationTime, isDeleted
+// TODO @darzu: example:
+const exampleRealHeader: Header & {_id: string, _rev: string} = {
+    "name": "c2",
+    "meta": {},
+    "editor": "blocksprj",
+    "pubId": "",
+    "pubCurrent": false,
+    "target": "arcade",
+    "targetVersion": "1.3.17",
+    "cloudUserId": "3341c114-06d5-4ca5-9c2b-b9bb4fb13e81",
+    "id": "3a30f274-9612-4184-d9ad-e14c99cf81e7",
+    "recentUse": 1607395785,
+    "modificationTime": 1607395785,
+    "path": "c2",
+    "blobCurrent_": false,
+    "saveId": null,
+    "githubCurrent": false,
+    "cloudCurrent": true,
+    "cloudVersion": "\"4400a5b3-0000-0100-0000-5fcee9bd0000\"",
+    "_id": "header--3a30f274-9612-4184-d9ad-e14c99cf81e7",
+    "_rev": "12-b259964d5d245a44f7141b7c5c41ca23", // TODO @darzu: gotta figure out _rev and _id ...
+    // TODO @darzu: these are missing in the real header!!
+    isDeleted: false,
+    blobId_: null,
+    blobVersion_: null,
+};
+
+export enum ConflictStrategy {
+    LastWriteWins
+}
+export enum DisjointSetsStrategy {
+    Synchronize,
+    DontSynchronize
+}
+
+export interface Strategy {
+    conflict: ConflictStrategy,
+    disjointSets: DisjointSetsStrategy
+}
+
+function resolveConflict(a: Header, b: Header, strat: ConflictStrategy): Header {
+    if (strat === ConflictStrategy.LastWriteWins)
+        return a.modificationTime > b.modificationTime ? a : b;
+    U.unreachable(strat);
+}
+
+function hasChanged(a: Header, b: Header): boolean {
+    // TODO @darzu: use version uuid instead?
+    return a.modificationTime !== b.modificationTime
+}
+
+async function transfer(h: Header, fromWs: WorkspaceProvider, toWs: WorkspaceProvider): Promise<Header>
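// (editor note, hedged sketch) resolveConflict above implements last-write-wins purely on
// modificationTime, and hasChanged compares the same field; a minimal usage example with
// two hypothetical header variants:
//
//   const older = { ...exampleRealHeader, modificationTime: 1607395785 };
//   const newer = { ...exampleRealHeader, modificationTime: 1607395999 };
//   resolveConflict(newer, older, ConflictStrategy.LastWriteWins) === newer; // true
//   hasChanged(newer, older); // true, their modificationTimes differ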
{ + const fromPrj = await fromWs.getAsync(h) + const prevVersion: Version = null // TODO @darzu: what do we do with this version thing... + const toRes: Version = await toWs.setAsync(h, prevVersion, fromPrj.text) + return h; +} + +export interface SyncResult { + changed: Header[], + left: Header[], + right: Header[], +} + +// TODO @darzu: this has been moved into cloudsync workspace.. not sure it's still needed here +export async function synchronize(left: WorkspaceProvider, right: WorkspaceProvider, strat: Strategy): Promise { + // TODO @darzu: add "on changes identified" handler so we can show in-progress syncing + + // TODO @darzu: thoughts & notes + // idea: never delete, only say "isDeleted" is true; can optimize away later + /* + sync scenarios: + cloud & cloud cache (last write wins; + any workspace & memory workspace (memory leads) + + synchronization strategies: + local & remote cloud + local wins? cloud wins? + last write wins? UTC timestamp? + primary & secondary + primary always truth ? + */ + + const lHdrsList = await left.listAsync() + const rHdrsList = await right.listAsync() + const lHdrs = U.toDictionary(lHdrsList, h => h.id) + const rHdrs = U.toDictionary(rHdrsList, h => h.id) + const allHdrsList = [...lHdrsList, ...rHdrsList] + + // determine left-only, overlap, and right-only sets + const overlap = allHdrsList.reduce( + (p: {[key: string]: Header}, n) => lHdrs[n.id] && rHdrs[n.id] ? (p[n.id] = n) && p : p, {}) + const lOnly = allHdrsList.reduce( + (p: {[key: string]: Header}, n) => lHdrs[n.id] && !rHdrs[n.id] ? (p[n.id] = n) && p : p, {}) + const rOnly = allHdrsList.reduce( + (p: {[key: string]: Header}, n) => !lHdrs[n.id] && rHdrs[n.id] ? (p[n.id] = n) && p : p, {}) + + // resolve conflicts + const conflictResults = U.values(overlap).map(h => resolveConflict(lHdrs[h.id], rHdrs[h.id], strat.conflict)) + + // update left + const lChanges = conflictResults.reduce((p: Header[], n) => hasChanged(n, lHdrs[n.id]) ? [...p, n] : p, []) + let lToPush = lChanges + if (strat.disjointSets === DisjointSetsStrategy.Synchronize) + lToPush = [...lToPush, ...U.values(rOnly)] + const lPushPromises = lToPush.map(h => transfer(h, right, left)) + + // update right + const rChanges = conflictResults.reduce((p: Header[], n) => hasChanged(n, rHdrs[n.id]) ? [...p, n] : p, []) + let rToPush = rChanges + if (strat.disjointSets === DisjointSetsStrategy.Synchronize) + rToPush = [...rToPush, ...U.values(lOnly)] + const rPushPromises = rToPush.map(h => transfer(h, left, right)) + + // wait + // TODO @darzu: batching? throttling? incremental? + const changed = await Promise.all([...lPushPromises, ...rPushPromises]) + + // return final results + const lRes = [...U.values(lOnly), ...lToPush] + const rRes = [...U.values(rOnly), ...rToPush] + return { + changed, + left: lRes, + right: rRes + } +} + +// TODO @darzu: use or delete +// export function wrapInMemCache(ws: WorkspaceProvider): SyncWorkspaceProvider & WorkspaceProvider & Synchronizable { +// return createSynchronizedWorkspace(ws, createMemWorkspace(), { +// conflict: ConflictStrategy.LastWriteWins, +// disjointSets: DisjointSetsStrategy.Synchronize +// }); +// } + +export async function migrateOverlap(fromWs: WorkspaceProvider, toWs: WorkspaceProvider) { + // TODO @darzu: +} \ No newline at end of file
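
A possible usage of the synchronize routine above, as a hedged sketch: synchronize, Strategy, ConflictStrategy and DisjointSetsStrategy come from workspacebehavior.ts, while the wrapper function name, its parameters, and its location are hypothetical.

// hypothetical caller, e.g. somewhere under webapp/src/workspaces/
import { synchronize, ConflictStrategy, DisjointSetsStrategy, Strategy } from "./workspacebehavior";

type WorkspaceProvider = pxt.workspace.WorkspaceProvider;

// Sync two providers: conflicts resolve to the most recently modified header,
// and projects that exist on only one side are copied to the other.
async function syncPairAsync(local: WorkspaceProvider, remote: WorkspaceProvider) {
    const strategy: Strategy = {
        conflict: ConflictStrategy.LastWriteWins,
        disjointSets: DisjointSetsStrategy.Synchronize,
    };
    const res = await synchronize(local, remote, strategy);
    pxt.debug(`synchronized ${res.changed.length} changed header(s)`);
    return res;
}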