diff --git a/core/actions/googleDriveImport/formatDriveData.ts b/core/actions/googleDriveImport/formatDriveData.ts index 4a92d4850..98ebe1dae 100644 --- a/core/actions/googleDriveImport/formatDriveData.ts +++ b/core/actions/googleDriveImport/formatDriveData.ts @@ -1,13 +1,18 @@ -import { writeFile } from "fs/promises"; +// import { writeFile } from "fs/promises"; +import type { Element, Root } from "hast"; import { rehype } from "rehype"; import rehypeFormat from "rehype-format"; +import { visit } from "unist-util-visit"; import type { PubsId } from "db/public"; +import { logger } from "logger"; import type { DriveData } from "./getGDriveFiles"; +import { uploadFileToS3 } from "~/lib/server"; import { appendFigureAttributes, + cleanUnusedSpans, formatFigureReferences, formatLists, getDescription, @@ -30,7 +35,9 @@ import { structureInlineMath, structureReferences, structureVideos, + tableToObjectArray, } from "./gdocPlugins"; +import { getAssetFile } from "./getGDriveFiles"; export type FormattedDriveData = { pubDescription: string; @@ -42,6 +49,63 @@ export type FormattedDriveData = { }[]; discussions: { id: PubsId; values: {} }[]; }; +const processAssets = async (html: string, pubId: string): Promise => { + const result = await rehype() + .use(() => async (tree: Root) => { + const assetUrls: { [key: string]: string } = {}; + visit(tree, "element", (node: any) => { + const hasSrc = ["img", "video", "audio"].includes(node.tagName); + const isDownload = + node.tagName === "a" && node.properties.className === "file-button"; + if (hasSrc || isDownload) { + const propertyKey = hasSrc ? "src" : "href"; + const originalAssetUrl = node.properties[propertyKey]; + const urlObject = new URL(originalAssetUrl); + if (urlObject.hostname !== "pubpub.org") { + assetUrls[originalAssetUrl] = ""; + } + } + }); + await Promise.all( + Object.keys(assetUrls).map(async (originalAssetUrl) => { + try { + const assetData = await getAssetFile(originalAssetUrl); + if (assetData) { + const uploadedUrl = await uploadFileToS3( + pubId, + assetData.filename, + assetData.buffer, + { contentType: assetData.mimetype } + ); + assetUrls[originalAssetUrl] = uploadedUrl.replace( + "assets.app.pubpub.org.s3.us-east-1.amazonaws.com", + "assets.app.pubpub.org" + ); + } else { + assetUrls[originalAssetUrl] = originalAssetUrl; + } + } catch (err) { + assetUrls[originalAssetUrl] = originalAssetUrl; + } + }) + ); + + visit(tree, "element", (node: any) => { + const hasSrc = ["img", "video", "audio"].includes(node.tagName); + const isDownload = + node.tagName === "a" && node.properties.className === "file-button"; + if (hasSrc || isDownload) { + const propertyKey = hasSrc ? "src" : "href"; + const originalAssetUrl = node.properties[propertyKey]; + if (assetUrls[originalAssetUrl]) { + node.properties[propertyKey] = assetUrls[originalAssetUrl]; + } + } + }); + }) + .process(html); + return String(result); +}; const processHtml = async (html: string): Promise => { const result = await rehype() @@ -62,6 +126,7 @@ const processHtml = async (html: string): Promise => { .use(structureCodeBlock) .use(structureInlineCode) .use(structureAnchors) + .use(cleanUnusedSpans) .use(structureReferences) .use(structureFootnotes) .use(appendFigureAttributes) /* Assumes figures are
elements */ @@ -74,9 +139,11 @@ const processHtml = async (html: string): Promise => { export const formatDriveData = async ( dataFromDrive: DriveData, - communitySlug: string + communitySlug: string, + pubId: string ): Promise => { const formattedPubHtml = await processHtml(dataFromDrive.pubHtml); + const formattedPubHtmlWithAssets = await processAssets(formattedPubHtml, pubId); /* Check for a description in the most recent version */ const latestRawVersion = dataFromDrive.versions.reduce((latest, version) => { @@ -184,7 +251,7 @@ export const formatDriveData = async ( const output = { pubDescription: latestPubDescription, - pubHtml: String(formattedPubHtml), + pubHtml: String(formattedPubHtmlWithAssets), versions, discussions: comments, }; diff --git a/core/actions/googleDriveImport/gdocPlugins.test.ts b/core/actions/googleDriveImport/gdocPlugins.test.ts index 28dbb1e05..b5fa72db3 100644 --- a/core/actions/googleDriveImport/gdocPlugins.test.ts +++ b/core/actions/googleDriveImport/gdocPlugins.test.ts @@ -6,6 +6,7 @@ import { logger } from "logger"; import { appendFigureAttributes, basic, + cleanUnusedSpans, formatFigureReferences, formatLists, getDescription, @@ -99,6 +100,23 @@ test("Convert vert table", async () => { expect(result).toStrictEqual(expectedOutput); }); +test("Convert link-source table", async () => { + const inputNode = JSON.parse( + '{"type":"element","tagName":"table","children":[{"type":"element","tagName":"tbody","children":[{"type":"element","tagName":"tr","children":[{"type":"element","tagName":"td","children":[{"type":"element","tagName":"p","children":[{"type":"element","tagName":"span","children":[{"type":"text","value":"Type"}]}]}]},{"type":"element","tagName":"td","children":[{"type":"element","tagName":"p","children":[{"type":"text"},{"type":"element","tagName":"span","children":[{"type":"text","value":"Source"}]}]}]},{"type":"element","tagName":"td","children":[{"type":"element","tagName":"p","children":[{"type":"element","tagName":"span","children":[{"type":"text","value":"Static Image"}]}]}]}]},{"type":"element","tagName":"tr","children":[{"type":"element","tagName":"td","children":[{"type":"element","tagName":"p","children":[{"type":"element","tagName":"span","children":[{"type":"text","value":"Video"}]}]}]},{"type":"element","tagName":"td","children":[{"type":"element","tagName":"p","children":[{"type":"element","tagName":"span","children":[{"type":"element","tagName":"a","properties":{"href":"https://www.image-url.com"},"children":[{"type":"text","value":"image-filename.png"}]}]}]}]},{"type":"element","tagName":"td","children":[{"type":"element","tagName":"p","children":[{"type":"element","tagName":"span","children":[{"type":"element","tagName":"a","properties":{"href":"https://www.fallback-url.com"},"children":[{"type":"text","value":"fallback-filename.png"}]}]}]}]}]}]}]}' + ); + const expectedOutput = [ + { + source: "https://www.image-url.com", + type: "video", + staticimage: "https://www.fallback-url.com", + }, + ]; + + const result = tableToObjectArray(inputNode); + + expect(result).toStrictEqual(expectedOutput); +}); + test("Do Nothing", async () => { const inputHtml = '
Content
'; @@ -353,7 +371,7 @@ test("Structure Images - DoubleVert Table", async () => { expect(trimAll(result)).toBe(trimAll(expectedOutputHtml)); }); -test("Structure Images", async () => { +test("Structure Videos", async () => { const inputHtml = ` @@ -641,6 +659,7 @@ test("Structure InlineMath", async () => {

I am just writing a lovely $10 equation like this $y=2x + 5$

Should also work as long as styling doesn't change throughout, such as $z= 25x + 2$ and so on.

+

Now consider two different genes, $A$ and $B$, with variation in allelic state across a population of diploid organisms. One gene $A$ has two alleles $A$ and $a$, resulting in three allelic states,

`; @@ -650,6 +669,7 @@ test("Structure InlineMath", async () => {

I am just writing a lovely $10 equation like this y=2x+5y=2x + 5

Should also work as long as styling doesn't change throughout, such as z=25x+2z= 25x + 2 and so on.

+

Now consider two different genes, AA and BB, with variation in allelic state across a population of diploid organisms. One gene AA has two alleles AA and aa, resulting in three allelic states,

`; @@ -878,6 +898,7 @@ test("Structure References", async () => {

I'd also like to add [10.12341] here.

And this should be the same number [10.12341] here. But this diff, [10.5123/123].

+

Two more [10.1016/S0167-4781(02)00500-6][10.abc123].

`; @@ -906,11 +927,19 @@ test("Structure References", async () => { data-type="reference" data-value="10.5123/123"> [4] .

+

Two more + [5] + + [6] + .

`; const result = await rehype() + .use(cleanUnusedSpans) .use(structureReferences) .process(inputHtml) .then((file) => String(file)) @@ -921,6 +950,36 @@ test("Structure References", async () => { expect(trimAll(result)).toBe(trimAll(expectedOutputHtml)); }); +test("cleanUnusedSpans", async () => { + const inputHtml = ` + + + +

Hello there.

+

What?

+ + + `; + const expectedOutputHtml = ` + + +

Hello there.

+

What?

+ + + `; + + const result = await rehype() + .use(cleanUnusedSpans) + .process(inputHtml) + .then((file) => String(file)) + .catch((error) => { + logger.error(error); + }); + + expect(trimAll(result)).toBe(trimAll(expectedOutputHtml)); +}); + test("Structure Footnotes", async () => { const inputHtml = ` diff --git a/core/actions/googleDriveImport/gdocPlugins.ts b/core/actions/googleDriveImport/gdocPlugins.ts index 409bd7c64..529ea50c0 100644 --- a/core/actions/googleDriveImport/gdocPlugins.ts +++ b/core/actions/googleDriveImport/gdocPlugins.ts @@ -97,26 +97,55 @@ export const tableToObjectArray = (node: any) => { const isHoriz = validTypes.includes(headersVert[1].toLowerCase()); const isVert = validTypes.includes(headersHoriz[1].toLowerCase()); + const getCellContent = (tableType: string, headerVal: string, cell: any): any => { + const isTypeWithHtmlValue = + !["math", "reference"].includes(tableType) && headerVal === "value"; + const isCaption = headerVal === "caption"; + if (isTypeWithHtmlValue || isCaption) { + return cell.children; + } + + const isAssetSource = + ["image", "video", "audio", "file"].includes(tableType) && headerVal === "source"; + const isStaticSource = headerVal === "staticimage"; + if (isAssetSource || isStaticSource) { + const findAnchor = (node: any): any => { + if (node.tagName === "a") { + return node; + } + if (node.children) { + for (const child of node.children) { + const found = findAnchor(child); + if (found) { + return found; + } + } + } + return null; + }; + + const anchor = findAnchor(cell); + if (anchor && anchor.properties.href) { + return anchor.properties.href; + } + } + + return getTextContent(cell).trim(); + }; + let data; if (isHoriz) { data = rows.slice(1).map((row: any) => { const cells = row.children.filter((child: any) => child.tagName === "td"); - const obj: { [key: string]: any } = {}; const typeIndex = headersHoriz.findIndex((header) => header === "type"); const tableType = getTextContent(cells[typeIndex]) .trim() .toLowerCase() .replace(/\s+/g, ""); + const obj: { [key: string]: any } = {}; cells.forEach((cell: any, index: number) => { - if ( - (!["math", "reference"].includes(tableType) && - headersHoriz[index] === "value") || - headersHoriz[index] === "caption" - ) { - obj[headersHoriz[index]] = cell.children; - } else { - obj[headersHoriz[index]] = getTextContent(cell).trim(); - } + const headerVal = headersHoriz[index]; + obj[headersHoriz[index]] = getCellContent(tableType, headerVal, cell); }); return { ...obj, type: tableType }; @@ -132,15 +161,8 @@ export const tableToObjectArray = (node: any) => { rows.forEach((row: any, rowIndex: number) => { const cell = row.children[colIndex + 1]; - if ( - (!["math", "reference"].includes(tableType) && - headersVert[rowIndex] === "value") || - headersVert[rowIndex] === "caption" - ) { - obj[headersVert[rowIndex]] = cell.children; - } else { - obj[headersVert[rowIndex]] = getTextContent(cell).trim(); - } + const headerVal = headersVert[rowIndex]; + obj[headersVert[rowIndex]] = getCellContent(tableType, headerVal, cell); }); return { ...obj, type: tableType }; }); @@ -568,7 +590,7 @@ export const structureBlockMath = () => (tree: Root) => { export const structureInlineMath = () => (tree: Root) => { visit(tree, "text", (node: any, index: any, parent: any) => { if (typeof node.value === "string") { - const regex = /\$(\S[^$]*\S)\$/g; + const regex = /\$(\S(?:[^$]*\S)?)\$/g; let match; const elements: any[] = []; let lastIndex = 0; @@ -709,8 +731,65 @@ export const structureAnchors = () => (tree: Root) 
=> { } }); }; +export const cleanUnusedSpans = () => (tree: Root) => { + visit(tree, "element", (node: any, index: any, parent: any) => { + if ( + node.tagName === "span" && + (!node.properties || Object.keys(node.properties).length === 0) + ) { + if (parent && typeof index === "number") { + parent.children.splice(index, 1, ...node.children); + } + } + }); + + visit(tree, "element", (node: any) => { + if (node.children) { + for (let i = 0; i < node.children.length - 1; i++) { + if (node.children[i].type === "text" && node.children[i + 1].type === "text") { + node.children[i].value += node.children[i + 1].value; + node.children.splice(i + 1, 1); + i--; + } + } + } + }); +}; + export const structureReferences = () => (tree: Root) => { const allReference: any[] = []; + const doiBracketRegex = new RegExp(/\[(10\.[^\]]+|https:\/\/doi\.org\/[^\]]+)\]/g); + visit(tree, (node: any, index: any, parent: any) => { + /* Remove all links on [doi.org/12] references. */ + if (node.tagName === "u") { + const parentText = getTextContent(parent); + const nodeText = getTextContent(node); + if (doiBracketRegex.test(parentText)) { + const elements = [ + { + type: "text", + value: `[${nodeText}]`, + }, + ]; + + if (parent && typeof index === "number") { + if (elements.length > 0) { + const prevChild = parent.children[index - 1]; + const nextChild = parent.children[index + 1]; + + if (prevChild && prevChild.type === "text") { + prevChild.value = prevChild.value.slice(0, -1); + } + + if (nextChild && nextChild.type === "text") { + nextChild.value = nextChild.value.slice(1); + } + } + parent.children.splice(index, 1, ...elements); + } + } + } + }); const doiReferenceCounts: { [key: string]: number } = {}; visit(tree, (node: any, index: any, parent: any) => { @@ -725,12 +804,11 @@ export const structureReferences = () => (tree: Root) => { } } if (typeof node.value === "string") { - const regex = new RegExp(/\[(10\.\S+|https:\/\/doi\.org\/\S+)\]/g); let match; const elements: any[] = []; let lastIndex = 0; - while ((match = regex.exec(node.value)) !== null) { + while ((match = doiBracketRegex.exec(node.value)) !== null) { const [_fullMatch, referenceDoi] = match; let currentRefId; if (!doiReferenceCounts[referenceDoi]) { @@ -747,7 +825,7 @@ export const structureReferences = () => (tree: Root) => { } const startIndex = match.index; - const endIndex = regex.lastIndex; + const endIndex = doiBracketRegex.lastIndex; if (startIndex > lastIndex) { elements.push({ diff --git a/core/actions/googleDriveImport/getGDriveFiles.ts b/core/actions/googleDriveImport/getGDriveFiles.ts index 86b978e03..a92d10408 100644 --- a/core/actions/googleDriveImport/getGDriveFiles.ts +++ b/core/actions/googleDriveImport/getGDriveFiles.ts @@ -1,3 +1,6 @@ +import crypto from "crypto"; + +import type { Blob } from "buffer"; import type { Auth } from "googleapis"; import { google } from "googleapis"; @@ -136,3 +139,74 @@ export const getContentFromFolder = async (folderId: string): Promise => { + const drive = google.drive({ version: "v3", auth }); + + const urlObject = new URL(assetUrl); + if (urlObject.hostname === "drive.google.com") { + try { + const urlObject = new URL(assetUrl); + let fileId = urlObject.searchParams.get("id"); + + if (!fileId) { + const fileIdMatch = assetUrl.match(/\/d\/([a-zA-Z0-9_-]+)/); + fileId = fileIdMatch ? 
fileIdMatch[1] : null; + } + + if (!fileId) { + throw new Error("Invalid asset URL"); + } + + const res = await drive.files.get( + { fileId: fileId, alt: "media" }, + { responseType: "arraybuffer" } + ); + + return { + mimetype: res.headers["content-type"], + filename: fileId, + buffer: Buffer.from(res.data as ArrayBuffer), + }; + } catch (error) { + logger.error(`Error fetching asset file from Drive, ${assetUrl}:`, error); + return null; + } + } else { + try { + const response = await fetch(assetUrl); + if (!response.ok) { + throw new Error(`Failed to fetch asset from URL: ${assetUrl}`); + } + + const contentType = response.headers.get("content-type") || "application/octet-stream"; + const contentDisposition = response.headers.get("content-disposition"); + const assetUrlHash = crypto.createHash("md5").update(assetUrl).digest("hex"); + let filename = assetUrlHash; + + if (contentDisposition) { + const match = contentDisposition.match(/filename="(.+)"/); + if (match && match[1]) { + filename = match[1]; + } + } + + const buffer = Buffer.from(await response.arrayBuffer()); + + return { + mimetype: contentType, + filename: filename, + buffer: buffer, + }; + } catch (error) { + logger.error(`Error fetching non-Drive asset file, ${assetUrl}:`, error); + return null; + } + } +}; diff --git a/core/actions/googleDriveImport/run.ts b/core/actions/googleDriveImport/run.ts index 8cc4d2d55..4636f84b4 100644 --- a/core/actions/googleDriveImport/run.ts +++ b/core/actions/googleDriveImport/run.ts @@ -1,3 +1,4 @@ +import type { PubsId } from "db/public"; import { logger } from "logger"; import { doPubsExist, getPubTypesForCommunity, updatePub, upsertPubRelations } from "~/lib/server"; @@ -29,7 +30,7 @@ export const run = defineRun( if (dataFromDrive === null) { throw new Error("Failed to retrieve data from Google Drive"); } - const formattedData = await formatDriveData(dataFromDrive, communitySlug); + const formattedData = await formatDriveData(dataFromDrive, communitySlug, pub.id); /* MIGRATION */ // TODO: Check and make sure the relations exist, not just the pubs. @@ -41,6 +42,23 @@ export const run = defineRun( existingPubs.forEach((pub) => existingDiscussionPubIds.push(pub.id)); } + const existingVersionIdPairs = pub.values + .filter( + (values) => + values.fieldSlug === `${communitySlug}:versions` && + values.relatedPubId && + values.relatedPub + ) + .map((values) => { + const publicationDateField = values.relatedPub!.values.filter( + (value) => value.fieldSlug === `${communitySlug}:publication-date` + )[0]; + const publicationDate: Date = publicationDateField + ? 
(publicationDateField.value as Date) + : new Date(values.relatedPub!.createdAt); + return { [`${publicationDate.toISOString()}`]: values.relatedPubId }; + }); + // Versions don't have IDs so we compare timestamps const existingVersionDates = pub.values .filter( @@ -76,6 +94,16 @@ export const run = defineRun( }, }; }), + ...formattedData.discussions + .filter((discussion) => existingDiscussionPubIds.includes(discussion.id)) + .map((discussion) => { + return { + slug: `${communitySlug}:discussions`, + value: null, + relatedPubId: discussion.id, + }; + }), + /* Create new versions from gdrive if they don't exist */ ...formattedData.versions .filter( (version) => @@ -97,6 +125,27 @@ export const run = defineRun( }), ]; + /* Lazily update all existing old versions (TODO: Check for changed content) */ + formattedData.versions + .filter((version) => + existingVersionDates.includes(version[`${communitySlug}:publication-date`]) + ) + .forEach(async (version) => { + const versionDate = version[`${communitySlug}:publication-date`]; + const relatedVersionId = existingVersionIdPairs.filter( + (pair) => pair[versionDate] + )[0][versionDate] as PubsId; + await updatePub({ + pubId: relatedVersionId, + communityId, + lastModifiedBy, + continueOnValidationError: false, + pubValues: { + ...version, + }, + }); + }); + /* NON-MIGRATION */ /* If the main doc is updated, make a new version */ const orderedVersions = pub.values @@ -111,16 +160,15 @@ export const run = defineRun( const fooDateField = foo.relatedPub!.values.filter( (value: any) => value.fieldSlug === `${communitySlug}:publication-date` )[0]; - const barDateField = foo.relatedPub!.values.filter( + const barDateField = bar.relatedPub!.values.filter( (value: any) => value.fieldSlug === `${communitySlug}:publication-date` )[0]; - - const fooDate: Date = fooDateField - ? fooDateField.value - : foo.relatedPub!.createdAt; - const barDate: Date = barDateField - ? barDateField.value - : foo.relatedPub!.createdAt; + const fooDate = new Date( + fooDateField ? fooDateField.value : foo.relatedPub!.createdAt + ); + const barDate = new Date( + barDateField ? 
barDateField.value : bar.relatedPub!.createdAt + ); return barDate.getTime() - fooDate.getTime(); }); diff --git a/core/prisma/create-admin-user.cts b/core/prisma/create-admin-user.cts new file mode 100644 index 000000000..41a4d96f3 --- /dev/null +++ b/core/prisma/create-admin-user.cts @@ -0,0 +1,94 @@ +/* eslint-disable no-console */ + +import { createEnv } from "@t3-oss/env-nextjs"; +import { Kysely, PostgresDialect } from "kysely"; +import * as pg from "pg"; +import { z } from "zod"; + +import { Database } from "db/Database"; + +import { isUniqueConstraintError } from "../kysely/errors"; +import { createPasswordHash } from "../lib/authentication/password"; + +const env = createEnv({ + server: { + ADMIN_EMAIL: z.string().email(), + ADMIN_PASSWORD: z.string().min(8), + ADMIN_FIRSTNAME: z.string(), + ADMIN_LASTNAME: z.string(), + DATABASE_URL: z.string(), + }, + client: {}, + experimental__runtimeEnv: {}, +}); + +const dialect = new PostgresDialect({ + pool: new pg.Pool({ + connectionString: env.DATABASE_URL, + }), +}); + +const db = new Kysely({ + dialect, +}); + +async function createAdminUser({ + email, + password, + firstName, + lastName, +}: { + email: string; + password: string; + firstName: string; + lastName: string; +}) { + const values = { + slug: email.split("@")[0], + email, + firstName, + lastName, + passwordHash: await createPasswordHash(password), + isSuperAdmin: true, + }; + + return db.insertInto("users").values(values).returningAll().executeTakeFirstOrThrow(); +} + +async function main() { + const adminEmail = env.ADMIN_EMAIL; + const adminPassword = env.ADMIN_PASSWORD; + const adminFirstName = env.ADMIN_FIRSTNAME; + const adminLastName = env.ADMIN_LASTNAME; + + if (!adminEmail || !adminPassword) { + throw new Error("ADMIN_EMAIL and ADMIN_PASSWORD must be set for admin initialization"); + } + + try { + await createAdminUser({ + email: adminEmail, + password: adminPassword, + firstName: adminFirstName, + lastName: adminLastName, + }); + console.log("✨ Admin user created successfully!"); + console.log(`You can now log in with:`); + console.log(`${adminEmail}`); + } catch (e) { + if (isUniqueConstraintError(e)) { + console.log("⚠️ Admin user already exists, skipping initialization"); + return; + } + throw e; + } +} + +if (require.main === module) { + main() + .then(() => process.exit(0)) + .catch((e) => { + console.error(e); + process.exit(1); + }); +} diff --git a/core/prisma/exampleCommunitySeeds/arcadiaJournal.ts b/core/prisma/exampleCommunitySeeds/arcadiaJournal.ts new file mode 100644 index 000000000..506f01ab1 --- /dev/null +++ b/core/prisma/exampleCommunitySeeds/arcadiaJournal.ts @@ -0,0 +1,250 @@ +import type { CommunitiesId, UsersId } from "db/public"; +import { + Action, + CoreSchemaType, + ElementType, + InputComponent, + MemberRole, + StructuralFormElement, +} from "db/public"; + +import { env } from "~/lib/env/env.mjs"; +import { seedCommunity } from "../seed/seedCommunity"; + +export async function seedArcadiaJournal(communityId?: CommunitiesId) { + const memberId = crypto.randomUUID() as UsersId; + + return seedCommunity( + { + community: { + id: communityId, + name: "Arcadia Journal", + slug: "arcadia-journal", + avatar: env.PUBPUB_URL + "/demo/croc.png", + }, + pubFields: { + /* Pub */ + Title: { schemaName: CoreSchemaType.String }, + "Airtable ID": { schemaName: CoreSchemaType.String }, + "Author Email": { schemaName: CoreSchemaType.Email }, + Content: { schemaName: CoreSchemaType.String }, + "DOI URL": { schemaName: CoreSchemaType.URL }, + Description: { 
schemaName: CoreSchemaType.String }, + Discussions: { schemaName: CoreSchemaType.Null, relation: true }, + "Google Drive Folder Url": { schemaName: CoreSchemaType.URL }, + "Hide Discussions": { schemaName: CoreSchemaType.Boolean }, + "Hide Feedback Form": { schemaName: CoreSchemaType.Boolean }, + "Hide Share Thoughts": { schemaName: CoreSchemaType.Boolean }, + "Icing Hashtags": { schemaName: CoreSchemaType.StringArray }, + "Last Published": { schemaName: CoreSchemaType.DateTime }, + Narratives: { schemaName: CoreSchemaType.Null, relation: true }, + "Pub URL": { schemaName: CoreSchemaType.URL }, + "Pub Contributors": { schemaName: CoreSchemaType.Null, relation: true }, + "Publication Date": { schemaName: CoreSchemaType.DateTime }, + "Social Count": { schemaName: CoreSchemaType.Number }, + "Twitter Collection URL": { schemaName: CoreSchemaType.URL }, + "Typeform URL": { schemaName: CoreSchemaType.URL }, + Versions: { schemaName: CoreSchemaType.Null, relation: true }, + Slug: { schemaName: CoreSchemaType.String }, + "temp drive folder url": { schemaName: CoreSchemaType.URL }, + /* Contributor */ + "Full Name": { schemaName: CoreSchemaType.String }, + "Contributor Person": { schemaName: CoreSchemaType.Null, relation: true }, + Affiliations: { schemaName: CoreSchemaType.Null, relation: true }, + Roles: { schemaName: CoreSchemaType.Null, relation: true }, + /* Person */ + ORCiD: { schemaName: CoreSchemaType.URL }, + Avatar: { schemaName: CoreSchemaType.URL }, + "Avatar File": { schemaName: CoreSchemaType.FileUpload }, + /* Discussion */ + Anchor: { schemaName: CoreSchemaType.String }, + "Is Closed": { schemaName: CoreSchemaType.Boolean }, + "Parent ID": { schemaName: CoreSchemaType.String }, + }, + pubTypes: { + Pub: { + Title: { isTitle: true }, + "Airtable ID": { isTitle: false }, + "Author Email": { isTitle: false }, + Content: { isTitle: false }, + "DOI URL": { isTitle: false }, + Description: { isTitle: false }, + Discussions: { isTitle: false }, + "Google Drive Folder Url": { isTitle: false }, + "Hide Discussions": { isTitle: false }, + "Hide Feedback Form": { isTitle: false }, + "Hide Share Thoughts": { isTitle: false }, + "Icing Hashtags": { isTitle: false }, + "Last Published": { isTitle: false }, + Narratives: { isTitle: false }, + "Pub Contributors": { isTitle: false }, + "Pub URL": { isTitle: false }, + "Publication Date": { isTitle: false }, + "Social Count": { isTitle: false }, + "Twitter Collection URL": { isTitle: false }, + "Typeform URL": { isTitle: false }, + Versions: { isTitle: false }, + Slug: { isTitle: false }, + "temp drive folder url": { isTitle: false }, + }, + Contributor: { + "Full Name": { isTitle: true }, + "Contributor Person": { isTitle: false }, + Affiliations: { isTitle: false }, + "Airtable ID": { isTitle: false }, + Roles: { isTitle: false }, + }, + Person: { + "Full Name": { isTitle: true }, + "Airtable ID": { isTitle: false }, + "Avatar File": { isTitle: false }, + ORCiD: { isTitle: false }, + Avatar: { isTitle: false }, + }, + Type: { + Title: { isTitle: true }, + "Airtable ID": { isTitle: false }, + Slug: { isTitle: false }, + }, + Narrative: { + Title: { isTitle: true }, + "Airtable ID": { isTitle: false }, + "Google Drive Folder Url": { isTitle: false }, + "Icing Hashtags": { isTitle: false }, + "Publication Date": { isTitle: false }, + Slug: { isTitle: false }, + }, + Roles: { + Title: { isTitle: true }, + "Airtable ID": { isTitle: false }, + }, + Institutions: { + Title: { isTitle: true }, + "Airtable ID": { isTitle: false }, + }, + Version: { + 
Description: { isTitle: true }, + Content: { isTitle: false }, + "Publication Date": { isTitle: false }, + }, + Discussion: { + "Full Name": { isTitle: true }, + Anchor: { isTitle: false }, + "Author Email": { isTitle: false }, + Content: { isTitle: false }, + "Is Closed": { isTitle: false }, + ORCiD: { isTitle: false }, + "Parent ID": { isTitle: false }, + "Publication Date": { isTitle: false }, + Avatar: { isTitle: false }, + }, + }, + users: { + new: { + id: memberId, + firstName: "Arcadia", + email: "arcadia-journal@pubpub.org", + lastName: "Journal", + password: "pubpub-arcadia-journal", + role: MemberRole.admin, + }, + hih: { + role: MemberRole.contributor, + }, + }, + pubs: [ + { + assignee: "new", + pubType: "Pub", + values: { + Title: "Ancient Giants: Unpacking the Evolutionary History of Crocodiles from Prehistoric to Present", + Content: "New Pub 1 Content", + "Author Email": "new@pubpub.org", + "Pub URL": "https://pubpub.org", + }, + stage: "Submitted", + }, + ], + forms: { + Review: { + pubType: "Pub", + elements: [ + { + type: ElementType.structural, + element: StructuralFormElement.p, + content: `# Review\n\n Thank you for agreeing to review this Pub, please do not be a meany bobeeny.`, + }, + { + field: "Title", + type: ElementType.pubfield, + component: InputComponent.textInput, + config: { + maxLength: 255, + label: "Title", + help: "Give your review a snazzy title.", + }, + }, + { + field: "Content", + type: ElementType.pubfield, + component: InputComponent.textArea, + config: { + help: "Enter your review here", + minLength: 255, + label: "Content", + }, + }, + ], + }, + }, + stages: { + Submitted: { + members: { new: MemberRole.contributor }, + actions: [ + { + action: Action.email, + config: { + subject: "HELLO :recipientName REVIEW OUR STUFF PLEASE", + recipient: memberId, + body: `You are invited to fill in a form.\n\n\n\n:link{form="review"}\n\nCurrent time: :value{field='croccroc:published-at'}`, + }, + name: "Send Review email", + }, + ], + }, + "Ask Author for Consent": { + members: { new: MemberRole.contributor }, + }, + "To Evaluate": { + members: { new: MemberRole.contributor }, + }, + "Under Evaluation": {}, + "In Production": {}, + Published: {}, + Shelved: {}, + }, + stageConnections: { + Submitted: { + to: ["To Evaluate"], + }, + "To Evaluate": { + to: ["Under Evaluation"], + }, + "Under Evaluation": { + to: ["Ask Author for Consent"], + }, + "Ask Author for Consent": { + to: ["In Production"], + }, + "In Production": { + to: ["Published"], + }, + }, + }, + { + // this makes sure that the slug is `croccroc`, not `croccroc-${new Date().toISOString()} + randomSlug: false, + withApiToken: "11111111-1111-1111-1111-111111111111.yyyyyyyyyyyyyyyz", + } + ); +} diff --git a/packages/context-editor/src/commands/blocks.ts b/packages/context-editor/src/commands/blocks.ts new file mode 100644 index 000000000..36c38c451 --- /dev/null +++ b/packages/context-editor/src/commands/blocks.ts @@ -0,0 +1,89 @@ +import { lift, setBlockType, wrapIn } from "prosemirror-commands"; +import { Node, NodeType } from "prosemirror-model"; +import { NodeSelection } from "prosemirror-state"; + +import type { Attrs, ToggleCommandFn, ToggleOptions } from "./types"; +import { createTypeToggle } from "./utils"; + +const nodeMatchesTypeAndAttrs = (node: Node, type: NodeType, attrs?: Attrs) => { + if (node.type === type) { + if (!attrs) { + return true; + } + return Object.keys(attrs).every((key) => attrs[key] === node.attrs[key]); + } + return false; +}; + +const blockTypeIsActive = (options: 
ToggleOptions) => { + const { state, type, withAttrs } = options; + if (!type) { + return false; + } + + const { $from } = state.selection; + const selectedNode = (state.selection as NodeSelection).node; + if (selectedNode && nodeMatchesTypeAndAttrs(selectedNode, type, withAttrs)) { + return true; + } + + let currentDepth = $from.depth; + while (currentDepth > 0) { + const currentNodeAtDepth = $from.node(currentDepth); + if (nodeMatchesTypeAndAttrs(currentNodeAtDepth, type, withAttrs)) { + return true; + } + currentDepth -= 1; + } + + return false; +}; + +const toggleBlockType = (options: ToggleOptions) => { + const { state, type, withAttrs, dispatch } = options; + const { schema } = state; + const isActive = blockTypeIsActive(options); + const newNodeType = isActive ? schema.nodes.paragraph : type; + const setBlockFunction = setBlockType(newNodeType, withAttrs); + return setBlockFunction(state, dispatch); +}; + +const toggleWrap = (options: ToggleOptions) => { + const { state, type, dispatch } = options; + if (blockTypeIsActive(options)) { + return lift(state, dispatch); + } + return wrapIn(type)(state, dispatch); +}; + +const createBlockTypeToggle = (options: { + typeName: string; + withAttrs?: Attrs; + commandFn?: ToggleCommandFn; +}) => { + const { typeName, withAttrs, commandFn = toggleBlockType } = options; + return createTypeToggle({ + withAttrs, + commandFn, + isActiveFn: blockTypeIsActive, + getTypeFromSchema: (schema) => schema.nodes[typeName] as NodeType, + }); +}; + +export const createHeadingBlockTypeToggle = (level: number) => { + return createBlockTypeToggle({ typeName: "heading", withAttrs: { level } }); +}; + +export const paragraphToggle = createBlockTypeToggle({ typeName: "paragraph" }); +export const heading1Toggle = createHeadingBlockTypeToggle(1); +export const heading2Toggle = createHeadingBlockTypeToggle(2); +export const heading3Toggle = createHeadingBlockTypeToggle(3); +export const heading4Toggle = createHeadingBlockTypeToggle(4); +export const heading5Toggle = createHeadingBlockTypeToggle(5); +export const heading6Toggle = createHeadingBlockTypeToggle(6); +export const blockquoteToggle = createBlockTypeToggle({ + typeName: "blockquote", + commandFn: toggleWrap, +}); +// TODO +export const codeBlockToggle = createBlockTypeToggle({ typeName: "code_block" }); diff --git a/packages/context-editor/src/commands/marks.ts b/packages/context-editor/src/commands/marks.ts new file mode 100644 index 000000000..fb43c303f --- /dev/null +++ b/packages/context-editor/src/commands/marks.ts @@ -0,0 +1,32 @@ +import type { MarkType } from "prosemirror-model"; + +import { toggleMark as pmToggleMark } from "prosemirror-commands"; + +import type { ToggleOptions } from "./types"; +import { createTypeToggle } from "./utils"; + +export const markIsActive = (options: ToggleOptions) => { + const { type, state } = options; + const { from, $from, to, empty } = state.selection; + if (empty) { + return !!type.isInSet(state.storedMarks || $from.marks()); + } + return state.doc.rangeHasMark(from, to, type); +}; + +const toggleMark = (options: ToggleOptions) => { + const { state, dispatch, type } = options; + return pmToggleMark(type)(state, dispatch); +}; + +export const createMarkToggle = (typeName: string) => { + return createTypeToggle({ + getTypeFromSchema: (schema) => schema.marks[typeName] as MarkType, + commandFn: toggleMark, + isActiveFn: markIsActive, + }); +}; + +export const strongToggle = createMarkToggle("strong"); +export const emToggle = createMarkToggle("em"); +export const 
codeToggle = createMarkToggle("code"); diff --git a/packages/context-editor/src/commands/types.ts b/packages/context-editor/src/commands/types.ts new file mode 100644 index 000000000..534f35035 --- /dev/null +++ b/packages/context-editor/src/commands/types.ts @@ -0,0 +1,48 @@ +import type { Mark, MarkType, Node, NodeType, Schema } from "prosemirror-model"; +import type { EditorState } from "prosemirror-state"; +import type { EditorView } from "prosemirror-view"; +import type { ReactNode } from "react"; + +export type Dispatch = EditorView["dispatch"]; +export type Attrs = Node["attrs"] | Mark["attrs"]; + +export type CommandState = { + run: () => unknown; + canRun: boolean; + isActive: boolean; +}; + +export type MenuItemBase = { + key: string; + icon: ReactNode; +}; + +export type CommandStateBuilder = (dispatch: Dispatch, state: EditorState) => CommandState; +export type CommandSpec = (view: EditorView) => (state: EditorState) => CommandState; + +export type CommandDefinition = MenuItemBase & { + command?: CommandSpec; +}; + +export type CommandSubmenu = MenuItemBase & { + commands: CommandDefinition[]; +}; + +export type SchemaType = NodeType | MarkType; + +export type ToggleActiveFn = (options: ToggleOptions) => boolean; +export type ToggleCommandFn = (options: ToggleOptions) => boolean; + +export type ToggleOptions = { + state: EditorState; + type: S; + withAttrs?: Attrs; + dispatch?: Dispatch; +}; + +export type CreateToggleOptions = { + withAttrs?: Attrs; + getTypeFromSchema: (schema: Schema) => S; + commandFn: ToggleCommandFn; + isActiveFn: ToggleActiveFn; +}; diff --git a/packages/context-editor/src/commands/utils.ts b/packages/context-editor/src/commands/utils.ts new file mode 100644 index 000000000..0e75bf308 --- /dev/null +++ b/packages/context-editor/src/commands/utils.ts @@ -0,0 +1,27 @@ +import type { EditorState } from "prosemirror-state"; +import type { EditorView } from "prosemirror-view"; + +import type { + CommandSpec, + CommandStateBuilder, + CreateToggleOptions, + SchemaType, + ToggleOptions, +} from "./types"; + +export const createCommandSpec = (builder: CommandStateBuilder): CommandSpec => { + return (view: EditorView) => (state: EditorState) => builder(view.dispatch, state); +}; + +export const createTypeToggle = (options: CreateToggleOptions) => { + const { getTypeFromSchema, withAttrs, commandFn, isActiveFn } = options; + return createCommandSpec((dispatch, state) => { + const type = getTypeFromSchema(state.schema); + const toggleOptions: ToggleOptions = { state, type, withAttrs }; + return { + run: () => commandFn({ ...toggleOptions, dispatch }), + canRun: commandFn(toggleOptions), + isActive: type && isActiveFn(toggleOptions), + }; + }); +}; diff --git a/packages/context-editor/src/components/MenuBar.tsx b/packages/context-editor/src/components/MenuBar.tsx index a9b8f35a2..4d618807f 100644 --- a/packages/context-editor/src/components/MenuBar.tsx +++ b/packages/context-editor/src/components/MenuBar.tsx @@ -1,58 +1,154 @@ -import type { MarkType } from "prosemirror-model"; -import type { Command, EditorState } from "prosemirror-state"; import type { ReactNode } from "react"; import React from "react"; import { usePluginViewContext } from "@prosemirror-adapter/react"; -import { toggleMark } from "prosemirror-commands"; +import { Quote } from "lucide-react"; import { Button } from "ui/button"; +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "ui/select"; import { cn } from "utils"; -import { baseSchema } from "../schemas"; -import { 
markIsActive } from "../utils/marks"; +import type { CommandSpec } from "../commands/types"; +import { + blockquoteToggle, + heading1Toggle, + heading2Toggle, + heading3Toggle, + heading4Toggle, + heading5Toggle, + heading6Toggle, + paragraphToggle, +} from "../commands/blocks"; +import { emToggle, strongToggle } from "../commands/marks"; -interface MenuItem { - name: string; +type MenuItem = { + key: string; + name?: string; icon: ReactNode; - type: MarkType; // eventually should also be NodeType - command: Command; -} + command: CommandSpec; +}; const menuItems: MenuItem[] = [ { - name: "strong", + key: "strong", icon: "B", - type: baseSchema.marks.strong, - command: toggleMark(baseSchema.marks.strong), + command: strongToggle, }, { - name: "em", + key: "em", icon: I, - type: baseSchema.marks.em, - command: toggleMark(baseSchema.marks.em), + command: emToggle, + }, + { + key: "blockquote", + icon: , + command: blockquoteToggle, + }, +]; + +const paragraphTypeItems: MenuItem[] = [ + { + key: "paragraph", + name: "Paragraph", + icon: "Paragraph", + command: paragraphToggle, + }, + { + key: "h1", + name: "Heading 1", + icon: Heading 1, + command: heading1Toggle, + }, + { + key: "h2", + name: "Heading 2", + icon: Heading 2, + command: heading2Toggle, + }, + { + key: "h3", + name: "Heading 3", + icon: Heading 3, + command: heading3Toggle, + }, + { + key: "h4", + name: "Heading 4", + icon: Heading 4, + command: heading4Toggle, + }, + { + key: "h5", + name: "Heading 5", + icon: Heading 5, + command: heading5Toggle, + }, + { + key: "h6", + name: "Heading 6", + icon: Heading 6, + command: heading6Toggle, }, ]; +const ParagraphDropdown = () => { + const { view } = usePluginViewContext(); + const activeType = paragraphTypeItems.find((item) => item.command(view)(view.state).isActive); + + return ( + + ); +}; + export const MenuBar = () => { const { view } = usePluginViewContext(); return ( -
+
{menuItems.map((menuItem) => { - const { name, icon, command, type } = menuItem; - // Returns if given command can be applied at the cursor selection - const isApplicable = command(view.state, undefined, view); - const isActive = markIsActive(type, view.state); + const { key, icon, command } = menuItem; + const { run, canRun, isActive } = command(view)(view.state); return ( ); })} +
); }; diff --git a/packages/context-editor/src/plugins/inputRules.test.ts b/packages/context-editor/src/plugins/inputRules.test.ts index 665052f13..97da60838 100644 --- a/packages/context-editor/src/plugins/inputRules.test.ts +++ b/packages/context-editor/src/plugins/inputRules.test.ts @@ -2,8 +2,8 @@ import { EditorState, TextSelection } from "prosemirror-state"; import { EditorView } from "prosemirror-view"; import { describe, expect, test } from "vitest"; +import { markIsActive } from "../commands/marks"; import { baseSchema } from "../schemas"; -import { markIsActive } from "../utils/marks"; import customRules from "./inputRules"; describe("inputRules", () => { @@ -61,8 +61,10 @@ describe("inputRules", () => { ])("italics $text", ({ text, expected }) => { expect(write(text)).toEqual(expected.text); moveSelection(1); - expect(markIsActive(baseSchema.marks.em, view.state)).toEqual(expected.isItalicized); - expect(markIsActive(baseSchema.marks.strong, view.state)).toBeFalsy(); + expect(markIsActive({ state: view.state, type: baseSchema.marks.em })).toEqual( + expected.isItalicized + ); + expect(markIsActive({ state: view.state, type: baseSchema.marks.strong })).toBeFalsy(); }); test.each([ @@ -75,7 +77,11 @@ describe("inputRules", () => { ])("bold $text", ({ text, expected }) => { expect(write(text)).toEqual(expected.text); moveSelection(1); - expect(markIsActive(baseSchema.marks.strong, view.state)).toEqual(expected.isBold); - expect(markIsActive(baseSchema.marks.em, view.state)).toEqual(expected.isItalicized); + expect(markIsActive({ state: view.state, type: baseSchema.marks.strong })).toEqual( + expected.isBold + ); + expect(markIsActive({ state: view.state, type: baseSchema.marks.em })).toEqual( + expected.isItalicized + ); }); }); diff --git a/packages/context-editor/src/plugins/inputRules.ts b/packages/context-editor/src/plugins/inputRules.ts index f3969122c..7a714ea60 100644 --- a/packages/context-editor/src/plugins/inputRules.ts +++ b/packages/context-editor/src/plugins/inputRules.ts @@ -1,7 +1,7 @@ -import type { MarkType } from "prosemirror-model"; +import type { MarkType, NodeType } from "prosemirror-model"; import type { EditorState } from "prosemirror-state"; -import { InputRule, inputRules } from "prosemirror-inputrules"; +import { InputRule, inputRules, wrappingInputRule } from "prosemirror-inputrules"; import { Fragment, Schema } from "prosemirror-model"; import initialDoc from "../stories/initialDoc.json"; @@ -39,6 +39,7 @@ const applyMarkRule = (markType: MarkType, regex: RegExp) => { } ); }; +const blockQuoteRule = (nodeType: NodeType) => wrappingInputRule(/^\s*>\s$/, nodeType); export default (schema: Schema) => { const rules = [ @@ -54,6 +55,7 @@ export default (schema: Schema) => { // Prosemirror applies the first rule that matches applyMarkRule(schema.marks.strong, boldRegex), applyMarkRule(schema.marks.em, italicsRegex), + blockQuoteRule(schema.nodes.blockquote), ]; return inputRules({ rules }); }; diff --git a/packages/context-editor/src/schemas/blockquote.ts b/packages/context-editor/src/schemas/blockquote.ts new file mode 100644 index 000000000..983567cad --- /dev/null +++ b/packages/context-editor/src/schemas/blockquote.ts @@ -0,0 +1,31 @@ +import type { DOMOutputSpec, MarkSpec, NodeSpec } from "prosemirror-model"; + +export default { + attrs: { + id: { default: null }, + class: { default: null }, + }, + content: "block+", + group: "block", + selectable: false, + parseDOM: [ + { + tag: "blockquote", + getAttrs: (node) => { + return { + id: (node as 
Element).getAttribute("id"), + }; + }, + }, + ], + toDOM: (node) => { + return [ + "blockquote", + { + class: node.attrs.class, + ...(node.attrs.id && { id: node.attrs.id }), + }, + 0, + ] as DOMOutputSpec; + }, +} satisfies NodeSpec; diff --git a/packages/context-editor/src/schemas/code.ts b/packages/context-editor/src/schemas/code.ts new file mode 100644 index 000000000..e900692fe --- /dev/null +++ b/packages/context-editor/src/schemas/code.ts @@ -0,0 +1,18 @@ +import type { DOMOutputSpec, MarkSpec } from "prosemirror-model"; + +export default { + attrs: { + id: { default: null }, + class: { default: null }, + }, + parseDOM: [{ tag: "code" }], + toDOM: (mark) => { + return [ + "code", + { + class: mark.attrs.class, + ...(mark.attrs.id && { id: mark.attrs.id }), + }, + ] as DOMOutputSpec; + }, +} satisfies MarkSpec; diff --git a/packages/context-editor/src/schemas/index.ts b/packages/context-editor/src/schemas/index.ts index 5c2d8b3a4..2a7fc1272 100644 --- a/packages/context-editor/src/schemas/index.ts +++ b/packages/context-editor/src/schemas/index.ts @@ -1,5 +1,7 @@ import { Schema } from "prosemirror-model"; +import blockquote from "./blockquote"; +import inlineCode from "./code"; import contextAtom from "./contextAtom"; import contextDoc from "./contextDoc"; // import { marks, nodes } from "prosemirror-schema-basic"; @@ -21,10 +23,12 @@ export const baseSchema = new Schema({ heading, contextDoc, contextAtom, + blockquote, }, marks: { strong, em, + code: inlineCode, }, topNode: "doc", }); diff --git a/packages/context-editor/src/style.css b/packages/context-editor/src/style.css index 69d8dacea..7f0deda5a 100644 --- a/packages/context-editor/src/style.css +++ b/packages/context-editor/src/style.css @@ -138,3 +138,7 @@ overflow: scroll; position: relative; } +.ProseMirror blockquote { + padding-left: 1rem; + border-left: solid 4px rgba(181, 181, 181, 0.5); +} diff --git a/packages/context-editor/src/utils/marks.ts b/packages/context-editor/src/utils/marks.ts deleted file mode 100644 index 3a6ddf1c4..000000000 --- a/packages/context-editor/src/utils/marks.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { MarkType } from "prosemirror-model"; -import type { EditorState } from "prosemirror-state"; - -export const markIsActive = (markType: MarkType, editorState: EditorState) => { - const { from, $from, to, empty } = editorState.selection; - if (empty) { - return !!markType.isInSet(editorState.storedMarks || $from.marks()); - } - return editorState.doc.rangeHasMark(from, to, markType); -}; diff --git a/self-host/.env.example b/self-host/.env.example new file mode 100644 index 000000000..e8df51714 --- /dev/null +++ b/self-host/.env.example @@ -0,0 +1,44 @@ +# the default url of the platform +PUBPUB_URL=http://localhost:3000 # the url of the platform +# change this to eg +# PUBPUB_URL=https://platform.example.com +# for a production environment + + +# configure these things with safe values +# or the values of a remote postgres database +POSTGRES_USER=my-postgres-user # change this! +POSTGRES_PASSWORD=my-postgres-password # change this! +POSTGRES_DB=my-postgres-db # change this! this is hard to change after the database has been created +POSTGRES_HOST=db # change this to the name of the service in docker-compose.yml, or the domain of a remote postgres database if you're using that instead +POSTGRES_PORT=5432 # don't forget to update the port in docker-compose.yml if you change this + +# not needed if you're using a remote file server like AWS S3 +MINIO_ROOT_USER= # change this! 
this is the username for your file server!
+MINIO_ROOT_PASSWORD= # change this! this is the password for your file server!
+
+ASSETS_BUCKET_NAME=assets
+ASSETS_UPLOAD_KEY= # change this! example: asset-user
+ASSETS_UPLOAD_SECRET_KEY= # change this!
+ASSETS_REGION=us-east-1 # leave this unchanged, unless you are hosting files on a different region on actual AWS
+
+# this is the default value but you ideally should set this up more nicely using our caddy service
+ASSETS_STORAGE_ENDPOINT="http://localhost:9000"
+# you could also set this to the secured endpoint of your file server
+# ASSETS_STORAGE_ENDPOINT="https://example.com/assets"
+
+MAILGUN_SMTP_HOST=localhost
+MAILGUN_SMTP_PORT=54325
+MAILGUN_SMTP_PASSWORD="xxx"
+MAILGUN_SMTP_USERNAME="xxx"
+
+API_KEY="super_secret_key"
+
+OTEL_SERVICE_NAME="pubpub-v7-dev" # should be shared across components but not environments
+HONEYCOMB_API_KEY="xxx"
+
+# KYSELY_DEBUG="true"
+
+GCLOUD_KEY_FILE='xxx'
+
+SELF_HOST="true"
diff --git a/self-host/.gitignore b/self-host/.gitignore
new file mode 100644
index 000000000..f1b4fd61a
--- /dev/null
+++ b/self-host/.gitignore
@@ -0,0 +1,2 @@
+.env
+minio/.minio.sys
\ No newline at end of file
diff --git a/self-host/README.md b/self-host/README.md
index a24c7a9ca..75194762f 100644
--- a/self-host/README.md
+++ b/self-host/README.md
@@ -4,7 +4,83 @@ For the most part, self-hosting PubPub is a matter of deploying the app and the

 However, there are a few key things you need to know about.

-## Files
+## Quick start
+
+### Clone the repo
+
+First, you need to clone this repo.
+
+You can either clone the entire repo by doing
+
+```sh
+git clone https://github.com/pubpub/platform.git pubpub-platform
+cd pubpub-platform/self-host
+```
+
+or you can clone just the `self-host` directory by doing
+
+```sh
+git clone -n --depth=1 --filter=tree:0 \
+    https://github.com/pubpub/platform
+cd platform
+git sparse-checkout set --no-cone /self-host
+git checkout
+cd ..
+mv platform/self-host pubpub-platform
+rm -rf platform
+cd pubpub-platform
+```
+
+Either way, you will now be inside a directory with an `.env.example` file and a `docker-compose.yml` file. Success!
+
+### Set up the environment variables
+
+The `.env.example` file will give you a list of environment variables you need to set.
+
+You need to copy the `.env.example` file to a new file called `.env` and then fill in the values.
+
+```sh
+cp .env.example .env
+```
+
+Now you will need to set up some environment variables.
+
+#### Database
+
+> [!WARNING]
+> It's important you set up different values for this before initializing the database for the first time,
+> as it's annoying to change later.
+
+You need to set your own postgres user, password, and database name. The defaults are not safe. These defaults will be used to spin up a postgres database in a container.
+
+In case you want to use a remote postgres database, you can set `POSTGRES_HOST` to the domain of the database you want to use.
+
+To generate a strong password, you can use one of these commands:
+
+```sh
+# On Linux/macOS:
+openssl rand -base64 32
+
+# Alternative using /dev/urandom:
+< /dev/urandom tr -dc A-Za-z0-9 | head -c32; echo
+
+# On Windows PowerShell:
+[System.Web.Security.Membership]::GeneratePassword(32,8)
+```
+
+Use the output of one of these commands as the password for your postgres user.
+
+```sh
+POSTGRES_USER=my-postgres-user # change this!
+POSTGRES_PASSWORD= # change this to the output of one of the commands above!
+POSTGRES_DB=my-postgres-db # change this! this is hard to change after the database has been created
+POSTGRES_HOST=db # change this to the name of the service in docker-compose.yml, or the domain of a remote postgres database if you're using that instead
+```
+
+#### Files
+
+> [!WARNING]
+> It's important you set up different values for this immediately

 The hosted version of Platform uses AWS S3 to host files. When self-hosting, you have two options:

@@ -29,17 +105,49 @@ You should also remove the `minio` and `minio-init` services from the `docker-co

 If you want to use the built-in MinIO service, you will need to set the following environment variables:

+You will need two strong passwords for your file server:
+
+- One for the root user
+- One for the user we create that can only upload files
+
+To generate a strong password, you can use one of these commands:
+
 ```sh
-ASSETS_BUCKET_NAME="your-bucket-name" # these values will be set once you start up the MinIO service, making it hard to change later!
-ASSETS_UPLOAD_KEY="your-access-key"
-ASSETS_UPLOAD_SECRET_KEY="your-secret-key"
-ASSETS_REGION="your-region"
-ASSETS_STORAGE_ENDPOINT="localhost:9000" # this is the default value but you ideally should set this up more nicely using our nginx service
+# On Linux/macOS:
+openssl rand -base64 32
+
+# Alternative using /dev/urandom:
+< /dev/urandom tr -dc A-Za-z0-9 | head -c32; echo
+
+# On Windows PowerShell:
+[System.Web.Security.Membership]::GeneratePassword(32,8)
+```
+
+Run one of these commands twice, and use one for `MINIO_ROOT_PASSWORD` and one for `ASSETS_UPLOAD_SECRET_KEY`.
+
+```sh
+# not needed if you're using a remote file server like AWS S3
+MINIO_ROOT_USER= # change this! this is the username for your file server!
+MINIO_ROOT_PASSWORD= # change this! this is the password for your file server!
+
+# these are either the values of an existing S3-compatible storage service, or the values that will be used to create a new MinIO service
+ASSETS_BUCKET_NAME= # example: assets
+ASSETS_UPLOAD_KEY= # example: asset-user
+ASSETS_UPLOAD_SECRET_KEY= # example: a strong secure password
+ASSETS_REGION=us-east-1 # leave this unchanged, unless you are hosting files on a different region on actual AWS
 ```

 Then, after running `docker compose up -d`, you should be able to visit the MinIO console at `http://localhost:9001`.

-## Email
+#### SSL/Caddy
+
+> [!NOTE]
+> Disabled by default, see the later section for more information.
+
+#### Email
+
+> [!NOTE]
+> Can be set up later, or not at all.

 To be able to send emails, you need to set some kind of email provider.

@@ -50,9 +158,7 @@ Other common options are [SendGrid](https://sendgrid.com/) and [Postmark](https:

 You can also use an existing Gmail or Office 365 account to relay emails through PubPub. Other providers may likely work as well, but are not tested.

-### Setup
-
-#### Mailgun
+##### Mailgun

 To use Mailgun, you will need to create an account on [Mailgun](https://www.mailgun.com/) and set the following environment variables:

@@ -65,7 +171,7 @@ MAILGUN_SMTP_FROM="email@your-mailgun-domain.mailgun.org"
 MAILGUN_SMTP_FROM_NAME="Your Organization"
 ```

-#### Gmail
+##### Gmail

 To use Gmail to relay emails through PubPub, you will need to create an [app password](https://support.google.com/accounts/answer/185833?hl=en).
@@ -92,7 +198,7 @@ MAILGUN_SMTP_FROM="email@gmail.com" # technically optional, but you will almost
 MAILGUN_SMTP_FROM_NAME="Your Organization" # Optional, will default to "PubPub Team"
 ```

-#### Office 365
+##### Office 365

 You can (for now) send emails through Office 365 Outlook/Exchange over SMTP, although Microsoft has repeatedly stated they will likely deprecate this feature in the future.

@@ -107,6 +213,88 @@ MAILGUN_SMTP_FROM="email@outlook.com" # technically optional, but you will almos
 MAILGUN_SMTP_FROM_NAME="Your Organization" # Optional, will default to "PubPub Team"
 ```

-#### No email
+##### No email

 You can technically leave the email provider blank, but this will disable the email functionality. The email action will still be visible in the UI, but it will fail when you try to send an email.
+
+#### Other
+
+...
+
+### Disable services you don't want or need
+
+By default, a number of services are enabled in `docker-compose.yml` that you may not want or need.
+
+Here is a short list of the built-in services:
+
+#### Postgres Database
+
+By default, we will spin up a postgres database in a container.
+
+You may want to use your own postgres database instead, in which case you can disable this by removing the `db` service from the `docker-compose.yml` file, and removing the
+
+```yml
+db:
+    condition: service_started
+```
+
+from the `depends_on` section of the `platform` service.
+
+```yml
+platform:
+    depends_on:
+        db:
+            condition: service_started
+```
+
+#### MinIO
+
+By default, we will spin up a MinIO container to host files locally.
+
+You can instead choose to use any other S3-compatible storage service, in which case you can disable this by removing the `minio` and `minio-init` services from the `docker-compose.yml` file.
+
+Be sure to follow the instructions above for configuring the environment variables for your S3-compatible storage service.
+
+### Start the services for the first time
+
+Now you should be ready to start the services for the first time!
+
+```sh
+docker compose up -d
+```
+
+Everything should start up without any issues.
+
+You should now be able to visit Platform at `http://localhost:3000`, and browse your files at `http://localhost:9001` (if you are using the built-in MinIO service).
+
+But, as you might have noticed: you cannot log in yet!
+
+This is because you need to create an admin user.
+
+### Create an admin user
+
+First, make the script executable:
+
+```sh
+chmod +x ./create-admin.sh
+```
+
+Then, run the script:
+
+```sh
+./create-admin.sh
+```
+
+You will be prompted to enter an email, password, first name, and last name.
+
+Once you have done this, you should be able to log in to Platform at `http://localhost:3000` (or the URL you have set up) with the email and password you just created.
+
+### Create a community
+
+Navigate to `http://localhost:3000/communities` and create a community.
+
+### Go to your new community
+
+Navigate to `http://localhost:3000/c/your-community-slug` and you should see the community dashboard!
+
+Congrats! You're ready to start using Platform!
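If you would rather create the admin user non-interactively (for example when provisioning a server from a script), you can skip the prompts and pass the same variables directly to `docker compose run`. This is a minimal sketch based on the `create-admin.sh` and `docker-compose.yml` shown later in this diff; the email and names are placeholders you should replace:

```sh
# Hypothetical non-interactive variant of create-admin.sh: the admin
# credentials are supplied as environment variables instead of prompts.
# Keep a copy of the generated password somewhere safe.
ADMIN_PASSWORD="$(openssl rand -base64 32)"
docker compose run --rm \
    -e ADMIN_EMAIL="admin@example.com" \
    -e ADMIN_PASSWORD="$ADMIN_PASSWORD" \
    -e ADMIN_FIRSTNAME="Admin" \
    -e ADMIN_LASTNAME="User" \
    platform-migrations pnpm --filter core exec tsx prisma/create-admin-user.cts
echo "Generated admin password: $ADMIN_PASSWORD"
```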
diff --git a/self-host/caddy/Caddyfile b/self-host/caddy/Caddyfile
new file mode 100644
index 000000000..e87e67f5f
--- /dev/null
+++ b/self-host/caddy/Caddyfile
@@ -0,0 +1,30 @@
+{
+	email someone@example.com # replace with your email
+}
+
+
+example.com {
+	# keep this if you want your files to be accessible at example.com/assets/*
+	handle_path /assets/* {
+		reverse_proxy minio:9000
+	}
+
+	handle {
+		reverse_proxy platform:3000
+	}
+}
+
+# if you want to use a different domain for your files, you can do so here
+# for instance, now all your files will be accessible at assets.example.com
+# if you go this route, be sure to update your ASSETS_STORAGE_ENDPOINT in .env and restart your services
+# assets.example.com {
+#	reverse_proxy minio:9000
+# }
+
+# if you want to manage your files via the minio console, you can expose it like so
+# this is usually not necessary
+# assets-admin.example.com {
+#	reverse_proxy minio:9001
+# }
+
diff --git a/self-host/caddy/Caddyfile.test b/self-host/caddy/Caddyfile.test
new file mode 100644
index 000000000..6f645d8b2
--- /dev/null
+++ b/self-host/caddy/Caddyfile.test
@@ -0,0 +1,37 @@
+# this is for testing your caddy config locally
+{
+	email someone@example.com # replace with your email
+	# use locally-issued certificates, so that caddy won't try to obtain real ones from an ACME CA
+	local_certs
+}
+
+
+# in /etc/hosts, add the following line:
+# 127.0.0.1 example.com
+example.com {
+	tls internal
+	bind 0.0.0.0
+	# keep this if you want your files to be accessible at example.com/assets/*
+	handle_path /assets/* {
+		reverse_proxy minio:9000
+	}
+
+	handle {
+		reverse_proxy platform:3000
+	}
+}
+
+# if you want to use a different domain for your files, you can do so here
+# for instance, now all your files will be accessible at assets.example.com
+# if you go this route, be sure to update your ASSETS_STORAGE_ENDPOINT in .env and restart your services
+# assets.example.com {
+#	reverse_proxy minio:9000
+# }
+
+# if you want to manage your files via the minio console, you can expose it like so
+# this is usually not necessary
+# assets-admin.example.com {
+#	reverse_proxy minio:9001
+# }
+
diff --git a/self-host/create-admin.sh b/self-host/create-admin.sh
new file mode 100755
index 000000000..0e41bf745
--- /dev/null
+++ b/self-host/create-admin.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+set -e
+
+source .env
+
+# Prompt for admin credentials
+read -p "Enter admin email: " ADMIN_EMAIL
+read -s -p "Enter admin password: " ADMIN_PASSWORD
+echo
+read -p "Enter admin first name: " ADMIN_FIRSTNAME
+read -p "Enter admin last name: " ADMIN_LASTNAME
+
+# Run the admin creation script
+docker compose run --rm \
+	-e ADMIN_EMAIL="$ADMIN_EMAIL" \
+	-e ADMIN_PASSWORD="$ADMIN_PASSWORD" \
+	-e ADMIN_FIRSTNAME="$ADMIN_FIRSTNAME" \
+	-e ADMIN_LASTNAME="$ADMIN_LASTNAME" \
+	platform-migrations pnpm --filter core exec tsx prisma/create-admin-user.cts
+
+echo "✨ Done! You can now log in to Platform at $PUBPUB_URL"
\ No newline at end of file
diff --git a/self-host/docker-compose.yml b/self-host/docker-compose.yml
new file mode 100644
index 000000000..46b6ff649
--- /dev/null
+++ b/self-host/docker-compose.yml
@@ -0,0 +1,149 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json
+
+# ^ this line provides nicer autocomplete in some editors
+
+services:
+  # the core platform service
+  platform:
+    depends_on:
+      db:
+        condition: service_started
+      platform-jobs:
+        condition: service_started
+      platform-migrations:
+        condition: service_completed_successfully
+    platform: linux/amd64
+    image: ghcr.io/pubpub/platform:latest
+    env_file:
+      - path: .env
+        required: true
+    environment:
+      DATABASE_URL: postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+    ports:
+      - "3000:3000"
+    networks:
+      - app-network
+
+  # platform jobs service
+  # takes care of longer running tasks like scheduling actions
+  platform-jobs:
+    depends_on:
+      platform-migrations:
+        condition: service_completed_successfully
+    platform: linux/amd64
+    image: ghcr.io/pubpub/platform-jobs:latest
+    env_file:
+      - path: .env
+        required: true
+    environment:
+      DATABASE_URL: postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+    networks:
+      - app-network
+
+  platform-migrations:
+    platform: linux/amd64
+    depends_on:
+      db:
+        condition: service_started
+    image: ghcr.io/pubpub/platform-migrations:latest
+    env_file:
+      - path: .env
+        required: true
+    environment:
+      DATABASE_URL: postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+    # FIXME: remove this once it's merged into main
+    volumes:
+      - ../core/prisma/create-admin-user.cts:/usr/src/app/core/prisma/create-admin-user.cts
+    command: ["pnpm", "--filter", "core", "migrate-docker"]
+    networks:
+      - app-network
+
+  # database
+  # can be removed if you manually set the DATABASE_URL environment variable in .env
+  # to another postgres database you have access to
+  db:
+    image: postgres:15
+    restart: always
+    env_file:
+      - path: .env
+        required: true
+    volumes:
+      - postgres-data:/var/lib/postgresql/data
+    networks:
+      - app-network
+    ports:
+      - "${POSTGRES_PORT}:${POSTGRES_PORT}" # you can choose another port if you want, just make sure the first one matches the POSTGRES_PORT in .env
+
+  # recommended reverse proxy solution
+  # can be removed if you manually set up a reverse proxy like nginx instead
+  # useful if you want your assets, platform, and site to be on the same domain
+  # but with different paths
+  # caddy:
+  #   image: caddy:latest
+  #   depends_on:
+  #     - platform
+  #     - platform-jobs
+  #     - minio
+  #   env_file:
+  #     - path: .env
+  #       required: true
+  #   ports:
+  #     - "443:443"
+  #   volumes:
+  #     - ./caddy:/etc/caddy
+  #     - caddy-data:/data
+  #     - caddy-config:/config
+  #   networks:
+  #     - app-network
+
+  # assets storage
+  # can be removed if you manually set up a storage service like s3 or minio instead
+  minio:
+    image: minio/minio:latest
+    env_file:
+      - path: .env
+        required: true
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/ready"]
+      interval: 1m30s
+      timeout: 30s
+      retries: 5
+      start_period: 30s
+    restart: unless-stopped
+    command: server --console-address ":9001" /data
+    ports:
+      - "9000:9000" # API
+      - "9001:9001" # Console
+    volumes:
+      - ./minio:/data
+    networks:
+      - app-network
+
+  # initialize minio
+  minio-init:
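+    # one-shot container that prepares MinIO using the mc client (see the entrypoint below):
+    # it creates the assets bucket, allows anonymous downloads from it, and creates the
+    # upload user with readwrite access; the script always exits 0, so re-running it is harmless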
+    depends_on:
+      minio:
+        condition: service_healthy
+    image: minio/mc:latest
+    env_file:
+      - path: .env
+        required: true
+    entrypoint: >
+      /bin/sh -c '
+      /usr/bin/mc config host add myminio http://minio:9000 "$${MINIO_ROOT_USER}" "$${MINIO_ROOT_PASSWORD}";
+      /usr/bin/mc mb --ignore-existing myminio/"$${ASSETS_BUCKET_NAME}";
+      /usr/bin/mc anonymous set download myminio/"$${ASSETS_BUCKET_NAME}";
+      /usr/bin/mc admin user add myminio "$${ASSETS_UPLOAD_KEY}" "$${ASSETS_UPLOAD_SECRET_KEY}";
+      /usr/bin/mc admin policy attach myminio readwrite --user "$${ASSETS_UPLOAD_KEY}";
+      exit 0;
+      '
+    networks:
+      - app-network
+
+volumes:
+  caddy-data:
+  caddy-config:
+  postgres-data:
+
+networks:
+  app-network:
diff --git a/self-host/minio/.gitkeep b/self-host/minio/.gitkeep
new file mode 100644
index 000000000..e69de29bb
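
To sanity-check the `minio-init` setup, request any object path from the bucket anonymously. With the download policy applied, a missing object should return `404` (`NoSuchKey`); a `403` (`AccessDenied`) instead suggests the init container did not run successfully. For example, assuming the example bucket name `assets` and the default port mapping:

```sh
# 404 means anonymous downloads are enabled; 403 means they are not
curl -i "http://localhost:9000/assets/does-not-exist"
```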