Adapt to pending entities with BBIDs / Introducing Kysely #323

Open · wants to merge 14 commits into base: master
1 change: 0 additions & 1 deletion .eslintrc.js
@@ -200,7 +200,6 @@ const stylisticIssuesRules = {
properties: 'always'
}
],
'comma-dangle': ERROR,
'comma-spacing': ERROR,
'comma-style': ERROR,
'computed-property-spacing': ERROR,
3 changes: 3 additions & 0 deletions package.json
@@ -10,6 +10,7 @@
"scripts": {
"build": "rimraf lib/* && babel src --out-dir lib --extensions .js,.ts && tsc",
"build-js-for-test": "rimraf lib/* && babel src --out-dir lib --source-maps inline --extensions .js,.ts",
"kysely-codegen": "kysely-codegen --out-file src/types/schema.ts --schema bookbrainz --camel-case",
"lint": "eslint .",
"lint-errors": "eslint --quiet .",
"lint-staged": "lint-staged",
@@ -71,6 +72,7 @@
"deep-diff": "^1.0.2",
"immutable": "^3.8.2",
"knex": "^2.4.2",
"kysely": "^0.27.4",
"lodash": "^4.17.21",
"moment": "^2.29.1",
"pg": "^8.6.0",
@@ -99,6 +101,7 @@
"glob": "^7.1.2",
"husky": "^8.0.0",
"jsinspect": "^0.12.7",
"kysely-codegen": "^0.17.0",
"lint-staged": "^13.1.0",
"mocha": "^10.2.0",
"node-uuid": "^1.4.8",
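The package.json changes above add kysely as a runtime dependency and kysely-codegen as a dev dependency, plus a kysely-codegen script that generates camel-cased TypeScript types for the bookbrainz schema into src/types/schema.ts. As a rough sketch of how the generated types could be wired up (the actual initialization of orm.kysely is not part of this diff; the helper below and its connection handling are assumptions):

import {CamelCasePlugin, Kysely, PostgresDialect} from 'kysely';
import {Pool} from 'pg';
import type {DB} from './types/schema';

// Hypothetical helper, not part of this PR: builds the Kysely instance that
// the new import code accesses as orm.kysely.
export function createKyselyInstance(connectionString: string): Kysely<DB> {
	return new Kysely<DB>({
		dialect: new PostgresDialect({
			pool: new Pool({connectionString})
		}),
		// Mirrors the --camel-case flag passed to kysely-codegen, so that
		// snake_case columns are read and written as camelCase properties.
		plugins: [new CamelCasePlugin()]
	});
}

With the plugin in place, camel-cased table names such as importMetadata in the queries below resolve to the snake_case bookbrainz tables (import_metadata).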
3 changes: 1 addition & 2 deletions src/func/create-entity.ts
@@ -54,8 +54,7 @@ interface CreateEntityPropsType {
orm: ORM,
transacting: Transaction,
editorId: string,
entityData: ExtraEntityDataType,
entityType: EntityTypeString
entityData: ExtraEntityDataType
}

// TODO: function is only used to approve imports, check whether type error below is critical
167 changes: 83 additions & 84 deletions src/func/imports/approve-import.ts
@@ -1,5 +1,5 @@
/*
* Copyright (C) 2018 Shivam Tripathi
* Copyright (C) 2024 David Kellner
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -16,92 +16,91 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/

import * as _ from 'lodash';
import {
getAdditionalEntityProps, getEntityModelByType, getEntitySetMetadataByType
} from '../entity';
import type {EntityType} from '../../types/schema';
import type {ImportMetadataWithSourceT} from '../../types/imports';
import type {ORM} from '../..';
import type {Transaction} from '../types';
import {createNote} from '../note';
import {deleteImport} from './delete-import';
import {incrementEditorEditCountById} from '../editor';
import {uncapitalize} from '../../util';


interface approveEntityPropsType {
orm: ORM,
transacting: Transaction,
export async function approveImport({editorId, importEntity, orm}: {
editorId: number,
importEntity: any,
editorId: string
}

export async function approveImport(
{orm, transacting, importEntity, editorId}: approveEntityPropsType
): Promise<Record<string, unknown>> {
const {source, importId, type: entityType, disambiguationId, aliasSet,
identifierSetId, annotationId} = importEntity;
const {id: aliasSetId} = aliasSet;

const {Annotation, Entity, Revision} = orm;

// Increase user edit count
const editorUpdatePromise =
incrementEditorEditCountById(orm, editorId, transacting);

// Create a new revision record
const revision = await new Revision({
authorId: editorId
}).save(null, {transacting});
const revisionId = revision.get('id');

if (annotationId) {
// Set revision of our annotation which is NULL for imports
await new Annotation({id: annotationId})
.save({lastRevisionId: revisionId}, {transacting});
}

const note = `Approved from automatically imported record of ${source}`;
// Create a new note promise
const notePromise = createNote(orm, note, editorId, revision, transacting);

// Get additional props
const additionalProps = getAdditionalEntityProps(importEntity, entityType);

// Collect the entity sets from the importEntity
const entitySetMetadata = getEntitySetMetadataByType(entityType);
const entitySets = entitySetMetadata.reduce(
(set, {entityIdField}) =>
_.assign(set, {[entityIdField]: importEntity[entityIdField]})
, {}
);

await Promise.all([notePromise, editorUpdatePromise]);

const newEntity = await new Entity({type: entityType})
.save(null, {transacting});
const bbid = newEntity.get('bbid');
const propsToSet = _.extend({
aliasSetId,
annotationId,
bbid,
disambiguationId,
identifierSetId,
revisionId
}, entitySets, additionalProps);

const Model = getEntityModelByType(orm, entityType);

const entityModel = await new Model(propsToSet)
.save(null, {
method: 'insert',
transacting
});

const entity = await entityModel.refresh({
transacting,
withRelated: ['defaultAlias']
orm: ORM,
}) {
const {bbid, type, annotationId} = importEntity;
const metadata: ImportMetadataWithSourceT = importEntity.importMetadata;
const entityType = uncapitalize(type as EntityType);

await orm.kysely.transaction().execute(async (trx) => {
const pendingUpdates = [
// Mark the pending entity as accepted
trx.updateTable('entity')
.set('isImport', false)
.where((eb) => eb.and({bbid, isImport: true}))
.executeTakeFirst(),
// Indicate approval of the entity by setting the accepted BBID
trx.updateTable('importMetadata')
.set('acceptedEntityBbid', bbid)
.where('pendingEntityBbid', '=', bbid)
.executeTakeFirst(),
// Increment revision count of the active editor
trx.updateTable('editor')
.set((eb) => ({
revisionsApplied: eb('revisionsApplied', '+', 1),
totalRevisions: eb('totalRevisions', '+', 1),
}))
.where('id', '=', editorId)
.executeTakeFirst(),
];

// Create a new revision and an entity header
const revision = await trx.insertInto('revision')
.values({authorId: editorId})
.returning('id')
.executeTakeFirstOrThrow();
await trx.insertInto(`${entityType}Header`)
.values({bbid})
.executeTakeFirstOrThrow();

// Create initial entity revision using the entity data from the import
await trx.insertInto(`${entityType}Revision`)
.values((eb) => ({
bbid,
dataId: eb.selectFrom(`${entityType}ImportHeader`)
.select('dataId')
.where('bbid', '=', bbid),
id: revision.id
}))
.executeTakeFirstOrThrow();

// Update the entity header with the revision; doing this earlier would cause an FK constraint violation
pendingUpdates.push(trx.updateTable(`${entityType}Header`)
.set('masterRevisionId', revision.id)
.where('bbid', '=', bbid)
.executeTakeFirst());

if (annotationId) {
// Set revision of our annotation which is NULL for pending imports
pendingUpdates.push(trx.updateTable('annotation')
.set('lastRevisionId', revision.id)
.where('id', '=', annotationId)
.executeTakeFirst());
}

// Create edit note
await trx.insertInto('note')
.values({
authorId: editorId,
content: `Approved automatically imported record ${metadata.externalIdentifier} from ${metadata.source}`,
revisionId: revision.id,
})
.executeTakeFirstOrThrow();

return Promise.all(pendingUpdates.map(async (update) => {
const {numUpdatedRows} = await update;
if (Number(numUpdatedRows) !== 1) {
throw new Error(`Failed to approve import of ${bbid}`);
}
}));
});

await deleteImport(transacting, importId, bbid);

return entity;
}
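The rewritten approveImport runs entirely inside a single orm.kysely.transaction(): it flips the pending entity to an accepted one under its existing BBID, creates a revision plus entity header, and queues several updateTable calls whose results are only checked at the end, where anything other than exactly one updated row throws and rolls the whole transaction back. Deferring the masterRevisionId update until after the revision insert also avoids the FK constraint violation noted in the comment above. A minimal, self-contained sketch of that row-count guard, using a toy schema rather than the generated one:

import {Kysely} from 'kysely';

// Toy schema for illustration only; the real types come from kysely-codegen.
interface ToyDatabase {
	editor: {id: number, totalRevisions: number};
}

// Run an update inside a transaction and verify it touched exactly one row;
// throwing inside execute() aborts and rolls back the whole transaction.
async function incrementExactlyOne(db: Kysely<ToyDatabase>, editorId: number) {
	await db.transaction().execute(async (trx) => {
		const update = trx.updateTable('editor')
			.set((eb) => ({totalRevisions: eb('totalRevisions', '+', 1)}))
			.where('id', '=', editorId)
			.executeTakeFirst();

		const {numUpdatedRows} = await update;
		if (Number(numUpdatedRows) !== 1) {
			throw new Error(`Editor ${editorId} not found`);
		}
	});
}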
66 changes: 27 additions & 39 deletions src/func/imports/create-import.ts
@@ -18,16 +18,16 @@
*/


import {ENTITY_TYPES, type EntityTypeString} from '../../types/entity';
import type {ImportHeaderT, ImportMetadataT, _ImportT} from '../../types/imports';
import {ENTITY_TYPES, EntityT, type EntityTypeString} from '../../types/entity';
import type {ImportHeaderT, ImportMetadataT} from '../../types/imports';
import type {ParsedEdition, ParsedEntity, QueuedEntity} from '../../types/parser';

import type {ORM} from '../..';
import type {Transaction} from '../types';
import _ from 'lodash';
import {camelToSnake} from '../../util';
import {getAdditionalEntityProps} from '../entity';
import {getOriginSourceId} from './misc';
import {getExternalSourceId} from './misc';
import {updateAliasSet} from '../alias';
import {updateAnnotation} from '../annotation';
import {updateDisambiguation} from '../disambiguation';
@@ -36,20 +36,18 @@ import {updateLanguageSet} from '../language';
import {updateReleaseEventSet} from '../releaseEvent';


function createImportRecord(transacting: Transaction, data: _ImportT) {
return transacting.insert(camelToSnake(data)).into('bookbrainz.import').returning('id');
function createEntityRecord(transacting: Transaction, data: EntityT) {
return transacting.insert(camelToSnake(data)).into('bookbrainz.entity').returning('bbid');
}

function createOrUpdateImportMetadata(transacting: Transaction, record: ImportMetadataT) {
return transacting.insert(camelToSnake(record)).into('bookbrainz.link_import')
.onConflict(['origin_source_id', 'origin_id']).merge();
return transacting.insert(camelToSnake(record)).into('bookbrainz.import_metadata')
.onConflict(['external_source_id', 'external_identifier']).merge();
}

function getImportMetadata(transacting: Transaction, externalSourceId: number, externalIdentifier: string) {
return transacting.select('import_id', 'entity_id').from('bookbrainz.link_import').where(camelToSnake({
originId: externalIdentifier,
originSourceId: externalSourceId
}));
return transacting.select('pending_entity_bbid', 'accepted_entity_bbid').from('bookbrainz.import_metadata')
.where(camelToSnake({externalIdentifier, externalSourceId}));
}

/** IDs of extra data sets which not all entity types have. */
@@ -93,7 +91,7 @@ function createImportDataRecord(transacting: Transaction, dataSets: DataSetIds,
function createOrUpdateImportHeader(transacting: Transaction, record: ImportHeaderT, entityType: EntityTypeString) {
const table = `bookbrainz.${_.snakeCase(entityType)}_import_header`;
return transacting.insert(camelToSnake(record)).into(table)
.onConflict('import_id').merge();
.onConflict('bbid').merge();
}

async function updateEntityExtraDataSets(
Expand Down Expand Up @@ -140,8 +138,8 @@ export type ImportStatus =

export type ImportResult = {

/** ID of the imported entity (numeric for now, will be a BBID in a future version). */
importId: number | string;
/** BBID of the pending imported entity. */
importId: string;

/** Import status of the processed entity. */
status: ImportStatus;
Expand All @@ -156,25 +154,16 @@ export function createImport(orm: ORM, importData: QueuedEntity, {

return orm.bookshelf.transaction<ImportResult>(async (transacting) => {
const {entityType} = importData;
const {alias, annotation, identifiers, disambiguation, source} = importData.data;
const {alias, annotation, identifiers, disambiguation, externalSource} = importData.data;

// Get origin_source
let originSourceId: number = null;
const externalSourceId: number = await getExternalSourceId(transacting, externalSource);

try {
originSourceId = await getOriginSourceId(transacting, source);
}
catch (err) {
// TODO: useless, we are only catching our self-thrown errors here
throw new Error(`Error during getting source id - ${err}`);
}

const [existingImport] = await getImportMetadata(transacting, originSourceId, importData.originId);
const [existingImport] = await getImportMetadata(transacting, externalSourceId, importData.externalIdentifier);
if (existingImport) {
const isPendingImport = !existingImport.entity_id;
const isPendingImport = !existingImport.accepted_entity_bbid;
if (existingImportAction === 'skip') {
return {
importId: existingImport.import_id,
importId: existingImport.pending_entity_bbid,
status: isPendingImport ? 'skipped pending' : 'skipped accepted'
};
}
Expand All @@ -186,14 +175,14 @@ export function createImport(orm: ORM, importData: QueuedEntity, {
if (existingImportAction === 'update pending') {
// We only want to update pending, but not accepted entities
return {
importId: existingImport.import_id,
importId: existingImport.pending_entity_bbid,
status: 'skipped accepted'
};
}
// We also want to create updates for already accepted entities ('update pending and accepted')
// TODO: implement this feature in a later version and drop the following temporary return statement
return {
importId: existingImport.import_id,
importId: existingImport.pending_entity_bbid,
status: 'skipped accepted'
};
}
@@ -229,24 +218,23 @@
}

// Create import entity (if it is not already existing from a previous import attempt)
let importId: number = existingImport?.import_id;
let importId: string = existingImport?.pending_entity_bbid;
if (!importId) {
try {
const [idObj] = await createImportRecord(transacting, {type: entityType});
importId = _.get(idObj, 'id');
const [idObj] = await createEntityRecord(transacting, {isImport: true, type: entityType});
importId = _.get(idObj, 'bbid');
}
catch (err) {
throw new Error(`Failed to create a new import ID: ${err}`);
}
}

const importMetadata: ImportMetadataT = {
importId,
importMetadata: importData.data.metadata,
importedAt: transacting.raw("timezone('UTC'::TEXT, now())"),
additionalData: importData.data.metadata,
externalIdentifier: importData.externalIdentifier,
externalSourceId,
lastEdited: importData.lastEdited,
originId: importData.originId,
originSourceId
pendingEntityBbid: importId
};

try {
Expand All @@ -257,7 +245,7 @@ export function createImport(orm: ORM, importData: QueuedEntity, {
}

try {
await createOrUpdateImportHeader(transacting, {dataId, importId}, entityType);
await createOrUpdateImportHeader(transacting, {bbid: importId, dataId}, entityType);
}
catch (err) {
throw new Error(`Failed to upsert import header: ${err}`);