Skip to content

Commit

Permalink
feat(repository): add the entity archival feature
Browse files Browse the repository at this point in the history
Allows archiving of soft-deleted data.

GH-00
  • Loading branch information
yeshamavani committed Dec 16, 2024
1 parent 85757a8 commit 28c8644
Show file tree
Hide file tree
Showing 22 changed files with 4,313 additions and 1,465 deletions.
4,922 changes: 3,458 additions & 1,464 deletions package-lock.json

Large diffs are not rendered by default.

13 changes: 12 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,19 @@
"./sequelize": {
"types": "./dist/repositories/sequelize/index.d.ts",
"default": "./dist/repositories/sequelize/index.js"
},
"./aws-s3": {
"types": "./dist/aws-s3/index.d.ts",
"default": "./dist/aws-s3/index.js"
}
},
"typesVersions": {
"*": {
"sequelize": [
"./dist/repositories/sequelize/index.d.ts"
],
"aws-s3": [
"./dist/aws-s3/index.d.ts"
]
}
},
Expand Down Expand Up @@ -72,6 +79,7 @@
}
},
"devDependencies": {
"@aws-sdk/client-s3": "^3.468.0",
"@commitlint/cli": "^17.7.1",
"@commitlint/config-conventional": "^17.7.0",
"@loopback/boot": "^7.0.9",
Expand All @@ -87,10 +95,12 @@
"@semantic-release/npm": "^9.0.1",
"@semantic-release/release-notes-generator": "^10.0.3",
"@types/lodash": "^4.14.191",
"@types/node": "^16.18.119",
"@typescript-eslint/eslint-plugin": "^7.16.0",
"@typescript-eslint/parser": "^7.16.0",
"@types/node": "^18.11.9",
"aws-sdk": "^2.1692.0",
"commitizen": "^4.2.4",
"csv-parser": "^3.0.0",
"cz-conventional-changelog": "^3.3.0",
"cz-customizable": "^6.3.0",
"eslint": "^8.57.0",
Expand All @@ -106,6 +116,7 @@
"simple-git": "^3.15.1",
"source-map-support": "^0.5.21",
"sqlite3": "^5.1.4",
"stream": "^0.0.3",
"typescript": "~5.2.2"
},
"publishConfig": {
Expand Down
74 changes: 74 additions & 0 deletions src/archival-component.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
import {
Binding,
Component,
ContextTags,
ControllerClass,
CoreBindings,
ProviderMap,
ServiceOrProviderClass,
inject,
injectable,
} from '@loopback/core';
import {Class, Repository} from '@loopback/repository';
import {Model, RestApplication} from '@loopback/rest';
import {ArchivalComponentBindings} from './keys';
import {
ArchivalMappingRepository,
RetrievalJobDetailsRepository,
} from './repositories';
import {ProcessRetrievedDataProvider} from './providers';
import {
BuildWhereConditionService,
ImportArchivedDataService,
} from './services';
import {ArchiveMapping, RetrievalJobDetails} from './models';

// Configure the binding for ArchivalComponent
@injectable({tags: {[ContextTags.KEY]: ArchivalComponentBindings.COMPONENT}})
export class ArchivalComponent implements Component {
constructor(
@inject(CoreBindings.APPLICATION_INSTANCE)
private readonly application: RestApplication,
) {
this.providers = {};

this.providers[ArchivalComponentBindings.PROCESS_RETRIEVED_DATA.key] =
ProcessRetrievedDataProvider;

this.application
.bind('services.BuildWhereConditionService')
.toClass(BuildWhereConditionService);
this.application
.bind('services.ImportArchivedDataService')
.toClass(ImportArchivedDataService);

this.repositories = [
ArchivalMappingRepository,
RetrievalJobDetailsRepository,
];

this.models = [ArchiveMapping, RetrievalJobDetails];
}
providers?: ProviderMap = {};

bindings?: Binding[] = [];

services?: ServiceOrProviderClass[];

/**
* An optional list of Repository classes to bind for dependency injection
* via `app.repository()` API.
*/
repositories?: Class<Repository<Model>>[];

/**
* An optional list of Model classes to bind for dependency injection
* via `app.model()` API.
*/
models?: Class<Model>[];

/**
* An array of controller classes
*/
controllers?: ControllerClass[];
}
80 changes: 80 additions & 0 deletions src/aws-s3/export-archive-data.provider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import {PutObjectCommand, S3} from '@aws-sdk/client-s3';
import {BindingScope, Provider, inject, injectable} from '@loopback/core';
import {AnyObject} from '@loopback/repository';
import {HttpErrors} from '@loopback/rest';
import {AWSS3Bindings, AwsS3Config} from '../keys';
import {ExportDataExternalSystem} from '../types';

@injectable({scope: BindingScope.TRANSIENT})
export class ExportArchiveDataProvider
  implements Provider<ExportDataExternalSystem>
{
  constructor(
    @inject(AWSS3Bindings.Config, {optional: true})
    private readonly config: AwsS3Config,
  ) {}

  /**
   * Returns a function that serializes the selected entries to CSV and
   * uploads the file to the configured S3 bucket.
   */
  value(): ExportDataExternalSystem {
    return async (selectedEntries: AnyObject[]) =>
      this.exportToCsv(selectedEntries);
  }

  /**
   * Serializes `selectedEntries` to CSV and uploads the result to S3.
   * The header row is taken from the keys of the first entry; every row is
   * rendered with {@link toCsvValue} so that commas, quotes and newlines
   * inside values cannot corrupt the CSV structure.
   *
   * @param selectedEntries - rows to archive; rows are expected to share the
   *   key set of the first row
   * @returns the S3 object key of the uploaded CSV file
   * @throws HttpErrors.UnprocessableEntity when no entries are given or the
   *   S3 upload fails
   */
  async exportToCsv(selectedEntries: AnyObject[]): Promise<string> {
    if (!selectedEntries.length) {
      // Guard: Object.keys(selectedEntries[0]) below would throw a raw
      // TypeError on an empty batch.
      throw new HttpErrors.UnprocessableEntity('No entries to export');
    }
    const header = Object.keys(selectedEntries[0]);
    const csvRows: string[] = [header.join(',')];

    for (const entry of selectedEntries) {
      csvRows.push(header.map(key => this.toCsvValue(entry[key])).join(','));
    }

    const csvString = csvRows.join('\n');
    const timestamp = new Date().toISOString();
    //Example: PATH_TO_UPLOAD_FILES='/path'
    const fileName = `${process.env.PATH_TO_UPLOAD_FILES}/archive_${timestamp}.csv`;

    // The injected config is spread FIRST so the explicit, env-derived
    // values below win. (Spreading it last — as before — let the config
    // silently clobber the credentials built from environment variables.)
    // `this.config` is an optional injection and may be undefined, hence
    // the optional chaining on every fallback read.
    const s3Config: AwsS3Config = {
      ...this.config,
      credentials: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? this.config?.accessKeyId,
        secretAccessKey:
          process.env.AWS_SECRET_ACCESS_KEY ?? this.config?.secretAccessKey,
      },
      region: process.env.AWS_REGION ?? this.config?.region,
    };
    const s3Client: S3 = new S3(s3Config);
    const bucketName = process.env.AWS_S3_BUCKET_NAME;
    const params = {
      Body: csvString,
      Key: fileName,
      Bucket: bucketName as string,
    };
    try {
      await s3Client.send(new PutObjectCommand(params));
      return params.Key;
    } catch (error) {
      throw new HttpErrors.UnprocessableEntity(error.message);
    }
  }

  /**
   * Renders a single cell for the CSV output:
   * - null/undefined -> empty string
   * - Date -> ISO-8601 string
   * - objects -> JSON
   * - any value containing `,`, `"` or a newline is quoted, with inner
   *   quotes doubled per RFC 4180
   */
  private toCsvValue(value: unknown): string {
    if (value === null || value === undefined) {
      return '';
    }
    if (value instanceof Date) {
      return value.toISOString();
    }
    const text =
      typeof value === 'object' ? JSON.stringify(value) : String(value);
    if (/[",\n]/.test(text)) {
      return `"${text.replace(/"/g, '""')}"`;
    }
    return text;
  }
}
73 changes: 73 additions & 0 deletions src/aws-s3/import-archive-data.provider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
import {
BindingScope,
injectable,
Provider,
ValueOrPromise,
} from '@loopback/core';
import {AnyObject} from '@loopback/repository';
import {HttpErrors} from '@loopback/rest';
import AWS from 'aws-sdk';
import {ImportDataExternalSystem} from '../types';

@injectable({scope: BindingScope.TRANSIENT})
export class ImportArchiveDataProvider
  implements Provider<ImportDataExternalSystem>
{
  /**
   * Returns a function that downloads an archived CSV file from S3 and
   * parses it back into JSON records.
   */
  value(): ValueOrPromise<ImportDataExternalSystem> {
    return async (fileName: string) => this.getFileContent(fileName);
  }

  /**
   * Downloads `fileName` from the configured S3 bucket and parses its CSV
   * body into an array of records. Empty cells are restored to `null` and
   * ISO-8601 timestamps to `Date` instances.
   *
   * NOTE(review): this provider still uses aws-sdk v2 while the export
   * provider uses @aws-sdk/client-s3 (v3); consider unifying on v3.
   *
   * @param fileName - S3 object key of the archived CSV file
   * @returns parsed rows; an empty array when the file has no data rows
   * @throws HttpErrors.UnprocessableEntity when the download or parse fails
   */
  async getFileContent(fileName: string): Promise<AnyObject[]> {
    const stream = require('stream');
    const csv = require('csv-parser');

    AWS.config = new AWS.Config({
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      region: process.env.AWS_REGION,
    });
    const s3 = new AWS.S3();

    const params: AWS.S3.GetObjectRequest = {
      Bucket: process.env.AWS_S3_BUCKET_NAME as string,
      Key: fileName,
    };
    try {
      const s3Response = await s3.getObject(params).promise();
      const csvData = s3Response.Body!.toString('utf-8');

      const jsonArray: AnyObject[] = await new Promise((resolve, reject) => {
        const results: AnyObject[] = [];
        stream.Readable.from(csvData)
          .pipe(csv())
          .on('data', (data: AnyObject) => results.push(data))
          .on('end', () => resolve(results))
          .on('error', reject);
      });
      if (!jsonArray.length) {
        // Guard: Object.keys(jsonArray[0]) would throw on an empty file and
        // be mis-reported as an UnprocessableEntity error.
        return jsonArray;
      }
      const headers = Object.keys(jsonArray[0]);
      for (const entry of jsonArray) {
        for (const key of headers) {
          entry[key] = this.processEntryValue(entry[key]);
        }
      }
      return jsonArray;
    } catch (error) {
      throw new HttpErrors.UnprocessableEntity(error.message);
    }
  }

  /**
   * Restores a CSV cell to its original JS value:
   * '' -> null, ISO-8601 timestamp -> Date, anything else unchanged.
   */
  private processEntryValue(value: string) {
    if (value === '') {
      return null;
    }
    // `\.` before the milliseconds — the previous pattern used an unescaped
    // dot, which matched ANY character in that position.
    if (
      typeof value === 'string' &&
      /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/.test(value)
    ) {
      return new Date(value);
    }
    return value;
  }
}
2 changes: 2 additions & 0 deletions src/aws-s3/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
export * from './export-archive-data.provider';
export * from './import-archive-data.provider';
2 changes: 2 additions & 0 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,5 @@ export * from './mixins';
export * from './models';
export * from './repositories';
export * from './types';
export * from './keys';
export * from './archival-component';
47 changes: 47 additions & 0 deletions src/keys.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import {S3ClientConfig} from '@aws-sdk/client-s3';
import {BindingKey} from '@loopback/core';
import {
ExportDataExternalSystem,
ImportDataExternalSystem,
ProcessRetrievedData,
} from './types';
import {ArchivalComponent} from './archival-component';

/**
 * Binding keys used by this component.
 */

// Common prefix for every binding key contributed by the archival component.
const BINDING_PREFIX = 'sf.archival';

export namespace ArchivalComponentBindings {
  // Key under which the ArchivalComponent instance itself is bound.
  export const COMPONENT = BindingKey.create<ArchivalComponent>(
    `${BINDING_PREFIX}.ArchivalComponent`,
  );

  // Extension point: function that exports archive entries to an external
  // system (e.g. the bundled AWS S3 CSV exporter). May be bound to null.
  export const EXPORT_ARCHIVE_DATA =
    BindingKey.create<ExportDataExternalSystem | null>(
      `${BINDING_PREFIX}.entity.archive.export`,
    );

  // Extension point: function that imports previously archived data back
  // from the external system. May be bound to null.
  export const IMPORT_ARCHIVE_DATA =
    BindingKey.create<ImportDataExternalSystem | null>(
      `${BINDING_PREFIX}.entity.archive.import`,
    );

  // Extension point: post-processing hook applied to retrieved archive
  // data before it is handed back to the caller. May be bound to null.
  export const PROCESS_RETRIEVED_DATA =
    BindingKey.create<ProcessRetrievedData | null>(
      `${BINDING_PREFIX}.entity.import`,
    );
}

export namespace AWSS3Bindings {
  // S3 client configuration consumed by the AWS S3 providers.
  // NOTE(review): the resulting key is 'sf.archival.archival.s3.config' —
  // the doubled 'archival' looks unintentional, but existing consumers may
  // depend on the exact string; confirm before changing it.
  export const Config = BindingKey.create<AwsS3Config>(
    `${BINDING_PREFIX}.archival.s3.config`,
  );
}

// S3 client configuration extended with explicit, required credentials
// (the SDK's own S3ClientConfig keeps credentials optional/nested).
export interface AwsS3Config extends S3ClientConfig {
  accessKeyId: string;
  secretAccessKey: string;
  region?: string;
}
Loading

0 comments on commit 28c8644

Please sign in to comment.