diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..397828d
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,9 @@
+# Use LF line endings
+*.ts text eol=lf
+*.tsx text eol=lf
+*.js text eol=lf
+*.jsx text eol=lf
+*.json text eol=lf
+*.css text eol=lf
+*.text text eol=lf
+*.md text eol=lf
\ No newline at end of file
diff --git a/.github/workflows/fullstack.yml b/.github/workflows/fullstack.yml
index b98fb12..0c49192 100644
--- a/.github/workflows/fullstack.yml
+++ b/.github/workflows/fullstack.yml
@@ -21,7 +21,7 @@ jobs:
         name: Install pnpm
         id: pnpm-install
         with:
-          version: 7
+          version: 8
           run_install: false

       - name: Get pnpm store directory
diff --git a/api/.gitignore b/api/.gitignore
deleted file mode 100644
index b3ab1ae..0000000
--- a/api/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-.idea/
-.vscode/
-node_modules/
-build/
-tmp/
-temp/
\ No newline at end of file
diff --git a/api/.prettierrc b/api/.prettierrc
index dcb7279..3ddabd6 100644
--- a/api/.prettierrc
+++ b/api/.prettierrc
@@ -1,4 +1,5 @@
 {
   "singleQuote": true,
-  "trailingComma": "all"
+  "trailingComma": "all",
+  "endOfLine": "lf"
 }
\ No newline at end of file
diff --git a/api/package.json b/api/package.json
index 798e9b4..ba92e4e 100644
--- a/api/package.json
+++ b/api/package.json
@@ -33,6 +33,9 @@
     "@nestjs/typeorm": "^9.0.1",
     "@types/js-yaml": "^4.0.5",
     "@types/passport-jwt": "^3.0.7",
+    "class-transformer": "^0.5.1",
+    "google-auth-library": "^8.7.0",
+    "googleapis": "^118.0.0",
     "dotenv": "^16.0.3",
     "joi": "^17.7.0",
     "js-yaml": "^4.1.0",
@@ -46,6 +49,7 @@
     "webpack": "^5.75.0"
   },
   "devDependencies": {
+    "@types/multer": "^1.4.7",
    "@automock/jest": "^1.0.1",
     "@golevelup/ts-jest": "^0.3.6",
     "@nestjs/cli": "^9.0.0",
@@ -93,6 +97,10 @@
     "collectCoverageFrom": [
       "**/*.(t|j)s"
     ],
+    "moduleNameMapper": {
+      "^@mocks/(.*)$": "<rootDir>/src/mocks/$1"
+    },
+    "coverageDirectory": "../coverage",
     "testEnvironment": "node"
   }
 }
\ No newline at end of file
diff --git a/api/src/app.module.ts b/api/src/app.module.ts
index 3b887ef..cab0441 100644
--- a/api/src/app.module.ts
+++ b/api/src/app.module.ts
@@ -2,6 +2,8 @@ import { Module } from '@nestjs/common';
 import { ConfigModule, ConfigService } from '@nestjs/config';
 import { TypeOrmModule, TypeOrmModuleOptions } from '@nestjs/typeorm';
 import { UsersModule } from './users/users.module';
+import { ApplicationsModule } from './application/applications.module';
+import { TimeSlotsModule } from './timeslots/timeslots.module';
 import { AuthenticationModule } from './authentication/authentication.module';
 import { APP_GUARD } from '@nestjs/core';
 import { JwtGuard } from './authentication/jwt-guard.guard';
@@ -29,6 +31,8 @@ import { AuthorizationGuard } from './authorization/authorization.guard';
       inject: [ConfigService],
     }),
     UsersModule,
+    ApplicationsModule,
+    TimeSlotsModule,
     AuthenticationModule,
     AuthorizationModule,
   ],
diff --git a/api/src/application/application-response.dto.ts b/api/src/application/application-response.dto.ts
new file mode 100644
index 0000000..e0ac281
--- /dev/null
+++ b/api/src/application/application-response.dto.ts
@@ -0,0 +1,30 @@
+import {
+  Application,
+  ApplicationState,
+  ApplicationType,
+  LangLevel,
+} from '@hkrecruitment/shared';
+import { Exclude, Expose } from 'class-transformer';
+
+@Exclude()
+export class ApplicationResponseDto implements Partial<Application> {
+  @Expose() id: number;
+  @Expose() submission: string;
+  @Expose() state: ApplicationState;
+  @Expose() itaLevel: LangLevel;
+  @Expose() type: ApplicationType;
+
+  @Expose() bscStudyPath?: string;
+
@Expose() bscAcademicYear?: number; + @Expose() bscGradesAvg?: number; + @Expose() cfu?: number; + + @Expose() mscStudyPath?: string; + @Expose() mscGradesAvg?: number; + @Expose() mscAcademicYear?: number; + + @Expose() phdDescription?: string; + + // @Expose() + // applicant: ApplicantDto +} diff --git a/api/src/application/application-types.ts b/api/src/application/application-types.ts new file mode 100644 index 0000000..6275550 --- /dev/null +++ b/api/src/application/application-types.ts @@ -0,0 +1,4 @@ +export type ApplicationFiles = { + cv: Express.Multer.File[]; + grades?: Express.Multer.File[]; +}; diff --git a/api/src/application/application.entity.ts b/api/src/application/application.entity.ts new file mode 100644 index 0000000..a9e0dd3 --- /dev/null +++ b/api/src/application/application.entity.ts @@ -0,0 +1,87 @@ +import { + Column, + Entity, + ChildEntity, + PrimaryGeneratedColumn, + TableInheritance, +} from 'typeorm'; +import { + Application as ApplicationInterface, + ApplicationState, + ApplicationType, + LangLevel, +} from '@hkrecruitment/shared'; +// import { TimeSlot } from '@hkrecruitment/shared/slot'; + +@Entity() +@TableInheritance({ column: 'type' }) // TypeORM column to discriminate child entities +export class Application implements ApplicationInterface { + @PrimaryGeneratedColumn('increment') + id: number; + + @Column() + type: ApplicationType; + + @Column('varchar', { length: 64, name: 'applicant_id' }) + applicantId: string; + + @Column() + submission: Date; + + @Column() + state: ApplicationState; + + @Column({ name: 'last_modified', nullable: true }) + lastModified: Date; + + @Column({ nullable: true, length: 255 }) + notes?: string; + + @Column({ length: 255 }) + cv: string; + + // @Column() + // availability: TimeSlot[]; + + // @Column({ "name": "interview_id" }) + // interviewId: number; + + @Column({ name: 'ita_level' }) + itaLevel: LangLevel; +} + +@ChildEntity(ApplicationType.BSC) +export class BscApplication extends Application { + @Column({ name: 'bsc_study_path', nullable: true }) + bscStudyPath: string; + + @Column({ name: 'bsc_academic_year', nullable: true }) + bscAcademicYear: number; + + @Column({ name: 'bsc_grades_avg', nullable: true }) + bscGradesAvg: number; + + @Column({ nullable: true }) + cfu: number; + + @Column({ nullable: true, length: 255 }) + grades: string; +} + +@ChildEntity(ApplicationType.MSC) +export class MscApplication extends Application { + @Column({ name: 'msc_study_path', nullable: true }) + mscStudyPath: string; + + @Column({ name: 'msc_grades_avg', nullable: true }) + mscGradesAvg: number; + + @Column({ name: 'msc_academic_year', nullable: true }) + mscAcademicYear: number; +} + +@ChildEntity(ApplicationType.PHD) +export class PhdApplication extends Application { + @Column({ name: 'phd_description', nullable: true }) + phdDescription: string; +} diff --git a/api/src/application/applications.controller.spec.ts b/api/src/application/applications.controller.spec.ts new file mode 100644 index 0000000..1c04510 --- /dev/null +++ b/api/src/application/applications.controller.spec.ts @@ -0,0 +1,403 @@ +import { createMockAbility } from '@hkrecruitment/shared/abilities.spec'; +import { ApplicationsController } from './applications.controller'; +import { ApplicationsService } from './applications.service'; +import { + Action, + ApplicationState, + ApplicationType, + Role, +} from '@hkrecruitment/shared'; +import { TestBed } from '@automock/jest'; +import { ApplicationResponseDto } from './application-response.dto'; +import { 
plainToClass } from 'class-transformer'; +import { Application } from './application.entity'; +import { + mockBscApplication, + mockMscApplication, + updateApplicationDTO, + testDate, +} from '@mocks/data'; +import { + BadRequestException, + ConflictException, + ForbiddenException, + NotFoundException, + UnprocessableEntityException, +} from '@nestjs/common'; +import { createMock } from '@golevelup/ts-jest'; +import { AuthenticatedRequest } from 'src/authorization/authenticated-request.types'; +import { ApplicationFiles } from './application-types'; +import { flattenApplication } from './create-application.dto'; + +describe('ApplicationController', () => { + let controller: ApplicationsController; + let service: ApplicationsService; + + /************* Test setup ************/ + + beforeAll(() => { + jest + .spyOn(global, 'Date') + .mockImplementation(() => testDate as unknown as string); + }); + + beforeEach(async () => { + const { unit, unitRef } = TestBed.create(ApplicationsController).compile(); + + controller = unit; + service = unitRef.get(ApplicationsService); + }); + + it('should be defined', () => { + expect(controller).toBeDefined(); + expect(service).toBeDefined(); + }); + + describe('listApplications', () => { + it('should return the list of applications', async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Read, 'Application'); + }); + const applications = [{ id: 1 }, { id: 2 }] as Application[]; + const appResponseDtos = [ + plainToClass(ApplicationResponseDto, applications[0]), + plainToClass(ApplicationResponseDto, applications[1]), + ]; + jest.spyOn(service, 'listApplications').mockResolvedValue(applications); + + const result = await controller.listApplications(mockAbility); + + expect(result).toEqual(appResponseDtos); + expect(service.listApplications).toHaveBeenCalledTimes(1); + expect(mockAbility.can).toHaveBeenCalled(); + }); + + it('should return only applications that user can read', async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Read, 'Application', { applicantId: '1' }); + }); + const applications = [ + { ...mockBscApplication, applicantId: '1' }, + { ...mockMscApplication, applicantId: '2' }, + ] as Application[]; + const appResponseDtos = [ + plainToClass(ApplicationResponseDto, applications[0]), + ]; + jest.spyOn(service, 'listApplications').mockResolvedValue(applications); + + const result = await controller.listApplications(mockAbility); + + expect(result).toEqual(appResponseDtos); + expect(service.listApplications).toHaveBeenCalledTimes(1); + expect(mockAbility.can).toHaveBeenCalled(); + }); + }); + + describe('getApplication', () => { + it('should return an application if it exists', async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Read, 'Application'); + }); + jest + .spyOn(service, 'findByApplicationId') + .mockResolvedValue(mockMscApplication); + const result = await controller.getApplication( + mockAbility, + mockMscApplication.id, + ); + const expectedApp = { + ...mockMscApplication, + id: mockMscApplication.id, + state: mockMscApplication.state, + submission: undefined, + phdDescription: undefined, + bscAcademicYear: undefined, + bscGradesAvg: undefined, + bscStudyPath: undefined, + cfu: undefined, + } as ApplicationResponseDto; + delete expectedApp['notes']; + expect(result).toEqual(expectedApp); + expect(service.findByApplicationId).toHaveBeenCalledTimes(1); + expect(service.findByApplicationId).toHaveBeenCalledWith( + mockMscApplication.id, + ); + 
expect(mockAbility.can).toHaveBeenCalled(); + }); + + it("should throw a NotFoundException if the application doesn't exist", async () => { + const mockAbility = createMockAbility(({ cannot }) => { + cannot(Action.Read, 'Application'); + }); + jest.spyOn(service, 'findByApplicationId').mockResolvedValue(null); + const result = controller.getApplication( + mockAbility, + mockBscApplication.id, + ); + await expect(result).rejects.toThrow(NotFoundException); + expect(service.findByApplicationId).toHaveBeenCalledTimes(1); + expect(service.findByApplicationId).toHaveBeenCalledWith( + mockBscApplication.id, + ); + }); + + it("should throw a ForbiddenException if the user can't read the application", async () => { + const mockAbility = createMockAbility(({ cannot }) => { + cannot(Action.Read, 'Application', { applicantId: '1' }); + }); + jest + .spyOn(service, 'findByApplicationId') + .mockResolvedValue({ ...mockBscApplication, applicantId: '2' }); + const result = controller.getApplication( + mockAbility, + mockBscApplication.id, + ); + await expect(result).rejects.toThrow(ForbiddenException); + expect(service.findByApplicationId).toHaveBeenCalledTimes(1); + expect(service.findByApplicationId).toHaveBeenCalledWith( + mockBscApplication.id, + ); + expect(mockAbility.can).toHaveBeenCalled(); + }); + }); + + describe('createApplication', () => { + it('should create an application if the user is allowed to create it', async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Create, 'Application'); + }); + const mockReq = createMock(); + mockReq.user.sub = '123'; + const expectedApplication = { + ...flattenApplication(mockBscApplication), + submission: undefined, + state: ApplicationState.New, + } as ApplicationResponseDto; + const mockFiles = createMock(); + jest + .spyOn(service, 'createApplication') + .mockResolvedValue(mockBscApplication); + const result = await controller.createApplication( + mockFiles, + mockBscApplication, + mockAbility, + mockReq, + ); + expect(result).toEqual(expectedApplication); + expect(service.createApplication).toHaveBeenCalledTimes(1); + expect(service.createApplication).toHaveBeenCalledWith( + mockBscApplication, + {}, + mockReq.user.sub, + ); + expect(mockAbility.can).toHaveBeenCalled(); + }); + + it.each([ApplicationType.BSC, ApplicationType.MSC])( + `should require grades file for non-phd applications`, + async (applicationType) => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Create, 'Application'); + }); + const mockReq = createMock(); + mockReq.user.sub = '123'; + const mockApplication = + applicationType === ApplicationType.BSC + ? 
mockBscApplication + : mockMscApplication; + const mockFiles = { + cv: createMock(), + }; + jest + .spyOn(service, 'createApplication') + .mockResolvedValue(mockApplication); + const result = controller.createApplication( + mockFiles, + mockApplication, + mockAbility, + mockReq, + ); + expect(result).rejects.toThrow(UnprocessableEntityException); + expect(service.createApplication).toHaveBeenCalledTimes(0); + }, + ); + + it.each(Object.values(ApplicationType))( + `should require cv file for all applications`, + async (applicationType) => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Create, 'Application'); + }); + const mockReq = createMock(); + mockReq.user.sub = '123'; + const mockApplication = { type: applicationType } as Application; + const mockFiles = {} as ApplicationFiles; + jest + .spyOn(service, 'createApplication') + .mockResolvedValue(mockApplication); + const result = controller.createApplication( + mockFiles, + mockApplication, + mockAbility, + mockReq, + ); + expect(result).rejects.toThrow(UnprocessableEntityException); + expect(service.createApplication).toHaveBeenCalledTimes(0); + }, + ); + + it('should throw a ForbiddenException if the user is not allowed to submit an application', async () => { + const mockAbility = createMockAbility(({ cannot }) => { + cannot(Action.Create, 'Application'); + }); + const mockReq = createMock(); + mockReq.user.sub = '123'; + const mockApplication = { type: ApplicationType.PHD } as Application; + const mockFiles = { + cv: createMock(), + } as ApplicationFiles; + jest + .spyOn(service, 'createApplication') + .mockResolvedValue(mockApplication); + const result = controller.createApplication( + mockFiles, + mockApplication, + mockAbility, + mockReq, + ); + expect(result).rejects.toThrow(ForbiddenException); + expect(service.createApplication).toHaveBeenCalledTimes(0); + }); + + it('should throw a ConflictException if the user already has a pending application', async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Create, 'Application'); + }); + const mockReq = createMock(); + mockReq.user.sub = '123'; + const mockApplication = { type: ApplicationType.PHD } as Application; + const mockFiles = { + cv: createMock(), + } as ApplicationFiles; + jest + .spyOn(service, 'createApplication') + .mockResolvedValue(mockApplication); + jest + .spyOn(service, 'findActiveApplicationByApplicantId') + .mockResolvedValue(true); + const result = controller.createApplication( + mockFiles, + mockApplication, + mockAbility, + mockReq, + ); + expect(result).rejects.toThrow(ConflictException); + expect(service.createApplication).toHaveBeenCalledTimes(0); + expect(service.findActiveApplicationByApplicantId).toHaveBeenCalledTimes( + 1, + ); + expect(service.findActiveApplicationByApplicantId).toHaveBeenCalledWith( + mockReq.user.sub, + ); + }); + }); + + describe('updateApplication', () => { + it('should update an application if it exists and the user is allowed to update it', async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Update, 'Application', { applicantId: '1' }); + }); + const updateApplicationDTO = { notes: 'Nothing special here' }; + const mockReq = createMock(); + const expectedApplication = { + ...mockBscApplication, + id: mockBscApplication.id, + state: mockBscApplication.state, + submission: undefined, + mscAcademicYear: undefined, + mscGradesAvg: undefined, + mscStudyPath: undefined, + phdDescription: undefined, + } as ApplicationResponseDto; + delete 
expectedApplication['notes']; + const mockedApplication = { ...mockBscApplication, applicantId: '1' }; + jest + .spyOn(service, 'findByApplicationId') + .mockResolvedValue(mockedApplication); + jest + .spyOn(service, 'updateApplication') + .mockResolvedValue({ ...mockBscApplication, ...updateApplicationDTO }); + const result = await controller.updateApplication( + mockBscApplication.id, + updateApplicationDTO, + mockAbility, + mockReq, + ); + expect(result).toEqual(expectedApplication); + expect(service.updateApplication).toHaveBeenCalledTimes(1); + expect(service.updateApplication).toHaveBeenCalledWith({ + ...mockedApplication, + ...updateApplicationDTO, + lastModified: new Date(), + }); + expect(mockAbility.can).toHaveBeenCalled(); + }); + + it("should throw a NotFoundException if the application doesn't exist", async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Update, 'Application'); + }); + const mockReq = createMock(); + jest.spyOn(service, 'findByApplicationId').mockResolvedValue(null); + const result = controller.updateApplication( + mockBscApplication.id, + updateApplicationDTO, + mockAbility, + mockReq, + ); + await expect(result).rejects.toThrow(NotFoundException); + expect(service.findByApplicationId).toHaveBeenCalledTimes(1); + expect(service.findByApplicationId).toHaveBeenCalledWith(1); + expect(service.updateApplication).not.toHaveBeenCalled(); + }); + + it("should throw a ForbiddenException if the user can't update the application", async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Update, 'Application', { applicantId: '1' }); + }); + const mockReq = createMock(); + jest + .spyOn(service, 'findByApplicationId') + .mockResolvedValue({ ...mockBscApplication, applicantId: '2' }); + const result = controller.updateApplication( + mockBscApplication.id, + updateApplicationDTO, + mockAbility, + mockReq, + ); + await expect(result).rejects.toThrow(ForbiddenException); + expect(service.updateApplication).not.toHaveBeenCalled(); + expect(mockAbility.can).toHaveBeenCalled(); + }); + + it('should throw a BadRequestException if the applicant tried to set an invalid state', async () => { + const mockAbility = createMockAbility(({ can }) => { + can(Action.Update, 'Application', { applicantId: '1' }); + }); + const mockReq = createMock(); + mockReq.user.role = Role.Applicant; + jest + .spyOn(service, 'findByApplicationId') + .mockResolvedValue({ ...mockBscApplication, applicantId: '1' }); + const result = controller.updateApplication( + mockBscApplication.id, + updateApplicationDTO, + mockAbility, + mockReq, + ); + await expect(result).rejects.toThrow(BadRequestException); + expect(service.updateApplication).not.toHaveBeenCalled(); + expect(mockAbility.can).toHaveBeenCalled(); + }); + }); +}); diff --git a/api/src/application/applications.controller.ts b/api/src/application/applications.controller.ts new file mode 100644 index 0000000..d74a6b9 --- /dev/null +++ b/api/src/application/applications.controller.ts @@ -0,0 +1,279 @@ +import { + Body, + Controller, + ForbiddenException, + BadRequestException, + NotFoundException, + ConflictException, + Get, + Param, + Patch, + Post, + Query, + Req, + UploadedFiles, + UseInterceptors, + HttpStatus, + HttpException, + UnprocessableEntityException, +} from '@nestjs/common'; +import { Application } from './application.entity'; +import { ApplicationsService } from './applications.service'; +import { + Action, + AppAbility, + ApplicationState, + ApplicationType, + checkAbility, + 
createApplicationSchema,
+  Role,
+  updateApplicationSchema,
+} from '@hkrecruitment/shared';
+import { JoiValidate } from '../joi-validation/joi-validate.decorator';
+import {
+  ApiBadRequestResponse,
+  ApiBearerAuth,
+  ApiForbiddenResponse,
+  ApiNotFoundResponse,
+  ApiCreatedResponse,
+  ApiOkResponse,
+  ApiTags,
+  ApiUnauthorizedResponse,
+  ApiConflictResponse,
+  ApiQuery,
+  ApiConsumes,
+  ApiUnprocessableEntityResponse,
+} from '@nestjs/swagger';
+import { AuthenticatedRequest } from 'src/authorization/authenticated-request.types';
+import * as Joi from 'joi';
+import { CheckPolicies } from 'src/authorization/check-policies.decorator';
+import { Ability } from 'src/authorization/ability.decorator';
+import { ApplicationResponseDto } from './application-response.dto';
+import { CreateApplicationDto } from './create-application.dto';
+import { UpdateApplicationDto } from './update-application.dto';
+import { plainToClass } from 'class-transformer';
+import { FileFieldsInterceptor } from '@nestjs/platform-express';
+import { ApplicationFiles } from './application-types';
+
+@ApiBearerAuth()
+@ApiTags('applications')
+@Controller('applications')
+export class ApplicationsController {
+  constructor(private readonly applicationsService: ApplicationsService) {}
+
+  static MAX_UPLOAD_SIZE = 1024 * 1024 * 4; // 4MB
+
+  @Get()
+  @ApiUnauthorizedResponse()
+  @ApiForbiddenResponse()
+  @ApiQuery({
+    name: 'submittedFrom',
+    required: false,
+    type: 'string',
+  })
+  @ApiQuery({
+    name: 'submittedUntil',
+    required: false,
+    type: 'string',
+  })
+  @ApiQuery({
+    name: 'state',
+    required: false,
+    type: 'string',
+    enum: Object.values(ApplicationState),
+  })
+  @JoiValidate({
+    query: {
+      submittedFrom: Joi.date().iso().optional(),
+      submittedUntil: Joi.date().iso().optional(),
+      state: Joi.string(),
+    },
+  })
+  async listApplications(
+    @Ability() ability: AppAbility,
+    @Query('submittedFrom') submittedFrom?: string, // start date for time period
+    @Query('submittedUntil') submittedUntil?: string, // end date for time period
+    @Query('state') state?: string,
+  ): Promise<ApplicationResponseDto[]> {
+    const applications = await this.applicationsService.listApplications(
+      submittedFrom,
+      submittedUntil,
+      state,
+    );
+    return applications
+      .filter((application) =>
+        checkAbility(ability, Action.Read, application, 'Application'),
+      )
+      .map((application) => plainToClass(ApplicationResponseDto, application));
+  }
+
+  // TODO: Move this to applicants.controller.ts
+  // TODO: decide if we need an applicant controller
+  // @Get('applicants/:applicant_id/applications')
+  // @ApiForbiddenResponse()
+  // async listApplicationsOfApplicant(
+  //   @Param('applicant_id') applicantId: string,
+  // ): Promise<ApplicationResponseDto[]> {
+  //   const applications = await this.applicationsService.findByApplicantId(
+  //     applicantId,
+  //   );
+  //   return applications.map((application) => plainToClass(ApplicationResponseDto, application));
+  // }
+
+  @ApiNotFoundResponse()
+  @ApiForbiddenResponse()
+  @Get(':application_id')
+  @JoiValidate({
+    param: Joi.number().positive().integer().required().label('application_id'),
+    body: Joi.object().forbidden(),
+  })
+  @CheckPolicies((ability) => ability.can(Action.Read, 'Application'))
+  async getApplication(
+    @Ability() ability: AppAbility,
+    @Param('application_id') applicationId: number,
+  ): Promise<ApplicationResponseDto> {
+    const application = await this.applicationsService.findByApplicationId(
+      applicationId,
+    );
+
+    if (!application) throw new NotFoundException();
+    if (!checkAbility(ability, Action.Read, application, 'Application'))
+      throw new ForbiddenException();
+
+    return plainToClass(ApplicationResponseDto, application);
+  }
+
+  @ApiBadRequestResponse()
+  @ApiForbiddenResponse()
+  @ApiUnprocessableEntityResponse({
+    description: 'Invalid "cv" or "grades" file type or size',
+  })
+  @ApiConflictResponse({
+    description: 'User already has a pending application',
+  })
+  @ApiCreatedResponse()
+  @ApiConsumes('multipart/form-data')
+  @JoiValidate({
+    body: createApplicationSchema,
+  })
+  @UseInterceptors(
+    FileFieldsInterceptor(
+      [
+        { name: 'cv', maxCount: 1 },
+        { name: 'grades', maxCount: 1 },
+      ],
+      {
+        limits: {
+          fileSize: ApplicationsController.MAX_UPLOAD_SIZE,
+        },
+        fileFilter: (
+          req: Request,
+          file: Express.Multer.File,
+          callback: (error: Error, acceptFile: boolean) => void,
+        ) => {
+          if (file.mimetype !== 'application/pdf')
+            return callback(
+              new HttpException(
+                `${file.fieldname} is not a valid .pdf document`,
+                HttpStatus.UNPROCESSABLE_ENTITY,
+              ),
+              false,
+            );
+          return callback(null, true);
+        },
+      },
+    ),
+  )
+  @Post('/')
+  async createApplication(
+    @UploadedFiles() files: ApplicationFiles,
+    @Body() application: CreateApplicationDto,
+    @Ability() ability: AppAbility,
+    @Req() req: AuthenticatedRequest,
+  ): Promise<Application> {
+    if (!files || !files.cv) {
+      throw new UnprocessableEntityException('CV file is required');
+    }
+
+    if (
+      application.type !== ApplicationType.PHD &&
+      files.grades === undefined
+    ) {
+      // grades are required for non-phd applications
+      throw new UnprocessableEntityException('Grades file is required');
+    }
+
+    if (!checkAbility(ability, Action.Create, application, 'Application'))
+      throw new ForbiddenException();
+
+    // Get the user unique identifier from the request
+    const applicantId = req.user.sub;
+
+    // An applicant can have only one application with (state != finalized || state != refused_by_applicant)
+    const hasActiveApplication =
+      await this.applicationsService.findActiveApplicationByApplicantId(
+        applicantId,
+      );
+    if (hasActiveApplication)
+      throw new ConflictException(
+        'Applicant already has a pending application',
+      );
+
+    return await this.applicationsService.createApplication(
+      application,
+      files,
+      applicantId,
+    );
+  }
+
+  @Patch(':application_id')
+  @ApiBadRequestResponse()
+  @ApiForbiddenResponse()
+  @ApiOkResponse()
+  @JoiValidate({
+    param: Joi.number().positive().integer().required().label('application_id'),
+    body: updateApplicationSchema,
+  })
+  async updateApplication(
+    @Param('application_id') applicationId: number,
+    @Body() updateApplication: UpdateApplicationDto,
+    @Ability() ability: AppAbility,
+    @Req() req: AuthenticatedRequest,
+  ): Promise<ApplicationResponseDto> {
+    const application = await this.applicationsService.findByApplicationId(
+      applicationId,
+    );
+    if (application === null) throw new NotFoundException();
+
+    const appToCheck = {
+      ...updateApplication,
+      applicantId: application.applicantId,
+    };
+    if (
+      !checkAbility(ability, Action.Update, appToCheck, 'Application', [
+        'applicantId',
+      ])
+    )
+      throw new ForbiddenException();
+
+    // Applicants can only update status to "refused_by_applicant"
+    if (
+      req.user.role === Role.Applicant &&
+      (updateApplication.state !== ApplicationState.RefusedByApplicant ||
+        Object.keys(updateApplication).length !== 1)
+    )
+      throw new BadRequestException(
+        "You can only update your application state to 'refused_by_applicant'",
+      );
+
+    const updatedApplication = await this.applicationsService.updateApplication(
+      {
+        ...application,
+        ...updateApplication,
+        lastModified: new Date(),
+      },
+    );
+
+    return
plainToClass(ApplicationResponseDto, updatedApplication); + } +} diff --git a/api/src/application/applications.module.ts b/api/src/application/applications.module.ts new file mode 100644 index 0000000..72cb944 --- /dev/null +++ b/api/src/application/applications.module.ts @@ -0,0 +1,14 @@ +import { Module } from '@nestjs/common'; +import { ApplicationsService } from './applications.service'; +import { ApplicationsController } from './applications.controller'; +import { TypeOrmModule } from '@nestjs/typeorm'; +import { Application } from './application.entity'; +import { UsersModule } from 'src/users/users.module'; + +@Module({ + imports: [TypeOrmModule.forFeature([Application]), UsersModule], + providers: [ApplicationsService], + controllers: [ApplicationsController], + exports: [ApplicationsService], +}) +export class ApplicationsModule {} diff --git a/api/src/application/applications.service.spec.ts b/api/src/application/applications.service.spec.ts new file mode 100644 index 0000000..db7ab56 --- /dev/null +++ b/api/src/application/applications.service.spec.ts @@ -0,0 +1,344 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { ApplicationsService } from './applications.service'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { Application } from './application.entity'; +import { ApplicationState, ApplicationType } from '@hkrecruitment/shared'; +import { UsersService } from '../users/users.service'; +import { mockedRepository } from '@mocks/repositories'; +import { mockedUsersService } from '@mocks/services'; +import { + applicant, + applicationFiles, + mockBscApplication, + mockMscApplication, + mockPhdApplication, + mockCreateBscApplicationDTO, + mockCreateMscApplicationDTO, + mockCreatePhdApplicationDTO, + fileId, + applicantId, + folderId, + today, + testDate, +} from '@mocks/data'; +import { flattenApplication } from './create-application.dto'; +import { InternalServerErrorException } from '@nestjs/common'; + +const mockedGDrive = { + getFolderByName: jest.fn(), + insertFile: jest.fn(), + deleteItem: jest.fn(), + constructor: jest.fn(), +}; + +// Mock GDriveStorage +jest.mock('../google/GDrive/GDriveStorage', () => { + return { + GDriveStorage: jest.fn().mockImplementation(() => { + return mockedGDrive; + }), + }; +}); + +describe('ApplicationsService', () => { + let applicationService: ApplicationsService; + let usersService: UsersService; + + /************* Test setup ************/ + + beforeAll(() => { + jest + .spyOn(global, 'Date') + .mockImplementation(() => testDate as unknown as string); + }); + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + ApplicationsService, + { + provide: getRepositoryToken(Application), + useValue: mockedRepository, + }, + { + provide: UsersService, + useValue: mockedUsersService, + }, + ], + }).compile(); + + applicationService = module.get(ApplicationsService); + usersService = module.get(UsersService); + }); + + afterEach(() => jest.clearAllMocks()); + + /*************** Tests ***************/ + + it('should be defined', () => { + expect(applicationService).toBeDefined(); + }); + + describe('findAll', () => { + it('should return an array of applications', async () => { + const applications: Application[] = [mockBscApplication]; + jest.spyOn(mockedRepository, 'find').mockResolvedValue(applications); + const result = await applicationService.findAll(); + + expect(result).toEqual(applications); + expect(mockedRepository.find).toHaveBeenCalledTimes(1); + 
expect(mockedRepository.find).toHaveBeenCalledWith(); + }); + }); + + describe('findByApplicationId', () => { + it('should return an application by id', async () => { + const applicationId = 1; + jest + .spyOn(mockedRepository, 'findBy') + .mockResolvedValue([mockBscApplication]); + const result = await applicationService.findByApplicationId( + applicationId, + ); + + expect(result).toEqual(mockBscApplication); + expect(mockedRepository.findBy).toHaveBeenCalledTimes(1); + expect(mockedRepository.findBy).toHaveBeenCalledWith({ + id: applicationId, + }); + }); + + it('should return null when no application is found', async () => { + const applicationId = 2; + jest.spyOn(mockedRepository, 'findBy').mockResolvedValue([]); + const result = await applicationService.findByApplicationId( + applicationId, + ); + + expect(result).toBeNull(); + expect(mockedRepository.findBy).toHaveBeenCalledTimes(1); + expect(mockedRepository.findBy).toHaveBeenCalledWith({ + id: applicationId, + }); + }); + }); + + describe('findByApplicantId', () => { + it('should return an array of applications for the specified applicant', async () => { + const applicantId = 'abc123'; + const applications: Application[] = [mockBscApplication]; + jest.spyOn(mockedRepository, 'findBy').mockResolvedValue(applications); + const result = await applicationService.findByApplicantId(applicantId); + + expect(result).toEqual(applications); + expect(mockedRepository.findBy).toHaveBeenCalledTimes(1); + expect(mockedRepository.findBy).toHaveBeenCalledWith({ applicantId }); + }); + }); + + describe('findActiveApplicationByApplicantId', () => { + it('should return true when an active application exists for the specified applicant', async () => { + const applicantId = 'abc123'; + const activeApplication: Application = { + ...mockBscApplication, + state: ApplicationState.New, + }; + jest + .spyOn(mockedRepository, 'findBy') + .mockResolvedValue([activeApplication]); + const result = + await applicationService.findActiveApplicationByApplicantId( + applicantId, + ); + + expect(result).toBe(true); + expect(mockedRepository.findBy).toHaveBeenCalledTimes(1); + }); + + it('should return false when no application exists for the specified applicant', async () => { + const applicantId = 'abc123'; + jest.spyOn(mockedRepository, 'findBy').mockResolvedValue([]); + const result = + await applicationService.findActiveApplicationByApplicantId( + applicantId, + ); + + expect(result).toBe(false); + expect(mockedRepository.findBy).toHaveBeenCalledTimes(1); + }); + }); + + describe('listApplications', () => { + it('should return an array of applications based on the provided conditions', async () => { + const conditions = { + submission: { $gte: new Date(2023, 0, 1), $lte: new Date(2023, 0, 31) }, + state: ApplicationState.New, + }; + const applications: Application[] = [mockBscApplication]; + jest.spyOn(mockedRepository, 'findBy').mockResolvedValue(applications); + const result = await applicationService.listApplications( + conditions.submission.$gte.toISOString(), + conditions.submission.$lte.toISOString(), + conditions.state, + ); + + expect(result).toEqual(applications); + expect(mockedRepository.findBy).toHaveBeenCalledTimes(1); + }); + }); + + describe('delete', () => { + it('should remove the specified application from the database', async () => { + jest + .spyOn(mockedRepository, 'remove') + .mockResolvedValue(mockBscApplication); + + const result = await applicationService.delete(mockBscApplication); + expect(result).toEqual(mockBscApplication); + 
expect(mockedRepository.remove).toHaveBeenCalledTimes(1); + }); + }); + + function createTestApplication(applicationType: ApplicationType) { + it(`should create and return a new ${applicationType} application`, async () => { + const applicantId = 'abc123'; + const folderId = 'folder_abc123'; + const fileId = 'file_abc123'; + const today = '1/1/2023, 24:00:00'; + let mockApplication, mockCreateApplicationDTO; + switch (applicationType) { + case ApplicationType.BSC: + mockApplication = mockBscApplication; + mockCreateApplicationDTO = mockCreateBscApplicationDTO; + break; + case ApplicationType.MSC: + mockApplication = mockMscApplication; + mockCreateApplicationDTO = mockCreateMscApplicationDTO; + break; + case ApplicationType.PHD: + mockApplication = mockPhdApplication; + mockCreateApplicationDTO = mockCreatePhdApplicationDTO; + break; + } + const expectedCvFileName = `CV_${applicationType}_${applicant.firstName}_${applicant.lastName}_${today}`; + const expectedGradesFileName = `Grades_${applicationType}_${applicant.firstName}_${applicant.lastName}_${today}`; + const newApplication: Application = { + ...mockApplication, + applicantId, + state: ApplicationState.New, + lastModified: testDate, + submission: testDate, + }; + let expectedFileInertions; + const mockApplicationFiles = { ...applicationFiles }; + if (applicationType === ApplicationType.PHD) { + delete mockApplicationFiles.grades; // Grades are not required for PhD applications + expectedFileInertions = 1; + } else { + expectedFileInertions = 2; + } + + jest.spyOn(mockedRepository, 'save').mockResolvedValue(newApplication); + jest.spyOn(usersService, 'findByOauthId').mockResolvedValue(applicant); + jest.spyOn(mockedGDrive, 'getFolderByName').mockResolvedValue(folderId); + jest.spyOn(mockedGDrive, 'insertFile').mockResolvedValue(fileId); + + const result = await applicationService.createApplication( + mockCreateApplicationDTO, + mockApplicationFiles, + applicantId, + ); + const flattenedMockCreateApplicationDTOO = flattenApplication( + mockCreateApplicationDTO, + ); + + expect(result).toEqual(newApplication); + expect(mockedRepository.save).toHaveBeenCalledTimes(1); + expect(mockedRepository.save).toHaveBeenCalledWith( + flattenedMockCreateApplicationDTOO, + ); + expect(usersService.findByOauthId).toHaveBeenCalledTimes(1); + expect(usersService.findByOauthId).toHaveBeenCalledWith(applicantId); + expect(mockedGDrive.getFolderByName).toHaveBeenCalledTimes(1); + expect(mockedGDrive.getFolderByName).toHaveBeenCalledWith( + ApplicationsService.APPLICATIONS_FOLDER, + ); + expect(mockedGDrive.insertFile).toHaveBeenCalledTimes( + expectedFileInertions, + ); + expect(mockedGDrive.insertFile).toHaveBeenCalledWith( + expectedCvFileName, + applicationFiles.cv[0].buffer, + folderId, + ); + if (applicationType !== ApplicationType.PHD) + expect(mockedGDrive.insertFile).toHaveBeenCalledWith( + expectedGradesFileName, + applicationFiles.grades[0].buffer, + folderId, + ); + }); + } + + describe('createApplication', () => { + createTestApplication(ApplicationType.BSC); + createTestApplication(ApplicationType.MSC); + createTestApplication(ApplicationType.PHD); + + it('deletes uploaded google drive documents if an exception is thrown', async () => { + const expectedCvFileName = `CV_bsc_${applicant.firstName}_${applicant.lastName}_${today}`; + const expectedGradesFileName = `Grades_bsc_${applicant.firstName}_${applicant.lastName}_${today}`; + const testError = 'Test error'; + + jest.spyOn(mockedRepository, 'save').mockRejectedValue(testError); + 
jest.spyOn(usersService, 'findByOauthId').mockResolvedValue(applicant);
+      jest.spyOn(mockedGDrive, 'getFolderByName').mockResolvedValue(folderId);
+      jest.spyOn(mockedGDrive, 'insertFile').mockResolvedValue(fileId);
+      jest.spyOn(mockedGDrive, 'deleteItem').mockResolvedValue({});
+
+      await expect(
+        applicationService.createApplication(
+          mockCreateBscApplicationDTO,
+          applicationFiles,
+          applicantId,
+        ),
+      ).rejects.toThrow(InternalServerErrorException);
+      expect(usersService.findByOauthId).toHaveBeenCalledTimes(1);
+      expect(usersService.findByOauthId).toHaveBeenCalledWith(applicantId);
+
+      expect(mockedGDrive.insertFile).toHaveBeenCalledTimes(2);
+      expect(mockedGDrive.insertFile).toHaveBeenCalledWith(
+        expectedCvFileName,
+        applicationFiles.cv[0].buffer,
+        folderId,
+      );
+      expect(mockedGDrive.insertFile).toHaveBeenCalledWith(
+        expectedGradesFileName,
+        applicationFiles.grades[0].buffer,
+        folderId,
+      );
+      expect(mockedGDrive.deleteItem).toHaveBeenCalledTimes(2);
+      expect(mockedGDrive.deleteItem).toHaveBeenCalledWith(fileId);
+      expect(mockedGDrive.deleteItem).toHaveBeenCalledWith(fileId);
+    });
+  });
+
+  describe('updateApplication', () => {
+    it('should update and return an existing application', async () => {
+      const updatedApplication: Application = {
+        ...mockBscApplication,
+        state: ApplicationState.Accepted,
+      };
+      jest
+        .spyOn(mockedRepository, 'save')
+        .mockResolvedValue(updatedApplication);
+      const result = await applicationService.updateApplication(
+        mockBscApplication,
+      );
+
+      expect(result).toEqual(updatedApplication);
+      expect(mockedRepository.save).toHaveBeenCalledTimes(1);
+      expect(mockedRepository.save).toHaveBeenCalledWith(mockBscApplication);
+    });
+  });
+});
diff --git a/api/src/application/applications.service.ts b/api/src/application/applications.service.ts
new file mode 100644
index 0000000..079b473
--- /dev/null
+++ b/api/src/application/applications.service.ts
@@ -0,0 +1,146 @@
+import {
+  Injectable,
+  InternalServerErrorException,
+  NotFoundException,
+} from '@nestjs/common';
+import { InjectRepository } from '@nestjs/typeorm';
+import { Repository, Between, Not, In } from 'typeorm';
+import { Application } from './application.entity';
+import { GDriveStorage } from '../google/GDrive/GDriveStorage';
+import {
+  CreateApplicationDto,
+  flattenApplication,
+} from './create-application.dto';
+import { ApplicationState, ApplicationType } from '@hkrecruitment/shared';
+import { UsersService } from '../users/users.service';
+import { ApplicationFiles } from './application-types';
+
+@Injectable()
+export class ApplicationsService {
+  constructor(
+    @InjectRepository(Application)
+    private readonly applicationRepository: Repository<Application>,
+    private readonly usersService: UsersService,
+  ) {}
+
+  static APPLICATIONS_FOLDER = 'applications'; // Google Drive folder name
+
+  async findAll(): Promise<Application[]> {
+    return this.applicationRepository.find();
+  }
+
+  async findByApplicationId(
+    applicationId: number,
+  ): Promise<Application | null> {
+    const matches = await this.applicationRepository.findBy({
+      id: applicationId,
+    });
+    return matches.length > 0 ? matches[0] : null;
+  }
+
+  async findByApplicantId(applicantId: string): Promise<Application[]> {
+    return await this.applicationRepository.findBy({ applicantId });
+  }
+
+  async findActiveApplicationByApplicantId(
+    applicantId: string,
+  ): Promise<boolean> {
+    const match = await this.applicationRepository.findBy({
+      applicantId,
+      // Search only for applications that are still pending
+      state: Not(In(['finalized', 'rejected', 'refused_by_applicant'])),
+    });
+    return match.length > 0;
+  }
+
+  async listApplications(
+    submittedFrom: string,
+    submittedUntil: string,
+    state: string,
+  ): Promise<Application[]> {
+    const conditions = {};
+
+    // Add time range condition if both dates are specified
+    if (submittedFrom && submittedUntil)
+      conditions['submission'] = Between(submittedFrom, submittedUntil);
+
+    // Add state condition when "state" is specified
+    if (state) conditions['state'] = state;
+    // Retrieve applications
+    return await this.applicationRepository.findBy(conditions);
+  }
+
+  async delete(application: Application): Promise<Application> {
+    return this.applicationRepository.remove(application);
+  }
+
+  async createApplication(
+    application: CreateApplicationDto,
+    files: ApplicationFiles,
+    applicantId: string,
+  ): Promise<Application> {
+    // Get applicant full name
+    const applicant = await this.usersService.findByOauthId(applicantId);
+    if (!applicant) throw new NotFoundException('Applicant not found');
+    const applicantFullName = `${applicant.firstName}_${applicant.lastName}`;
+    const today = new Date();
+
+    // TODO: Create an Interview and set application.interview_id
+    application.submission = today;
+    application.state = ApplicationState.New;
+    application.applicantId = applicantId;
+
+    const storage = new GDriveStorage();
+    let gradesFileId = null;
+    let cvFileId = null;
+
+    // Save files to Google Drive
+    try {
+      const applicationsFolder = await storage.getFolderByName(
+        ApplicationsService.APPLICATIONS_FOLDER,
+      );
+      const formattedDatetime = today.toLocaleString('en-US', {
+        hour12: false,
+      });
+      const fileName = `${application.type}_${applicantFullName}_${formattedDatetime}`;
+      // TODO: Create a folder for each applicant? Give it a unique name
+
+      // Save CV
+      cvFileId = await storage.insertFile(
+        `CV_${fileName}`,
+        files.cv[0].buffer,
+        applicationsFolder,
+      );
+      application.cv = cvFileId;
+
+      // Save grades
+      if (files.grades) {
+        const applicationType =
+          application.type === ApplicationType.BSC
+            ? 'bscApplication'
+            : 'mscApplication';
+        gradesFileId = await storage.insertFile(
+          `Grades_${fileName}`,
+          files.grades[0].buffer,
+          applicationsFolder,
+        );
+        application[applicationType].grades = gradesFileId;
+      }
+      return await this.applicationRepository.save(
+        flattenApplication(application),
+      );
+    } catch (err) {
+      console.log('Error caught: ', err);
+      // Delete files from Google Drive
+      if (cvFileId) await storage.deleteItem(cvFileId);
+      if (gradesFileId) await storage.deleteItem(gradesFileId);
+      throw new InternalServerErrorException();
+    }
+  }
+
+  async updateApplication(application: Application): Promise<Application> {
+    return await this.applicationRepository.save(
+      flattenApplication(application),
+    );
+  }
+}
diff --git a/api/src/application/create-application.dto.ts b/api/src/application/create-application.dto.ts
new file mode 100644
index 0000000..ab6a210
--- /dev/null
+++ b/api/src/application/create-application.dto.ts
@@ -0,0 +1,118 @@
+import {
+  Application,
+  ApplicationState,
+  ApplicationType,
+  LangLevel,
+} from '@hkrecruitment/shared';
+import { ApiProperty } from '@nestjs/swagger';
+import {
+  BscApplication,
+  MscApplication,
+  PhdApplication,
+} from './application.entity';
+
+class CreateBscApplicationDto extends BscApplication {
+  @ApiProperty()
+  bscStudyPath: string;
+
+  @ApiProperty()
+  bscAcademicYear: number;
+
+  @ApiProperty()
+  bscGradesAvg: number;
+
+  @ApiProperty()
+  cfu: number;
+
+  @ApiProperty({ type: 'string', format: 'binary' })
+  grades: any;
+}
+
+class CreateMscApplicationDto extends MscApplication {
+  @ApiProperty({ required: false })
+  bscStudyPath: string;
+
+  @ApiProperty({ required: false })
+  bscGradesAvg: number;
+
+  @ApiProperty()
+  mscAcademicYear: number;
+
+  @ApiProperty()
+  mscStudyPath: string;
+
+  @ApiProperty()
+  mscGradesAvg: number;
+
+  @ApiProperty()
+  cfu: number;
+
+  @ApiProperty({ type: 'string', format: 'binary' })
+  grades: any;
+}
+
+class CreatePhdApplicationDto extends PhdApplication {
+  @ApiProperty()
+  mscStudyPath: string;
+
+  @ApiProperty()
+  phdDescription: string;
+}
+
+export class CreateApplicationDto implements Partial<Application> {
+  @ApiProperty({ enum: ApplicationType })
+  type: ApplicationType;
+
+  @ApiProperty({ required: false })
+  notes?: string;
+
+  @ApiProperty({ type: 'string', format: 'binary' })
+  cv: any;
+
+  @ApiProperty({ enum: LangLevel })
+  itaLevel: LangLevel;
+
+  // @ApiProperty()
+  // availability: TimeSlot;
+
+  @ApiProperty({ required: false })
+  bscApplication?: CreateBscApplicationDto;
+
+  @ApiProperty({ required: false })
+  mscApplication?: CreateMscApplicationDto;
+
+  @ApiProperty({ required: false })
+  phdApplication?: CreatePhdApplicationDto;
+
+  /* Internal fields */
+
+  state?: ApplicationState;
+
+  submission?: Date;
+
+  lastModified?: Date;
+
+  applicantId?: string;
+}
+
+export function flattenApplication(
+  application: CreateApplicationDto,
+): Application {
+  const newApplication: Application = {
+    ...application,
+    ...application.bscApplication,
+    ...application.mscApplication,
+    ...application.phdApplication,
+  };
+  delete newApplication.bscApplication;
+  delete newApplication.mscApplication;
+  delete newApplication.phdApplication;
+  switch (newApplication.type) {
+    case ApplicationType.BSC:
+      return newApplication as BscApplication;
+    case ApplicationType.MSC:
+      return newApplication as MscApplication;
+    case ApplicationType.PHD:
+      return newApplication as PhdApplication;
+  }
+}
diff --git a/api/src/application/update-application.dto.ts b/api/src/application/update-application.dto.ts
new file mode 100644
index 0000000..a2d459f
--- /dev/null
+++ b/api/src/application/update-application.dto.ts
@@ -0,0 +1,21 @@
+import { Application, ApplicationState } from '@hkrecruitment/shared';
+import { ApiProperty } from '@nestjs/swagger';
+
+class CustomMessage {
+  subject: string;
+  body: string;
+}
+
+export class UpdateApplicationDto implements Partial<Application> {
+  @ApiProperty({ required: false })
+  state?: ApplicationState;
+
+  @ApiProperty({ required: false })
+  notes?: string;
+
+  // @ApiProperty({ required: false })
+  // customMessage?: CustomMessage;
+
+  // @ApiProperty({ required: false })
+  // additionalText?: string;
+}
diff --git a/api/src/authorization/authorization.guard.ts b/api/src/authorization/authorization.guard.ts
index f5a6dc4..0eecac5 100644
--- a/api/src/authorization/authorization.guard.ts
+++ b/api/src/authorization/authorization.guard.ts
@@ -1,4 +1,4 @@
-import { abilityForUser, getRoleChangeChecker } from '@hkrecruitment/shared';
+import { getRoleChangeChecker } from '@hkrecruitment/shared';
 import {
   CanActivate,
   ExecutionContext,
diff --git a/api/src/google/GAuth/GAuth.ts b/api/src/google/GAuth/GAuth.ts
new file mode 100644
index 0000000..3d6ddf7
--- /dev/null
+++ b/api/src/google/GAuth/GAuth.ts
@@ -0,0 +1,136 @@
+/*
+ * Copyright (c) 2021 Riccardo Zaccone
+ *
+ * This file is part of api.
+ * api is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with api. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+/*
+ * File: GAuth.ts
+ * Project: api
+ * Authors:
+ *   Riccardo Zaccone
+ *
+ * Created on 23 aprile 2021, 10:35
+ */
+
+import { OAuth2Client } from 'google-auth-library';
+
+const fsp = require('fs').promises;
+const readline = require('readline');
+const { google } = require('googleapis');
+
+// If modifying these scopes, delete token.json.
+const SCOPES = [
+  'https://www.googleapis.com/auth/gmail.send',
+  'https://www.googleapis.com/auth/calendar',
+  'https://www.googleapis.com/auth/drive',
+];
+
+// The file token.json stores the user's access and refresh tokens, and is
+// created automatically when the authorization flow completes for the first
+// time.
+const TOKEN_PATH = './src/google/GAuth/authInfo/token.json';
+const CREDENTIALS_PATH = './src/google/GAuth/authInfo/credentials.json';
+
+let googleClient: OAuth2Client | null = null;
+
+export function getAuth(): Promise<OAuth2Client> {
+  return new Promise<OAuth2Client>((resolve, reject) => {
+    if (googleClient == null) {
+      authorize()
+        .then((client: OAuth2Client) => {
+          googleClient = client;
+          resolve(googleClient);
+        })
+        .catch((err) => reject(err));
+    } else resolve(googleClient);
+  });
+}
+
+function authorize(): Promise<OAuth2Client> {
+  //read credentials
+  return new Promise<OAuth2Client>((resolve, reject) => {
+    fsp
+      .readFile(CREDENTIALS_PATH)
+      .then(async (content: string) => {
+        const credentials = JSON.parse(content);
+        const { client_secret, client_id, redirect_uris } =
+          credentials.installed;
+
+        //get an OAuth2Client
+        const oAuth2Client = new google.auth.OAuth2(
+          client_id,
+          client_secret,
+          redirect_uris[0],
+        );
+        // Check if we have previously stored a token.
+        fsp
+          .readFile(TOKEN_PATH)
+          .then((token: string) => {
+            oAuth2Client.setCredentials(JSON.parse(token));
+            resolve(oAuth2Client);
+          })
+          .catch(() => {
+            // error on reading the token
+            getNewToken(oAuth2Client)
+              .then((client: OAuth2Client) => {
+                resolve(client);
+              })
+              .catch((err) => {
+                reject(err);
+              });
+          });
+      })
+      .catch((err: any) => {
+        reject(err);
+      }); //error on reading credentials
+  });
+}
+
+/**
+ * Get and store new token after prompting for user authorization,
+ * configuring the OAuth2Client received as parameter
+ * @param {google.auth.OAuth2} oAuth2Client The OAuth2 client to get token for.
+ * @return {Promise<OAuth2Client>} Promise object containing the configured oAuth2Client
+ */
+async function getNewToken(oAuth2Client: OAuth2Client): Promise<OAuth2Client> {
+  const authUrl = oAuth2Client.generateAuthUrl({
+    access_type: 'offline',
+    scope: SCOPES,
+  });
+  console.log('Authorize this app by visiting this url:', authUrl);
+  const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout,
+  });
+  rl.question('Enter the code from that page here: ');
+  const it = rl[Symbol.asyncIterator]();
+  const line = await it.next();
+  const code = line.value;
+  rl.close();
+  return new Promise<OAuth2Client>((resolve, reject) => {
+    oAuth2Client.getToken(code, (err, token) => {
+      if (err || !token) reject('Error retrieving access token: ' + err);
+      else {
+        oAuth2Client.setCredentials(token);
+        // Store the token to disk for later program executions
+        fsp
+          .writeFile(TOKEN_PATH, JSON.stringify(token))
+          .then(() => resolve(oAuth2Client))
+          .catch((err: any) => reject(err));
+      }
+    });
+  });
+}
diff --git a/api/src/google/GDrive/FileStorageInterface.ts b/api/src/google/GDrive/FileStorageInterface.ts
new file mode 100644
index 0000000..e7327a3
--- /dev/null
+++ b/api/src/google/GDrive/FileStorageInterface.ts
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2021 Riccardo Zaccone
+ *
+ * This file is part of api.
+ * api is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with api. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+/*
+ * File: FileStorageInterface.ts
+ * Project: api
+ * Authors:
+ *   Riccardo Zaccone
+ *
+ * Created on 19 aprile 2021, 10:58
+ */
+
+/**
+ * Represent a file storage service
+ * @interface
+ */
+export interface FileStorageInterface {
+  /**
+   * Creates a folder
+   * @param name the name to assign to the folder to be created
+   * @param parent_folder? the id of the parent folder
+   * @return {Promise<string>} the id of the created folder
+   */
+  createFolder(name: string, parent_folder?: string): Promise<string>;
+
+  /**
+   * Uploads a file
+   * @param name the name to assign to the file to be created
+   * @param rawData the data to upload
+   * @param parent_folder? the id of the parent folder
+   * @return {Promise<string>} the id of the created file
+   */
+  insertFile(
+    name: string,
+    rawData: Uint8Array,
+    parent_folder?: string,
+  ): Promise<string>;
+
+  /**
+   * Creates a file copying a file from the same service
+   * @param file_id the id of the file to be copied
+   * @param name the name to assign to the file to be created
+   * @param parent_folder? the id of the parent folder
+   * @return {Promise<string>} the id of the created file
+   */
+  copyFileFromStorage(
+    file_id: string,
+    name: string,
+    parent_folder?: string,
+  ): Promise<string>;
+
+  /**
+   * Deletes a file or directory
+   * @param item_id the id of the file to be deleted
+   * @return {Promise<number>} the HTTP status code of the response
+   */
+  deleteItem(item_id: string): Promise<number>;
+
+  /**
+   * Retrieves the id of a folder given its name
+   * @param name the name of the target folder
+   * @param parent_folder? the id of the folder to search into
+   * @return {Promise<string>} the id of the folder
+   */
+  getFolderByName(name: string, parent_folder?: string): Promise<string>;
+
+  /**
+   * Extracts the id of item (file or folder) from a string
+   * @param src a string containing the id of the resource
+   * @return {string} the id of the resource
+   */
+  extractIdFrom(src: string): string;
+}
diff --git a/api/src/google/GDrive/GDriveStorage.spec.ts b/api/src/google/GDrive/GDriveStorage.spec.ts
new file mode 100644
index 0000000..34c132e
--- /dev/null
+++ b/api/src/google/GDrive/GDriveStorage.spec.ts
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2021 Riccardo Zaccone
+ *
+ * This file is part of api.
+ * api is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with api. If not, see <https://www.gnu.org/licenses/>.
+ */ + +/* + * File: GDriveStorage.test.ts + * Project: api + * Authors: + * Riccardo Zaccone + * + * Created on 04 maggio 2021, 15:09 + */ + +import { GDriveStorage } from './GDriveStorage'; + +describe.skip('GDriveStorage Test', () => { + const storage = new GDriveStorage(); + const folderName = 'Test Folder (you can safely remove)'; + let folder_id: string; + let file_id: string; + const sharedFileUrl = + 'https://drive.google.com/file/d/12ZGHhDjxYMj-10gPnmAHFGQpZejMZOfC/view?usp=share_link'; + const sharedFileId = '12ZGHhDjxYMj-10gPnmAHFGQpZejMZOfC'; + let copied_file_id: string; + + describe('Resource creation', () => { + it('createFolder', () => { + return storage.createFolder(folderName).then((id) => { + folder_id = id; + expect(id).toBeDefined(); + }); + }); + + it('insertFile', () => { + return storage + .insertFile('Test File (you can safely remove)', new Uint8Array([42])) + .then((id) => { + file_id = id; + expect(id).toBeDefined(); + }); + }); + + it('copyFileFromStorage', () => { + return storage + .copyFileFromStorage( + sharedFileId, + 'Test Shared File (you can safely remove)', + ) + .then((id) => { + copied_file_id = id; + expect(id).toBeDefined(); + }); + }); + + it('getFolderByName', () => { + return expect(storage.getFolderByName(folderName)).resolves.toBe( + folder_id, + ); + }); + + it('fromUrlToId', () => { + expect(storage.extractIdFrom(sharedFileUrl)).toBe(sharedFileId); + }); + + describe('Resource creation inside folder', () => { + let file_id2: string; + let folder_id2: string; + + it('createFolder', () => { + return storage.createFolder(folderName, folder_id).then((id) => { + folder_id2 = id; + expect(id).toBeDefined(); + }); + }); + + it('insertFile', () => { + return storage + .insertFile( + 'Test File (you can safely remove)', + new Uint8Array([42]), + folder_id, + ) + .then((id) => { + file_id2 = id; + expect(id).toBeDefined(); + }); + }); + + it('Multiple folders with same name inside same folder forbidden', () => { + return expect(storage.createFolder(folderName)).rejects.toMatch( + 'Multiple folders with same name inside the same folder', + ); + }); + + describe('Resource deletion', () => { + it('deleteFile', () => { + //clean all the things done + expect(storage.deleteItem(file_id)).resolves.toBe(204); + expect(storage.deleteItem(file_id2)).resolves.toBe(204); + expect(storage.deleteItem(copied_file_id)).resolves.toBe(204); + expect(storage.deleteItem(folder_id2)).resolves.toBe(204); + return expect(storage.deleteItem(folder_id)).resolves.toBe(204); + }); + }); + }); + }); +}); diff --git a/api/src/google/GDrive/GDriveStorage.ts b/api/src/google/GDrive/GDriveStorage.ts new file mode 100644 index 0000000..fc95e28 --- /dev/null +++ b/api/src/google/GDrive/GDriveStorage.ts @@ -0,0 +1,192 @@ +/* + * Copyright (c) 2021 Riccardo Zaccone + * + * This file is part of api. + * api is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * api is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + + * You should have received a copy of the GNU General Public License + * along with api. If not, see . 
+ */ + +/* + * File: GDriveStorage.ts + * Project: api + * Authors: + * Riccardo Zaccone + * + * Created on 19 aprile 2021, 11:21 + */ + +import { FileStorageInterface } from './FileStorageInterface'; +import { getAuth } from '../GAuth/GAuth'; +import { google } from 'googleapis'; +import { OAuth2Client } from 'google-auth-library'; +import { Readable } from 'stream'; + +/** + * @class GDriveStorage implements a storage service using Google Drive + * @inheritDoc + * @implements FileStorageInterface + */ +export class GDriveStorage implements FileStorageInterface { + copyFileFromStorage( + file_id: string, + name: string, + parent_folder?: string, + ): Promise { + return new Promise((resolve, reject) => { + getAuth() + .then((auth: OAuth2Client) => { + const drive = google.drive({ version: 'v3', auth }); + // @ts-ignore + drive.files + .copy({ + fileId: file_id, + resource: { + parents: parent_folder ? [parent_folder] : undefined, + name: name, + }, + supportsAllDrives: true, + fields: 'id', + }) + .then( + ( + file, // @ts-ignore + ) => resolve(file.data.id), + ) + .catch((err) => reject(err)); + }) + .catch((err) => reject(err)); + }); + } + + createFolder(name: string, parent_folder?: string): Promise { + return new Promise((resolve, reject) => { + getAuth() + .then((auth: OAuth2Client) => { + const fileMetadata = { + name: name, + mimeType: 'application/vnd.google-apps.folder', + parents: parent_folder ? [parent_folder] : undefined, + }; + this.getFolderByName(name, parent_folder) + .then((folder_id) => { + if (folder_id) resolve(folder_id); + else { + const drive = google.drive({ version: 'v3', auth }); + drive.files + .create({ + // @ts-ignore + resource: fileMetadata, + fields: 'id', + }) + .then( + ( + file, // @ts-ignore + ) => resolve(file.data.id), + ) + .catch((err) => reject(err)); + } + }) + .catch((err) => reject(err)); + }) + .catch((err) => reject(err)); + }); + } + + deleteItem(item_id: string): Promise { + return new Promise((resolve, reject) => { + getAuth() + .then((auth: OAuth2Client) => { + const drive = google.drive({ version: 'v3', auth }); + drive.files + .delete({ + fileId: item_id, // @ts-ignore + }) + .then((res) => resolve(res.status)) + .catch((err) => reject(err)); + }) + .catch((err) => reject(err)); + }); + } + + insertFile( + name: string, + rawData: Uint8Array, + parent_folder?: string, + ): Promise { + return new Promise((resolve, reject) => { + getAuth() + .then((auth: OAuth2Client) => { + const drive = google.drive({ version: 'v3', auth }); + const fileMetadata = { + name: name, + parents: parent_folder ? [parent_folder] : undefined, + }; + const stream = new Readable(); + stream.push(rawData); + stream.push(null); + const media = { + mimeType: 'application/pdf', + body: stream, + }; + drive.files + .create({ + // @ts-ignore + resource: fileMetadata, + media: media, + fields: 'id', + }) + .then( + ( + file, // @ts-ignore + ) => resolve(file.data.id), + ) + .catch((err) => reject(err)); + }) + .catch((err) => reject(err)); + }); + } + + getFolderByName(name: string, parent_folder?: string): Promise { + return new Promise((resolve, reject) => { + getAuth().then((auth: OAuth2Client) => { + const drive = google.drive({ version: 'v3', auth }); + const inParent = parent_folder + ? 
` and '${parent_folder}' in parents` + : ''; + drive.files + .list({ + q: + `mimeType ='application/vnd.google-apps.folder' and name='${name}'` + + inParent, + fields: 'files(id, name)', + }) + .then((res) => { + if (res.data.files.length === 1) resolve(res.data.files.pop().id); + else if (res.data.files.length === 0) + resolve(null); //directory not present + else + reject('Multiple folders with same name inside the same folder'); + }) + .catch((err) => reject(err)); + }); + }); + } + /** + * Extracts the id of item (file or folder) from the sharing url + * @param src the url of the resource + * @return {string} the id of the resource + */ + extractIdFrom(src: string): string { + return src.match(/[-\w]{25,}/)[0]; + } +} diff --git a/api/src/main.ts b/api/src/main.ts index 2e05af0..f5a4394 100644 --- a/api/src/main.ts +++ b/api/src/main.ts @@ -6,7 +6,9 @@ dotenv.config({ path: '.env' }); async function bootstrap() { const app = await NestFactory.create(AppModule); + app.enableCors(); + app.setGlobalPrefix('v1'); const config = new DocumentBuilder() .setTitle('HKRecruitment API') diff --git a/api/src/mocks/data.ts b/api/src/mocks/data.ts new file mode 100644 index 0000000..f31aa3e --- /dev/null +++ b/api/src/mocks/data.ts @@ -0,0 +1,158 @@ +import { CreateApplicationDto } from 'src/application/create-application.dto'; +import { + ApplicationType, + ApplicationState, + LangLevel, + Role, +} from '@hkrecruitment/shared'; +import { + BscApplication, + MscApplication, + PhdApplication, +} from 'src/application/application.entity'; +import { UpdateApplicationDto } from 'src/application/update-application.dto'; + +export const testDate = new Date(2023, 0, 1); +export const testDateTimeStart = new Date(2023, 0, 1, 10, 30, 0); +export const testDateTime10Minutes = new Date(2023, 0, 1, 10, 40, 0); +export const testDateTime3Hours = new Date(2023, 0, 1, 13, 30, 0); +export const testDateTimeEnd = new Date(2023, 0, 1, 11, 30, 0); + +export const mockTimeSlot = { + start: testDateTimeStart, + end: testDateTimeEnd, + id: 1, +}; + +export const baseFile = { + encoding: '7bit', + mimetype: 'application/pdf', + buffer: Buffer.from(''), + size: 0, + stream: undefined, + destination: undefined, + path: '', +}; + +const cvFile = { + ...baseFile, + fieldname: 'cv', + originalname: 'cv', + filename: 'cv', +}; + +const gradesFile = { + ...baseFile, + fieldname: 'grades', + originalname: 'grades', + filename: 'grades', +}; + +export const mockBscApplication = { + type: ApplicationType.BSC, + id: 1, + state: ApplicationState.New, + notes: 'Notes', + itaLevel: LangLevel.NativeSpeaker, + bscStudyPath: 'Computer Engineering', + bscAcademicYear: 1, + bscGradesAvg: 25.8, + cfu: 50, +} as BscApplication; + +export const mockMscApplication = { + type: ApplicationType.MSC, + id: 1, + state: ApplicationState.New, + notes: 'Notes', + itaLevel: LangLevel.B2, + mscStudyPath: 'Medical Engineering', + mscAcademicYear: 1, + mscGradesAvg: 25.8, +} as MscApplication; + +export const mockPhdApplication = { + type: ApplicationType.PHD, + id: 1, + state: ApplicationState.New, + notes: 'Notes', + itaLevel: LangLevel.C1, + phdDescription: 'PHD Description', +} as PhdApplication; + +export const mockCreateBscApplicationDTO = { + type: ApplicationType.BSC, + itaLevel: mockBscApplication.itaLevel, + cv: cvFile, + notes: 'Nothing to add', + bscApplication: { + bscStudyPath: mockBscApplication.bscStudyPath, + bscAcademicYear: mockBscApplication.bscAcademicYear, + bscGradesAvg: mockBscApplication.bscGradesAvg, + cfu: mockBscApplication.cfu, 
+ grades: gradesFile, + }, +} as CreateApplicationDto; + +export const mockCreateMscApplicationDTO = { + type: ApplicationType.MSC, + itaLevel: mockMscApplication.itaLevel, + cv: cvFile, + mscApplication: { + mscStudyPath: mockMscApplication.mscStudyPath, + mscAcademicYear: mockMscApplication.mscAcademicYear, + mscGradesAvg: mockMscApplication.mscGradesAvg, + cfu: mockBscApplication.cfu, + bscStudyPath: mockBscApplication.bscStudyPath, + bscGradesAvg: mockBscApplication.bscGradesAvg, + grades: gradesFile, + }, +} as CreateApplicationDto; + +export const mockCreatePhdApplicationDTO = { + type: ApplicationType.PHD, + cv: cvFile, + itaLevel: mockPhdApplication.itaLevel, + phdApplication: { + mscStudyPath: mockMscApplication.mscStudyPath, + phdDescription: mockPhdApplication.phdDescription, + }, +} as CreateApplicationDto; + +export const updateApplicationDTO = { + notes: 'Notes', + state: ApplicationState.Accepted, +} as UpdateApplicationDto; + +export const applicant = { + firstName: 'John', + lastName: 'Doe', + oauthId: '123', + sex: 'male', + email: 'email@example.com', + role: Role.Applicant, +}; + +export const applicationFiles = { + cv: [ + { + ...baseFile, + fieldname: 'cv', + originalname: 'cv', + filename: 'cv', + }, + ], + grades: [ + { + ...baseFile, + fieldname: 'grades', + originalname: 'grades', + filename: 'grades', + }, + ], +}; + +export const applicantId = 'abc123'; +export const folderId = 'folder_abc123'; +export const fileId = 'file_abc123'; +export const today = '1/1/2023, 24:00:00'; diff --git a/api/src/mocks/repositories.ts b/api/src/mocks/repositories.ts new file mode 100644 index 0000000..e418727 --- /dev/null +++ b/api/src/mocks/repositories.ts @@ -0,0 +1,6 @@ +export const mockedRepository = { + find: jest.fn(), + findBy: jest.fn(), + remove: jest.fn(), + save: jest.fn(), +}; diff --git a/api/src/mocks/services.ts b/api/src/mocks/services.ts new file mode 100644 index 0000000..6d7d17c --- /dev/null +++ b/api/src/mocks/services.ts @@ -0,0 +1,3 @@ +export const mockedUsersService = { + findByOauthId: jest.fn(), +}; diff --git a/api/src/timer/timer.interceptor.ts b/api/src/timer/timer.interceptor.ts new file mode 100644 index 0000000..0b6752c --- /dev/null +++ b/api/src/timer/timer.interceptor.ts @@ -0,0 +1,26 @@ +import { + CallHandler, + ExecutionContext, + Injectable, + Logger, + NestInterceptor, +} from '@nestjs/common'; +import { catchError, Observable, tap } from 'rxjs'; + +@Injectable() +export class TimerInterceptor implements NestInterceptor { + private readonly logger = new Logger(TimerInterceptor.name); + + intercept(context: ExecutionContext, next: CallHandler): Observable { + const startTimer = Date.now(); + const logTimer = () => + this.logger.log(`Request took ${Date.now() - startTimer}ms`); + return next.handle().pipe( + tap(logTimer), + catchError((error: Error) => { + logTimer(); + throw error; + }), + ); + } +} diff --git a/api/src/timeslots/create-timeslot.dto.ts b/api/src/timeslots/create-timeslot.dto.ts new file mode 100644 index 0000000..2b9aaea --- /dev/null +++ b/api/src/timeslots/create-timeslot.dto.ts @@ -0,0 +1,10 @@ +import { TimeSlot } from '@hkrecruitment/shared'; +import { ApiProperty } from '@nestjs/swagger'; + +export class CreateTimeSlotDto implements TimeSlot { + @ApiProperty() + start: Date; + + @ApiProperty() + end: Date; +} diff --git a/api/src/timeslots/timeslot.entity.ts b/api/src/timeslots/timeslot.entity.ts new file mode 100644 index 0000000..959f85f --- /dev/null +++ b/api/src/timeslots/timeslot.entity.ts @@ -0,0 +1,14 @@ 
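
The TimerInterceptor above is defined but not registered anywhere in this change. A minimal sketch of one way to enable it globally, assuming it were wired up in api/src/main.ts during bootstrap (the function name, import paths, and port are illustrative):

import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';
import { TimerInterceptor } from './timer/timer.interceptor';

async function bootstrapWithTimer() {
  const app = await NestFactory.create(AppModule);
  // Log the duration of every request handled by the application
  app.useGlobalInterceptors(new TimerInterceptor());
  await app.listen(3000);
}

Alternatively, it could be applied per controller with the @UseInterceptors(TimerInterceptor) decorator from @nestjs/common.
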
+import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm'; +import { TimeSlot as TimeSlotInterface } from '@hkrecruitment/shared'; + +@Entity() +export class TimeSlot implements TimeSlotInterface { + @PrimaryGeneratedColumn('increment') + id: number; + + @Column() + start: Date; + + @Column() + end: Date; +} diff --git a/api/src/timeslots/timeslots.controller.spec.ts b/api/src/timeslots/timeslots.controller.spec.ts new file mode 100644 index 0000000..0c2d34b --- /dev/null +++ b/api/src/timeslots/timeslots.controller.spec.ts @@ -0,0 +1,104 @@ +import { TestBed } from '@automock/jest'; +import { + mockTimeSlot, + testDate, + testDateTimeEnd, + testDateTimeStart, +} from '@mocks/data'; +import { TimeSlotsController } from './timeslots.controller'; +import { TimeSlotsService } from './timeslots.service'; +import { testDateTime10Minutes } from '@mocks/data'; +import { testDateTime3Hours } from '@mocks/data'; + +describe('TimeSlotController', () => { + let controller: TimeSlotsController; + let service: TimeSlotsService; + + /************* Test setup ************/ + + beforeEach(async () => { + const { unit, unitRef } = TestBed.create(TimeSlotsController).compile(); + controller = unit; + service = unitRef.get(TimeSlotsService); + }); + + it('should be defined', () => { + expect(controller).toBeDefined(); + expect(service).toBeDefined(); + }); + + // Create a time slot + describe('createTimeSlot', () => { + it('should allow creating a valid time slot', async () => { + const timeSlot = { + start: mockTimeSlot.start, + end: mockTimeSlot.end, + }; + + jest.spyOn(service, 'countOverlappingTimeSlots').mockResolvedValue(0); + jest.spyOn(service, 'createTimeSlot').mockResolvedValue(mockTimeSlot); + + const result = await controller.createTimeSlot(timeSlot); + + expect(result).toEqual(mockTimeSlot); + }); + + it('should throw an error if the duration is less than 30 minutes', async () => { + const timeSlot = { + start: testDateTimeStart, + end: testDateTime10Minutes, + }; + + await expect(controller.createTimeSlot(timeSlot)).rejects.toThrow( + 'The duration of the time slot must be at least 30 minutes', + ); + }); + + it('should throw an error if the duration is more than 60 minutes', async () => { + const timeSlot = { + start: testDateTimeStart, + end: testDateTime3Hours, + }; + + await expect(controller.createTimeSlot(timeSlot)).rejects.toThrow( + 'The duration of the time slot must be at most 60 minutes', + ); + }); + + it('should throw an error if the time slot overlaps with an existing time slot', async () => { + const timeSlot = { + start: testDateTimeStart, + end: testDateTimeEnd, + }; + + jest.spyOn(service, 'countOverlappingTimeSlots').mockResolvedValue(1); + + await expect(controller.createTimeSlot(timeSlot)).rejects.toThrow( + 'The time slot overlaps with existing time slots', + ); + }); + }); + + describe('deleteTimeSlot', () => { + it('should allow deleting an existing time slot', async () => { + jest.spyOn(service, 'findById').mockResolvedValue(mockTimeSlot); + jest.spyOn(service, 'deleteTimeSlot').mockResolvedValue(mockTimeSlot); + + await expect(controller.deleteTimeSlot(mockTimeSlot.id)).resolves.toEqual( + mockTimeSlot, + ); + expect(service.deleteTimeSlot).toHaveBeenCalledWith(mockTimeSlot); + expect(service.deleteTimeSlot).toHaveBeenCalledTimes(1); + }); + + it('should throw an error if the time slot does not exist', async () => { + jest.spyOn(service, 'findById').mockResolvedValue(null); + jest.spyOn(service, 'deleteTimeSlot').mockResolvedValue(mockTimeSlot); + + await 
expect( + controller.deleteTimeSlot(mockTimeSlot.id), + ).rejects.toThrowError('Time slot not found'); + expect(service.deleteTimeSlot).toHaveBeenCalledTimes(0); + }); + }); +}); diff --git a/api/src/timeslots/timeslots.controller.ts b/api/src/timeslots/timeslots.controller.ts new file mode 100644 index 0000000..3202071 --- /dev/null +++ b/api/src/timeslots/timeslots.controller.ts @@ -0,0 +1,91 @@ +import { + Body, + Controller, + BadRequestException, + NotFoundException, + ConflictException, + Param, + Post, + Delete, +} from '@nestjs/common'; +import { TimeSlotsService } from './timeslots.service'; +import { Action, createTimeSlotSchema, TimeSlot } from '@hkrecruitment/shared'; +import { JoiValidate } from '../joi-validation/joi-validate.decorator'; +import { + ApiBadRequestResponse, + ApiBearerAuth, + ApiForbiddenResponse, + ApiNotFoundResponse, + ApiCreatedResponse, + ApiOkResponse, + ApiTags, + ApiConflictResponse, + ApiNoContentResponse, +} from '@nestjs/swagger'; +import { CheckPolicies } from 'src/authorization/check-policies.decorator'; +import { CreateTimeSlotDto } from './create-timeslot.dto'; +import * as Joi from 'joi'; + +@ApiBearerAuth() +@ApiTags('timeslots') +@Controller('timeslots') +export class TimeSlotsController { + constructor(private readonly timeSlotsService: TimeSlotsService) {} + + @ApiBadRequestResponse() + @ApiForbiddenResponse() + @ApiConflictResponse({ + description: 'The time slot overlaps with existing time slots', + }) + @ApiCreatedResponse() + @JoiValidate({ + body: createTimeSlotSchema, + }) + @CheckPolicies((ability) => ability.can(Action.Create, 'TimeSlot')) + @Post() + async createTimeSlot(@Body() timeSlot: CreateTimeSlotDto): Promise { + const startDate = new Date(timeSlot.start); + const endDate = new Date(timeSlot.end); + + // Check duration + const durationInMinutes = + (endDate.getTime() - startDate.getTime()) / (1000 * 60); + if (durationInMinutes < 30) { + throw new BadRequestException( + 'The duration of the time slot must be at least 30 minutes', + ); + } else if (durationInMinutes > 60) { + throw new BadRequestException( + 'The duration of the time slot must be at most 60 minutes', + ); + } + + // Check overlapping timeslots + const overlappingTimeSlots = + await this.timeSlotsService.countOverlappingTimeSlots(startDate, endDate); + if (overlappingTimeSlots > 0) + throw new ConflictException( + 'The time slot overlaps with existing time slots', + ); + + return await this.timeSlotsService.createTimeSlot(timeSlot); + } + + @ApiBadRequestResponse() + @ApiForbiddenResponse() + @ApiNotFoundResponse() + @ApiOkResponse() + @ApiNoContentResponse() + @CheckPolicies((ability) => ability.can(Action.Delete, 'TimeSlot')) + @Delete('/:time_slot_id') + @JoiValidate({ + param: Joi.number().positive().integer().required().label('time_slot_id'), + }) + async deleteTimeSlot( + @Param('time_slot_id') timeSlotId: number, + ): Promise { + const timeSlot = await this.timeSlotsService.findById(timeSlotId); + if (!timeSlot) throw new NotFoundException('Time slot not found'); + return await this.timeSlotsService.deleteTimeSlot(timeSlot); + } +} diff --git a/api/src/timeslots/timeslots.module.ts b/api/src/timeslots/timeslots.module.ts new file mode 100644 index 0000000..15299da --- /dev/null +++ b/api/src/timeslots/timeslots.module.ts @@ -0,0 +1,14 @@ +import { Module } from '@nestjs/common'; +import { TimeSlotsService } from './timeslots.service'; +import { TimeSlotsController } from './timeslots.controller'; +import { TypeOrmModule } from '@nestjs/typeorm'; +import 
{ TimeSlot } from './timeslot.entity'; +import { UsersModule } from 'src/users/users.module'; + +@Module({ + imports: [TypeOrmModule.forFeature([TimeSlot]), UsersModule], + providers: [TimeSlotsService], + controllers: [TimeSlotsController], + exports: [TimeSlotsService], +}) +export class TimeSlotsModule {} diff --git a/api/src/timeslots/timeslots.service.spec.ts b/api/src/timeslots/timeslots.service.spec.ts new file mode 100644 index 0000000..fde00f1 --- /dev/null +++ b/api/src/timeslots/timeslots.service.spec.ts @@ -0,0 +1,76 @@ +import { mockTimeSlot, testDate } from '@mocks/data'; +import { mockedRepository } from '@mocks/repositories'; +import { TestingModule, Test } from '@nestjs/testing'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { TimeSlot } from './timeslot.entity'; +import { TimeSlotsService } from './timeslots.service'; + +describe('TimeSlotsService', () => { + let timeSlotService: TimeSlotsService; + + /************* Test setup ************/ + + beforeAll(() => { + jest + .spyOn(global, 'Date') + .mockImplementation(() => testDate as unknown as string); + }); + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + TimeSlotsService, + { + provide: getRepositoryToken(TimeSlot), + useValue: mockedRepository, + }, + ], + }).compile(); + + timeSlotService = module.get(TimeSlotsService); + }); + + afterEach(() => jest.clearAllMocks()); + + /*************** Tests ***************/ + + it('should be defined', () => { + expect(timeSlotService).toBeDefined(); + }); + + describe('deleteTimeSlot', () => { + it('should remove the specified timeslot from the database', async () => { + jest.spyOn(mockedRepository, 'remove').mockResolvedValue(mockTimeSlot); + const result = await timeSlotService.deleteTimeSlot(mockTimeSlot); + expect(result).toEqual(mockTimeSlot); + expect(mockedRepository.remove).toHaveBeenCalledTimes(1); + }); + }); + + describe('listTimeSlots', () => { + it('should return all timeslots', async () => { + jest.spyOn(mockedRepository, 'find').mockResolvedValue([mockTimeSlot]); + const result = await timeSlotService.listTimeSlots(); + expect(result).toEqual([mockTimeSlot]); + expect(mockedRepository.find).toHaveBeenCalledTimes(1); + }); + }); + + describe('findById', () => { + it('should return the timeslot with the specified id', async () => { + jest.spyOn(mockedRepository, 'findBy').mockResolvedValue([mockTimeSlot]); + const result = await timeSlotService.findById(mockTimeSlot.id); + expect(result).toEqual(mockTimeSlot); + expect(mockedRepository.findBy).toHaveBeenCalledTimes(1); + }); + }); + + describe('createTimeSlot', () => { + it('should create a new timeslot', async () => { + jest.spyOn(mockedRepository, 'save').mockResolvedValue(mockTimeSlot); + const result = await timeSlotService.createTimeSlot(mockTimeSlot); + expect(result).toEqual(mockTimeSlot); + expect(mockedRepository.save).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git a/api/src/timeslots/timeslots.service.ts b/api/src/timeslots/timeslots.service.ts new file mode 100644 index 0000000..ec5e1b9 --- /dev/null +++ b/api/src/timeslots/timeslots.service.ts @@ -0,0 +1,54 @@ +import { Injectable } from '@nestjs/common'; +import { InjectRepository } from '@nestjs/typeorm'; +import { Repository, LessThan, MoreThan } from 'typeorm'; +import { TimeSlot } from './timeslot.entity'; +import { CreateTimeSlotDto } from './create-timeslot.dto'; + +@Injectable() +export class TimeSlotsService { + constructor( + @InjectRepository(TimeSlot) + 
private readonly timeSlotRepository: Repository, + ) {} + + async countOverlappingTimeSlots( + startDate: Date, + endDate: Date, + ): Promise { + const count = await this.timeSlotRepository.count({ + where: [ + { + // start < startDate && end > startDate + start: LessThan(startDate), + end: MoreThan(startDate), + }, + // OR + { + // start < endDate || end > endDate + start: LessThan(endDate), + end: MoreThan(endDate), + }, + ], + }); + return count; + } + + async listTimeSlots(): Promise { + return await this.timeSlotRepository.find(); + } + + async deleteTimeSlot(timeSlot: TimeSlot): Promise { + return await this.timeSlotRepository.remove(timeSlot); + } + + async findById(timeSlotId: number): Promise { + const matches = await this.timeSlotRepository.findBy({ + id: timeSlotId, + }); + return matches.length > 0 ? matches[0] : null; + } + + async createTimeSlot(timeSlot: CreateTimeSlotDto): Promise { + return await this.timeSlotRepository.save(timeSlot); + } +} diff --git a/api/tsconfig.json b/api/tsconfig.json index e1dda04..2f8cc3c 100644 --- a/api/tsconfig.json +++ b/api/tsconfig.json @@ -1,10 +1,10 @@ { - "compilerOptions": { - "module": "commonjs", + "compilerOptions": { + "module": "commonjs", "declaration": true, "removeComments": true, - "emitDecoratorMetadata": true, - "experimentalDecorators": true, + "emitDecoratorMetadata": true, + "experimentalDecorators": true, "allowSyntheticDefaultImports": true, "target": "es2017", "sourceMap": true, @@ -18,8 +18,15 @@ "forceConsistentCasingInFileNames": false, "noFallthroughCasesInSwitch": false, "paths": { - "@hkrecruitment/shared": ["../shared/src"], - "@hkrecruitment/shared/*": ["../shared/src/*"] + "@hkrecruitment/shared": [ + "../shared/src" + ], + "@hkrecruitment/shared/*": [ + "../shared/src/*" + ], + "@mocks/*": [ + "src/mocks/*" + ], } - } + } } \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..f0377c6 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,9 @@ +services: + db: + image: postgres:latest + environment: + - POSTGRES_USER=hkrecruitment + - POSTGRES_PASSWORD=password + - POSTGRES_DB=hkrecruitment + ports: + - '5432:5432' diff --git a/documentation/.gitignore b/documentation/.gitignore new file mode 100644 index 0000000..5c60607 --- /dev/null +++ b/documentation/.gitignore @@ -0,0 +1,8 @@ +*.toc +*.aux +*.log +*.out +*.gz +*.pdf +*.fls +*.fdb_latexmk \ No newline at end of file diff --git a/documentation/COPYING b/documentation/COPYING new file mode 100644 index 0000000..8602804 --- /dev/null +++ b/documentation/COPYING @@ -0,0 +1,422 @@ + + GNU Free Documentation License + Version 1.3, 3 November 2008 + + + Copyright (C) 2000, 2001, 2002, 2007, 2008 Free Software Foundation, Inc. + + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +0. PREAMBLE + +The purpose of this License is to make a manual, textbook, or other +functional and useful document "free" in the sense of freedom: to +assure everyone the effective freedom to copy and redistribute it, +with or without modifying it, either commercially or noncommercially. +Secondarily, this License preserves for the author and publisher a way +to get credit for their work, while not being considered responsible +for modifications made by others. + +This License is a kind of "copyleft", which means that derivative +works of the document must themselves be free in the same sense. 
It +complements the GNU General Public License, which is a copyleft +license designed for free software. + +We have designed this License in order to use it for manuals for free +software, because free software needs free documentation: a free +program should come with manuals providing the same freedoms that the +software does. But this License is not limited to software manuals; +it can be used for any textual work, regardless of subject matter or +whether it is published as a printed book. We recommend this License +principally for works whose purpose is instruction or reference. + + +1. APPLICABILITY AND DEFINITIONS + +This License applies to any manual or other work, in any medium, that +contains a notice placed by the copyright holder saying it can be +distributed under the terms of this License. Such a notice grants a +world-wide, royalty-free license, unlimited in duration, to use that +work under the conditions stated herein. The "Document", below, +refers to any such manual or work. Any member of the public is a +licensee, and is addressed as "you". You accept the license if you +copy, modify or distribute the work in a way requiring permission +under copyright law. + +A "Modified Version" of the Document means any work containing the +Document or a portion of it, either copied verbatim, or with +modifications and/or translated into another language. + +A "Secondary Section" is a named appendix or a front-matter section of +the Document that deals exclusively with the relationship of the +publishers or authors of the Document to the Document's overall +subject (or to related matters) and contains nothing that could fall +directly within that overall subject. (Thus, if the Document is in +part a textbook of mathematics, a Secondary Section may not explain +any mathematics.) The relationship could be a matter of historical +connection with the subject or with related matters, or of legal, +commercial, philosophical, ethical or political position regarding +them. + +The "Invariant Sections" are certain Secondary Sections whose titles +are designated, as being those of Invariant Sections, in the notice +that says that the Document is released under this License. If a +section does not fit the above definition of Secondary then it is not +allowed to be designated as Invariant. The Document may contain zero +Invariant Sections. If the Document does not identify any Invariant +Sections then there are none. + +The "Cover Texts" are certain short passages of text that are listed, +as Front-Cover Texts or Back-Cover Texts, in the notice that says that +the Document is released under this License. A Front-Cover Text may +be at most 5 words, and a Back-Cover Text may be at most 25 words. + +A "Transparent" copy of the Document means a machine-readable copy, +represented in a format whose specification is available to the +general public, that is suitable for revising the document +straightforwardly with generic text editors or (for images composed of +pixels) generic paint programs or (for drawings) some widely available +drawing editor, and that is suitable for input to text formatters or +for automatic translation to a variety of formats suitable for input +to text formatters. A copy made in an otherwise Transparent file +format whose markup, or absence of markup, has been arranged to thwart +or discourage subsequent modification by readers is not Transparent. +An image format is not Transparent if used for any substantial amount +of text. 
A copy that is not "Transparent" is called "Opaque". + +Examples of suitable formats for Transparent copies include plain +ASCII without markup, Texinfo input format, LaTeX input format, SGML +or XML using a publicly available DTD, and standard-conforming simple +HTML, PostScript or PDF designed for human modification. Examples of +transparent image formats include PNG, XCF and JPG. Opaque formats +include proprietary formats that can be read and edited only by +proprietary word processors, SGML or XML for which the DTD and/or +processing tools are not generally available, and the +machine-generated HTML, PostScript or PDF produced by some word +processors for output purposes only. + +The "Title Page" means, for a printed book, the title page itself, +plus such following pages as are needed to hold, legibly, the material +this License requires to appear in the title page. For works in +formats which do not have any title page as such, "Title Page" means +the text near the most prominent appearance of the work's title, +preceding the beginning of the body of the text. + +The "publisher" means any person or entity that distributes copies of +the Document to the public. + +A section "Entitled XYZ" means a named subunit of the Document whose +title either is precisely XYZ or contains XYZ in parentheses following +text that translates XYZ in another language. (Here XYZ stands for a +specific section name mentioned below, such as "Acknowledgements", +"Dedications", "Endorsements", or "History".) To "Preserve the Title" +of such a section when you modify the Document means that it remains a +section "Entitled XYZ" according to this definition. + +The Document may include Warranty Disclaimers next to the notice which +states that this License applies to the Document. These Warranty +Disclaimers are considered to be included by reference in this +License, but only as regards disclaiming warranties: any other +implication that these Warranty Disclaimers may have is void and has +no effect on the meaning of this License. + +2. VERBATIM COPYING + +You may copy and distribute the Document in any medium, either +commercially or noncommercially, provided that this License, the +copyright notices, and the license notice saying this License applies +to the Document are reproduced in all copies, and that you add no +other conditions whatsoever to those of this License. You may not use +technical measures to obstruct or control the reading or further +copying of the copies you make or distribute. However, you may accept +compensation in exchange for copies. If you distribute a large enough +number of copies you must also follow the conditions in section 3. + +You may also lend copies, under the same conditions stated above, and +you may publicly display copies. + + +3. COPYING IN QUANTITY + +If you publish printed copies (or copies in media that commonly have +printed covers) of the Document, numbering more than 100, and the +Document's license notice requires Cover Texts, you must enclose the +copies in covers that carry, clearly and legibly, all these Cover +Texts: Front-Cover Texts on the front cover, and Back-Cover Texts on +the back cover. Both covers must also clearly and legibly identify +you as the publisher of these copies. The front cover must present +the full title with all words of the title equally prominent and +visible. You may add other material on the covers in addition. 
+Copying with changes limited to the covers, as long as they preserve +the title of the Document and satisfy these conditions, can be treated +as verbatim copying in other respects. + +If the required texts for either cover are too voluminous to fit +legibly, you should put the first ones listed (as many as fit +reasonably) on the actual cover, and continue the rest onto adjacent +pages. + +If you publish or distribute Opaque copies of the Document numbering +more than 100, you must either include a machine-readable Transparent +copy along with each Opaque copy, or state in or with each Opaque copy +a computer-network location from which the general network-using +public has access to download using public-standard network protocols +a complete Transparent copy of the Document, free of added material. +If you use the latter option, you must take reasonably prudent steps, +when you begin distribution of Opaque copies in quantity, to ensure +that this Transparent copy will remain thus accessible at the stated +location until at least one year after the last time you distribute an +Opaque copy (directly or through your agents or retailers) of that +edition to the public. + +It is requested, but not required, that you contact the authors of the +Document well before redistributing any large number of copies, to +give them a chance to provide you with an updated version of the +Document. + + +4. MODIFICATIONS + +You may copy and distribute a Modified Version of the Document under +the conditions of sections 2 and 3 above, provided that you release +the Modified Version under precisely this License, with the Modified +Version filling the role of the Document, thus licensing distribution +and modification of the Modified Version to whoever possesses a copy +of it. In addition, you must do these things in the Modified Version: + +A. Use in the Title Page (and on the covers, if any) a title distinct + from that of the Document, and from those of previous versions + (which should, if there were any, be listed in the History section + of the Document). You may use the same title as a previous version + if the original publisher of that version gives permission. +B. List on the Title Page, as authors, one or more persons or entities + responsible for authorship of the modifications in the Modified + Version, together with at least five of the principal authors of the + Document (all of its principal authors, if it has fewer than five), + unless they release you from this requirement. +C. State on the Title page the name of the publisher of the + Modified Version, as the publisher. +D. Preserve all the copyright notices of the Document. +E. Add an appropriate copyright notice for your modifications + adjacent to the other copyright notices. +F. Include, immediately after the copyright notices, a license notice + giving the public permission to use the Modified Version under the + terms of this License, in the form shown in the Addendum below. +G. Preserve in that license notice the full lists of Invariant Sections + and required Cover Texts given in the Document's license notice. +H. Include an unaltered copy of this License. +I. Preserve the section Entitled "History", Preserve its Title, and add + to it an item stating at least the title, year, new authors, and + publisher of the Modified Version as given on the Title Page. 
If + there is no section Entitled "History" in the Document, create one + stating the title, year, authors, and publisher of the Document as + given on its Title Page, then add an item describing the Modified + Version as stated in the previous sentence. +J. Preserve the network location, if any, given in the Document for + public access to a Transparent copy of the Document, and likewise + the network locations given in the Document for previous versions + it was based on. These may be placed in the "History" section. + You may omit a network location for a work that was published at + least four years before the Document itself, or if the original + publisher of the version it refers to gives permission. +K. For any section Entitled "Acknowledgements" or "Dedications", + Preserve the Title of the section, and preserve in the section all + the substance and tone of each of the contributor acknowledgements + and/or dedications given therein. +L. Preserve all the Invariant Sections of the Document, + unaltered in their text and in their titles. Section numbers + or the equivalent are not considered part of the section titles. +M. Delete any section Entitled "Endorsements". Such a section + may not be included in the Modified Version. +N. Do not retitle any existing section to be Entitled "Endorsements" + or to conflict in title with any Invariant Section. +O. Preserve any Warranty Disclaimers. + +If the Modified Version includes new front-matter sections or +appendices that qualify as Secondary Sections and contain no material +copied from the Document, you may at your option designate some or all +of these sections as invariant. To do this, add their titles to the +list of Invariant Sections in the Modified Version's license notice. +These titles must be distinct from any other section titles. + +You may add a section Entitled "Endorsements", provided it contains +nothing but endorsements of your Modified Version by various +parties--for example, statements of peer review or that the text has +been approved by an organization as the authoritative definition of a +standard. + +You may add a passage of up to five words as a Front-Cover Text, and a +passage of up to 25 words as a Back-Cover Text, to the end of the list +of Cover Texts in the Modified Version. Only one passage of +Front-Cover Text and one of Back-Cover Text may be added by (or +through arrangements made by) any one entity. If the Document already +includes a cover text for the same cover, previously added by you or +by arrangement made by the same entity you are acting on behalf of, +you may not add another; but you may replace the old one, on explicit +permission from the previous publisher that added the old one. + +The author(s) and publisher(s) of the Document do not by this License +give permission to use their names for publicity for or to assert or +imply endorsement of any Modified Version. + + +5. COMBINING DOCUMENTS + +You may combine the Document with other documents released under this +License, under the terms defined in section 4 above for modified +versions, provided that you include in the combination all of the +Invariant Sections of all of the original documents, unmodified, and +list them all as Invariant Sections of your combined work in its +license notice, and that you preserve all their Warranty Disclaimers. + +The combined work need only contain one copy of this License, and +multiple identical Invariant Sections may be replaced with a single +copy. 
If there are multiple Invariant Sections with the same name but +different contents, make the title of each such section unique by +adding at the end of it, in parentheses, the name of the original +author or publisher of that section if known, or else a unique number. +Make the same adjustment to the section titles in the list of +Invariant Sections in the license notice of the combined work. + +In the combination, you must combine any sections Entitled "History" +in the various original documents, forming one section Entitled +"History"; likewise combine any sections Entitled "Acknowledgements", +and any sections Entitled "Dedications". You must delete all sections +Entitled "Endorsements". + + +6. COLLECTIONS OF DOCUMENTS + +You may make a collection consisting of the Document and other +documents released under this License, and replace the individual +copies of this License in the various documents with a single copy +that is included in the collection, provided that you follow the rules +of this License for verbatim copying of each of the documents in all +other respects. + +You may extract a single document from such a collection, and +distribute it individually under this License, provided you insert a +copy of this License into the extracted document, and follow this +License in all other respects regarding verbatim copying of that +document. + + +7. AGGREGATION WITH INDEPENDENT WORKS + +A compilation of the Document or its derivatives with other separate +and independent documents or works, in or on a volume of a storage or +distribution medium, is called an "aggregate" if the copyright +resulting from the compilation is not used to limit the legal rights +of the compilation's users beyond what the individual works permit. +When the Document is included in an aggregate, this License does not +apply to the other works in the aggregate which are not themselves +derivative works of the Document. + +If the Cover Text requirement of section 3 is applicable to these +copies of the Document, then if the Document is less than one half of +the entire aggregate, the Document's Cover Texts may be placed on +covers that bracket the Document within the aggregate, or the +electronic equivalent of covers if the Document is in electronic form. +Otherwise they must appear on printed covers that bracket the whole +aggregate. + + +8. TRANSLATION + +Translation is considered a kind of modification, so you may +distribute translations of the Document under the terms of section 4. +Replacing Invariant Sections with translations requires special +permission from their copyright holders, but you may include +translations of some or all Invariant Sections in addition to the +original versions of these Invariant Sections. You may include a +translation of this License, and all the license notices in the +Document, and any Warranty Disclaimers, provided that you also include +the original English version of this License and the original versions +of those notices and disclaimers. In case of a disagreement between +the translation and the original version of this License or a notice +or disclaimer, the original version will prevail. + +If a section in the Document is Entitled "Acknowledgements", +"Dedications", or "History", the requirement (section 4) to Preserve +its Title (section 1) will typically require changing the actual +title. + + +9. TERMINATION + +You may not copy, modify, sublicense, or distribute the Document +except as expressly provided under this License. 
Any attempt +otherwise to copy, modify, sublicense, or distribute it is void, and +will automatically terminate your rights under this License. + +However, if you cease all violation of this License, then your license +from a particular copyright holder is reinstated (a) provisionally, +unless and until the copyright holder explicitly and finally +terminates your license, and (b) permanently, if the copyright holder +fails to notify you of the violation by some reasonable means prior to +60 days after the cessation. + +Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + +Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, receipt of a copy of some or all of the same material does +not give you any rights to use it. + + +10. FUTURE REVISIONS OF THIS LICENSE + +The Free Software Foundation may publish new, revised versions of the +GNU Free Documentation License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in +detail to address new problems or concerns. See +https://www.gnu.org/licenses/. + +Each version of the License is given a distinguishing version number. +If the Document specifies that a particular numbered version of this +License "or any later version" applies to it, you have the option of +following the terms and conditions either of that specified version or +of any later version that has been published (not as a draft) by the +Free Software Foundation. If the Document does not specify a version +number of this License, you may choose any version ever published (not +as a draft) by the Free Software Foundation. If the Document +specifies that a proxy can decide which future versions of this +License can be used, that proxy's public statement of acceptance of a +version permanently authorizes you to choose that version for the +Document. + +11. RELICENSING + +"Massive Multiauthor Collaboration Site" (or "MMC Site") means any +World Wide Web server that publishes copyrightable works and also +provides prominent facilities for anybody to edit those works. A +public wiki that anybody can edit is an example of such a server. A +"Massive Multiauthor Collaboration" (or "MMC") contained in the site +means any set of copyrightable works thus published on the MMC site. + +"CC-BY-SA" means the Creative Commons Attribution-Share Alike 3.0 +license published by Creative Commons Corporation, a not-for-profit +corporation with a principal place of business in San Francisco, +California, as well as future copyleft versions of that license +published by that same organization. + +"Incorporate" means to publish or republish a Document, in whole or in +part, as part of another Document. + +An MMC is "eligible for relicensing" if it is licensed under this +License, and if all works that were first published under this License +somewhere other than this MMC, and subsequently incorporated in whole or +in part into the MMC, (1) had no cover texts or invariant sections, and +(2) were thus incorporated prior to November 1, 2008. 
+ +The operator of an MMC Site may republish an MMC contained in the site +under CC-BY-SA on the same site at any time before August 1, 2009, +provided the MMC is eligible for relicensing. + diff --git a/documentation/README.md b/documentation/README.md new file mode 100644 index 0000000..f2f6f9f --- /dev/null +++ b/documentation/README.md @@ -0,0 +1,22 @@ + + +# HKrecruitment - documentation +HKrecruitment is the platform used by HKN Polito to handle +the recruitment process. This branch contains the whole documentation written in LateX +for the React application and the API server. + +## Authors + +* **Riccardo Zaccone** - *API server* - [HKN Polito](https://hknpolito.org/) +* **Arianna Ravera** - *API server* - [HKN Polito](https://hknpolito.org/) +* **Marco Pappalardo** - *React application* - [HKN Polito](https://hknpolito.org/) + + +## License +Copyright (c) 2021 HKN Polito. + +Permission is granted to copy, distribute and/or modify this document +under the terms of the GNU Free Documentation License, Version 1.3 +or any later version published by the Free Software Foundation; +with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts. +See the COPYING file for details. diff --git a/documentation/appendix/GNUFreeDocumentationLicense.tex b/documentation/appendix/GNUFreeDocumentationLicense.tex new file mode 100644 index 0000000..bd8675d --- /dev/null +++ b/documentation/appendix/GNUFreeDocumentationLicense.tex @@ -0,0 +1,522 @@ +%\pagenumbering{arabic} + +\hfuzz = .6pt % avoid black boxes + +\chapter{\rlap{GNU Free Documentation License}} +\phantomsection % so hyperref creates bookmarks +\addcontentsline{toc}{chapter}{GNU Free Documentation License} +\label{appendix:GNU} + +\begin{center} + + Version 1.3, 3 November 2008 + + + Copyright \copyright{} 2000, 2001, 2002, 2007, 2008 Free Software Foundation, Inc. + + \bigskip + + \texttt{} + + \bigskip + + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. +\end{center} + + +\begin{center} + {\bf\large Preamble} +\end{center} + +The purpose of this License is to make a manual, textbook, or other +functional and useful document ``free'' in the sense of freedom: to +assure everyone the effective freedom to copy and redistribute it, +with or without modifying it, either commercially or noncommercially. +Secondarily, this License preserves for the author and publisher a way +to get credit for their work, while not being considered responsible +for modifications made by others. + +This License is a kind of ``copyleft'', which means that derivative +works of the document must themselves be free in the same sense. It +complements the GNU General Public License, which is a copyleft +license designed for free software. + +We have designed this License in order to use it for manuals for free +software, because free software needs free documentation: a free +program should come with manuals providing the same freedoms that the +software does. But this License is not limited to software manuals; +it can be used for any textual work, regardless of subject matter or +whether it is published as a printed book. We recommend this License +principally for works whose purpose is instruction or reference. + + +\begin{center} + {\Large\bf 1. APPLICABILITY AND DEFINITIONS\par} + \phantomsection + \addcontentsline{toc}{section}{1. 
APPLICABILITY AND DEFINITIONS} +\end{center} + +This License applies to any manual or other work, in any medium, that +contains a notice placed by the copyright holder saying it can be +distributed under the terms of this License. Such a notice grants a +world-wide, royalty-free license, unlimited in duration, to use that +work under the conditions stated herein. The ``\textbf{Document}'', below, +refers to any such manual or work. Any member of the public is a +licensee, and is addressed as ``\textbf{you}''. You accept the license if you +copy, modify or distribute the work in a way requiring permission +under copyright law. + +A ``\textbf{Modified Version}'' of the Document means any work containing the +Document or a portion of it, either copied verbatim, or with +modifications and/or translated into another language. + +A ``\textbf{Secondary Section}'' is a named appendix or a front-matter section of +the Document that deals exclusively with the relationship of the +publishers or authors of the Document to the Document's overall subject +(or to related matters) and contains nothing that could fall directly +within that overall subject. (Thus, if the Document is in part a +textbook of mathematics, a Secondary Section may not explain any +mathematics.) The relationship could be a matter of historical +connection with the subject or with related matters, or of legal, +commercial, philosophical, ethical or political position regarding +them. + +The ``\textbf{Invariant Sections}'' are certain Secondary Sections whose titles +are designated, as being those of Invariant Sections, in the notice +that says that the Document is released under this License. If a +section does not fit the above definition of Secondary then it is not +allowed to be designated as Invariant. The Document may contain zero +Invariant Sections. If the Document does not identify any Invariant +Sections then there are none. + +The ``\textbf{Cover Texts}'' are certain short passages of text that are listed, +as Front-Cover Texts or Back-Cover Texts, in the notice that says that +the Document is released under this License. A Front-Cover Text may +be at most 5 words, and a Back-Cover Text may be at most 25 words. + +A ``\textbf{Transparent}'' copy of the Document means a machine-readable copy, +represented in a format whose specification is available to the +general public, that is suitable for revising the document +straightforwardly with generic text editors or (for images composed of +pixels) generic paint programs or (for drawings) some widely available +drawing editor, and that is suitable for input to text formatters or +for automatic translation to a variety of formats suitable for input +to text formatters. A copy made in an otherwise Transparent file +format whose markup, or absence of markup, has been arranged to thwart +or discourage subsequent modification by readers is not Transparent. +An image format is not Transparent if used for any substantial amount +of text. A copy that is not ``Transparent'' is called ``\textbf{Opaque}''. + +Examples of suitable formats for Transparent copies include plain +ASCII without markup, Texinfo input format, LaTeX input format, SGML +or XML using a publicly available DTD, and standard-conforming simple +HTML, PostScript or PDF designed for human modification. Examples of +transparent image formats include PNG, XCF and JPG. 
Opaque formats +include proprietary formats that can be read and edited only by +proprietary word processors, SGML or XML for which the DTD and/or +processing tools are not generally available, and the +machine-generated HTML, PostScript or PDF produced by some word +processors for output purposes only. + +The ``\textbf{Title Page}'' means, for a printed book, the title page itself, +plus such following pages as are needed to hold, legibly, the material +this License requires to appear in the title page. For works in +formats which do not have any title page as such, ``Title Page'' means +the text near the most prominent appearance of the work's title, +preceding the beginning of the body of the text. + +The ``\textbf{publisher}'' means any person or entity that distributes +copies of the Document to the public. + +A section ``\textbf{Entitled XYZ}'' means a named subunit of the Document whose +title either is precisely XYZ or contains XYZ in parentheses following +text that translates XYZ in another language. (Here XYZ stands for a +specific section name mentioned below, such as ``\textbf{Acknowledgements}'', +``\textbf{Dedications}'', ``\textbf{Endorsements}'', or ``\textbf{History}''.) +To ``\textbf{Preserve the Title}'' +of such a section when you modify the Document means that it remains a +section ``Entitled XYZ'' according to this definition. + +The Document may include Warranty Disclaimers next to the notice which +states that this License applies to the Document. These Warranty +Disclaimers are considered to be included by reference in this +License, but only as regards disclaiming warranties: any other +implication that these Warranty Disclaimers may have is void and has +no effect on the meaning of this License. + + +\begin{center} + {\Large\bf 2. VERBATIM COPYING\par} + \phantomsection + \addcontentsline{toc}{section}{2. VERBATIM COPYING} +\end{center} + +You may copy and distribute the Document in any medium, either +commercially or noncommercially, provided that this License, the +copyright notices, and the license notice saying this License applies +to the Document are reproduced in all copies, and that you add no other +conditions whatsoever to those of this License. You may not use +technical measures to obstruct or control the reading or further +copying of the copies you make or distribute. However, you may accept +compensation in exchange for copies. If you distribute a large enough +number of copies you must also follow the conditions in section~3. + +You may also lend copies, under the same conditions stated above, and +you may publicly display copies. + + +\begin{center} + {\Large\bf 3. COPYING IN QUANTITY\par} + \phantomsection + \addcontentsline{toc}{section}{3. COPYING IN QUANTITY} +\end{center} + + +If you publish printed copies (or copies in media that commonly have +printed covers) of the Document, numbering more than 100, and the +Document's license notice requires Cover Texts, you must enclose the +copies in covers that carry, clearly and legibly, all these Cover +Texts: Front-Cover Texts on the front cover, and Back-Cover Texts on +the back cover. Both covers must also clearly and legibly identify +you as the publisher of these copies. The front cover must present +the full title with all words of the title equally prominent and +visible. You may add other material on the covers in addition. 
+Copying with changes limited to the covers, as long as they preserve +the title of the Document and satisfy these conditions, can be treated +as verbatim copying in other respects. + +If the required texts for either cover are too voluminous to fit +legibly, you should put the first ones listed (as many as fit +reasonably) on the actual cover, and continue the rest onto adjacent +pages. + +If you publish or distribute Opaque copies of the Document numbering +more than 100, you must either include a machine-readable Transparent +copy along with each Opaque copy, or state in or with each Opaque copy +a computer-network location from which the general network-using +public has access to download using public-standard network protocols +a complete Transparent copy of the Document, free of added material. +If you use the latter option, you must take reasonably prudent steps, +when you begin distribution of Opaque copies in quantity, to ensure +that this Transparent copy will remain thus accessible at the stated +location until at least one year after the last time you distribute an +Opaque copy (directly or through your agents or retailers) of that +edition to the public. + +It is requested, but not required, that you contact the authors of the +Document well before redistributing any large number of copies, to give +them a chance to provide you with an updated version of the Document. + + +\begin{center} + {\Large\bf 4. MODIFICATIONS\par} + \phantomsection + \addcontentsline{toc}{section}{4. MODIFICATIONS} +\end{center} + +You may copy and distribute a Modified Version of the Document under +the conditions of sections 2 and 3 above, provided that you release +the Modified Version under precisely this License, with the Modified +Version filling the role of the Document, thus licensing distribution +and modification of the Modified Version to whoever possesses a copy +of it. In addition, you must do these things in the Modified Version: + +\begin{itemize} + \item[A.] + Use in the Title Page (and on the covers, if any) a title distinct + from that of the Document, and from those of previous versions + (which should, if there were any, be listed in the History section + of the Document). You may use the same title as a previous version + if the original publisher of that version gives permission. + + \item[B.] + List on the Title Page, as authors, one or more persons or entities + responsible for authorship of the modifications in the Modified + Version, together with at least five of the principal authors of the + Document (all of its principal authors, if it has fewer than five), + unless they release you from this requirement. + + \item[C.] + State on the Title page the name of the publisher of the + Modified Version, as the publisher. + + \item[D.] + Preserve all the copyright notices of the Document. + + \item[E.] + Add an appropriate copyright notice for your modifications + adjacent to the other copyright notices. + + \item[F.] + Include, immediately after the copyright notices, a license notice + giving the public permission to use the Modified Version under the + terms of this License, in the form shown in the Addendum below. + + \item[G.] + Preserve in that license notice the full lists of Invariant Sections + and required Cover Texts given in the Document's license notice. + + \item[H.] + Include an unaltered copy of this License. + + \item[I.] 
+ Preserve the section Entitled ``History'', Preserve its Title, and add + to it an item stating at least the title, year, new authors, and + publisher of the Modified Version as given on the Title Page. If + there is no section Entitled ``History'' in the Document, create one + stating the title, year, authors, and publisher of the Document as + given on its Title Page, then add an item describing the Modified + Version as stated in the previous sentence. + + \item[J.] + Preserve the network location, if any, given in the Document for + public access to a Transparent copy of the Document, and likewise + the network locations given in the Document for previous versions + it was based on. These may be placed in the ``History'' section. + You may omit a network location for a work that was published at + least four years before the Document itself, or if the original + publisher of the version it refers to gives permission. + + \item[K.] + For any section Entitled ``Acknowledgements'' or ``Dedications'', + Preserve the Title of the section, and preserve in the section all + the substance and tone of each of the contributor acknowledgements + and/or dedications given therein. + + \item[L.] + Preserve all the Invariant Sections of the Document, + unaltered in their text and in their titles. Section numbers + or the equivalent are not considered part of the section titles. + + \item[M.] + Delete any section Entitled ``Endorsements''. Such a section + may not be included in the Modified Version. + + \item[N.] + Do not retitle any existing section to be Entitled ``Endorsements'' + or to conflict in title with any Invariant Section. + + \item[O.] + Preserve any Warranty Disclaimers. +\end{itemize} + +If the Modified Version includes new front-matter sections or +appendices that qualify as Secondary Sections and contain no material +copied from the Document, you may at your option designate some or all +of these sections as invariant. To do this, add their titles to the +list of Invariant Sections in the Modified Version's license notice. +These titles must be distinct from any other section titles. + +You may add a section Entitled ``Endorsements'', provided it contains +nothing but endorsements of your Modified Version by various +parties---for example, statements of peer review or that the text has +been approved by an organization as the authoritative definition of a +standard. + +You may add a passage of up to five words as a Front-Cover Text, and a +passage of up to 25 words as a Back-Cover Text, to the end of the list +of Cover Texts in the Modified Version. Only one passage of +Front-Cover Text and one of Back-Cover Text may be added by (or +through arrangements made by) any one entity. If the Document already +includes a cover text for the same cover, previously added by you or +by arrangement made by the same entity you are acting on behalf of, +you may not add another; but you may replace the old one, on explicit +permission from the previous publisher that added the old one. + +The author(s) and publisher(s) of the Document do not by this License +give permission to use their names for publicity for or to assert or +imply endorsement of any Modified Version. + + +\begin{center} + {\Large\bf 5. COMBINING DOCUMENTS\par} + \phantomsection + \addcontentsline{toc}{section}{5. 
COMBINING DOCUMENTS} +\end{center} + + +You may combine the Document with other documents released under this +License, under the terms defined in section~4 above for modified +versions, provided that you include in the combination all of the +Invariant Sections of all of the original documents, unmodified, and +list them all as Invariant Sections of your combined work in its +license notice, and that you preserve all their Warranty Disclaimers. + +The combined work need only contain one copy of this License, and +multiple identical Invariant Sections may be replaced with a single +copy. If there are multiple Invariant Sections with the same name but +different contents, make the title of each such section unique by +adding at the end of it, in parentheses, the name of the original +author or publisher of that section if known, or else a unique number. +Make the same adjustment to the section titles in the list of +Invariant Sections in the license notice of the combined work. + +In the combination, you must combine any sections Entitled ``History'' +in the various original documents, forming one section Entitled +``History''; likewise combine any sections Entitled ``Acknowledgements'', +and any sections Entitled ``Dedications''. You must delete all sections +Entitled ``Endorsements''. + +\begin{center} + {\Large\bf 6. COLLECTIONS OF DOCUMENTS\par} + \phantomsection + \addcontentsline{toc}{section}{6. COLLECTIONS OF DOCUMENTS} +\end{center} + +You may make a collection consisting of the Document and other documents +released under this License, and replace the individual copies of this +License in the various documents with a single copy that is included in +the collection, provided that you follow the rules of this License for +verbatim copying of each of the documents in all other respects. + +You may extract a single document from such a collection, and distribute +it individually under this License, provided you insert a copy of this +License into the extracted document, and follow this License in all +other respects regarding verbatim copying of that document. + + +\begin{center} + {\Large\bf 7. AGGREGATION WITH INDEPENDENT WORKS\par} + \phantomsection + \addcontentsline{toc}{section}{7. AGGREGATION WITH INDEPENDENT WORKS} +\end{center} + + +A compilation of the Document or its derivatives with other separate +and independent documents or works, in or on a volume of a storage or +distribution medium, is called an ``aggregate'' if the copyright +resulting from the compilation is not used to limit the legal rights +of the compilation's users beyond what the individual works permit. +When the Document is included in an aggregate, this License does not +apply to the other works in the aggregate which are not themselves +derivative works of the Document. + +If the Cover Text requirement of section~3 is applicable to these +copies of the Document, then if the Document is less than one half of +the entire aggregate, the Document's Cover Texts may be placed on +covers that bracket the Document within the aggregate, or the +electronic equivalent of covers if the Document is in electronic form. +Otherwise they must appear on printed covers that bracket the whole +aggregate. + + +\begin{center} + {\Large\bf 8. TRANSLATION\par} + \phantomsection + \addcontentsline{toc}{section}{8. TRANSLATION} +\end{center} + + +Translation is considered a kind of modification, so you may +distribute translations of the Document under the terms of section~4. 
+Replacing Invariant Sections with translations requires special +permission from their copyright holders, but you may include +translations of some or all Invariant Sections in addition to the +original versions of these Invariant Sections. You may include a +translation of this License, and all the license notices in the +Document, and any Warranty Disclaimers, provided that you also include +the original English version of this License and the original versions +of those notices and disclaimers. In case of a disagreement between +the translation and the original version of this License or a notice +or disclaimer, the original version will prevail. + +If a section in the Document is Entitled ``Acknowledgements'', +``Dedications'', or ``History'', the requirement (section~4) to Preserve +its Title (section~1) will typically require changing the actual +title. + + +\begin{center} + {\Large\bf 9. TERMINATION\par} + \phantomsection + \addcontentsline{toc}{section}{9. TERMINATION} +\end{center} + + +You may not copy, modify, sublicense, or distribute the Document +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense, or distribute it is void, and +will automatically terminate your rights under this License. + +However, if you cease all violation of this License, then your license +from a particular copyright holder is reinstated (a) provisionally, +unless and until the copyright holder explicitly and finally +terminates your license, and (b) permanently, if the copyright holder +fails to notify you of the violation by some reasonable means prior to +60 days after the cessation. + +Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + +Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, receipt of a copy of some or all of the same material does +not give you any rights to use it. + + +\begin{center} + {\Large\bf 10. FUTURE REVISIONS OF THIS LICENSE\par} + \phantomsection + \addcontentsline{toc}{section}{10. FUTURE REVISIONS OF THIS LICENSE} +\end{center} + + +The Free Software Foundation may publish new, revised versions +of the GNU Free Documentation License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. See +\texttt{https://www.gnu.org/licenses/}. + +Each version of the License is given a distinguishing version number. +If the Document specifies that a particular numbered version of this +License ``or any later version'' applies to it, you have the option of +following the terms and conditions either of that specified version or +of any later version that has been published (not as a draft) by the +Free Software Foundation. If the Document does not specify a version +number of this License, you may choose any version ever published (not +as a draft) by the Free Software Foundation. 
If the Document +specifies that a proxy can decide which future versions of this +License can be used, that proxy's public statement of acceptance of a +version permanently authorizes you to choose that version for the +Document. + + +\begin{center} + {\Large\bf 11. RELICENSING\par} + \phantomsection + \addcontentsline{toc}{section}{11. RELICENSING} +\end{center} + + +``Massive Multiauthor Collaboration Site'' (or ``MMC Site'') means any +World Wide Web server that publishes copyrightable works and also +provides prominent facilities for anybody to edit those works. A +public wiki that anybody can edit is an example of such a server. A +``Massive Multiauthor Collaboration'' (or ``MMC'') contained in the +site means any set of copyrightable works thus published on the MMC +site. + +``CC-BY-SA'' means the Creative Commons Attribution-Share Alike 3.0 +license published by Creative Commons Corporation, a not-for-profit +corporation with a principal place of business in San Francisco, +California, as well as future copyleft versions of that license +published by that same organization. + +``Incorporate'' means to publish or republish a Document, in whole or +in part, as part of another Document. + +An MMC is ``eligible for relicensing'' if it is licensed under this +License, and if all works that were first published under this License +somewhere other than this MMC, and subsequently incorporated in whole +or in part into the MMC, (1) had no cover texts or invariant sections, +and (2) were thus incorporated prior to November 1, 2008. + +The operator of an MMC Site may republish an MMC contained in the site +under CC-BY-SA on the same site at any time before August 1, 2009, +provided the MMC is eligible for relicensing. diff --git a/documentation/chapters/objectives.tex b/documentation/chapters/objectives.tex new file mode 100644 index 0000000..56fe1db --- /dev/null +++ b/documentation/chapters/objectives.tex @@ -0,0 +1,10 @@ +%File: obiettivi.tex +%Data creazione: 31/12/2020 +%Data ultima modifica: 31/12/2020 + +\chapter{Project objectives} +\section{Included features} +\section{Process requirements} +\subsection{Validity constraints} +\subsection{Optimality criteria} +\section{Form di apply} diff --git a/documentation/chapters/overview.tex b/documentation/chapters/overview.tex new file mode 100644 index 0000000..50db622 --- /dev/null +++ b/documentation/chapters/overview.tex @@ -0,0 +1,8 @@ +%File: panoramica.tex +%Data creazione: 31/12/2020 +%Data ultima modifica: 31/12/2020 + +\chapter{Overview} +\section{Introduction} +\section{About this document} +\section{Periodic review} \ No newline at end of file diff --git a/documentation/chapters/security.tex b/documentation/chapters/security.tex new file mode 100644 index 0000000..3223e4d --- /dev/null +++ b/documentation/chapters/security.tex @@ -0,0 +1,159 @@ +%File: sicurezza.tex +%Data creazione: 31/12/2020 +%Data ultima modifica: 31/12/2020 + + +\chapter{Security} + +\section{Authentication} +Authentication is \textit{"the process of verifying a claim that a system entity or system resource has a certain attribute value"} (RFC-4949, Internet security glossary). +According the NIST SP800.63B digital authentication model (see Figure \ref{DAM}), an actor who wants to use a system is called an \textit{applicant}: if it possesses an authenticator it can provide it to the \textbf{CSP} (Credential Service Provider), or it can get one. 
The CSP is the component that issues or enrols the user's credential and authenticator, and that verifies and stores the associated attributes.
+When this procedure is completed successfully, the actor becomes a \textit{subscriber}, that is, an entity recorded in the authentication system. Later, when the actor wants to use some network service, the actor is typically called a \textit{claimant}, because they claim to be a valid user: generally, an authentication protocol is run against a \textbf{verifier}, who checks this claim. When this process ends successfully, the actor becomes a subscriber with an open authenticated session with the \textbf{relying party}, which requests and receives an authentication (authN) assertion from the verifier to assess the user's identity (and attributes). The relying party is the end application server, which requires the actor to be authenticated. The verifier may communicate with the CSP to validate the binding between the authenticator used in the authentication protocol and the claimed credential.
+
+These roles may be separate or collapsed together. For the purposes of this project, several options have been considered for managing authentication. Each of the following subsections describes a different solution with its benefits and drawbacks.
+
+\begin{figure}[h]
+    \centering
+    \includegraphics[width=0.7\textwidth]{DAM.png}
+    \caption{NIST SP800.63B digital authentication model}
+    \label{DAM}
+\end{figure}
+
+\subsection{Custom login with standalone implementation}
+In this case the verifier and the CSP are embedded in the API server, which is also the relying party.
+\subsubsection*{Direct use of tokens and sessions}
+%Pros and cons
+\begin{itemize}
+    \item \textbf{Pros:}
+    \begin{itemize}
+        \item greater implementation flexibility;
+        \item less dependence on external services (hosting platform only).
+    \end{itemize}
+    \item \textbf{Cons:}
+    \begin{itemize}
+        \item double implementation required for direct login and social login (the latter is also required for internal members);
+        \item authentication and related security mechanisms not at the level of dedicated professional platforms.
+    \end{itemize}
+\end{itemize}
+
+\subsubsection*{Use of external libraries (e.g. Passport)}
+%Pros and cons
+\begin{itemize}
+    \item \textbf{Pros:}
+    \begin{itemize}
+        \item less dependence on external services (hosting platform only);
+        \item built-in support for social login (Google, LinkedIn).
+    \end{itemize}
+    \item \textbf{Cons:}
+    \begin{itemize}
+        \item improved architecture compared to direct use of tokens, but with security features (e.g. rate limiting) still delegated to the developer.
+    \end{itemize}
+\end{itemize}
+
+\subsection{Social login provider}
+In this case the verifier and the CSP are external to the API server: their functionalities are provided by the social provider of choice (e.g. Google, LinkedIn).
+
+%Pros and cons
+\begin{itemize}
+    \item \textbf{Pros:}
+    \begin{itemize}
+        \item authentication and security measures delegated to external services;
+        \item professional-grade authentication and related security systems;
+        \item Delegated Identity Management (authentication data management and the related security issues are delegated).
+    \end{itemize}
+    \item \textbf{Cons:}
+    \begin{itemize}
+        \item need to integrate several providers in order to offer users multiple authentication alternatives;
+        \item no simple registration to the WebApp is possible: users are bound to have an account with at least one of the supported providers.
This issue does not arise for HKN members, given their association email address, but it is potentially limiting for applicants.
+    \end{itemize}
+\end{itemize}
+
+\subsection{Third-party custom service (Auth0, Amazon Cognito, Okta)}
+In this case the verifier is the third-party authentication server, while the CSP is that same entity in the case of traditional login with a reusable password, or the social provider when social login is used.
+%Pros and cons
+\begin{itemize}
+    \item \textbf{Pros:}
+    \begin{itemize}
+        \item authentication and security measures delegated to external services;
+        \item professional-grade authentication and related security systems;
+        \item Delegated Identity Management (authentication data management and the related security issues are delegated);
+        \item possibility of access via a social login provider (Google).
+    \end{itemize}
+    \item \textbf{Cons:}
+    \begin{itemize}
+        \item customization is possible, but limited;
+        \item limit of 7000 monthly active users on the free plan;
+        \item double dependency on external platforms (hosting platform and authentication service).
+    \end{itemize}
+\end{itemize}
+
+\subsection{Our choice}
+Based on the motivations above, our choice is to use a third-party custom service. For this purpose, we compared the two major solutions available on the market: Auth0 and Amazon Cognito.
+
+Amazon Cognito and Auth0 are both authentication tools. They are most commonly used by developers to implement authentication in the mobile or web applications being built.
+Amazon Cognito is used across company sizes, particularly by companies that already live in the Amazon tech ecosystem. It’s ideal for 1st-party applications built for in-house use. In contrast, Auth0 is most commonly used by smaller organizations or teams, particularly those that can make use of the tool’s free version. Auth0 excels at helping these teams implement and manage authentication across services or apps, or across multiple clients.
+\subsection*{Features}
+Amazon Cognito and Auth0 focus on serving distinct audiences, and they emphasize different feature strengths accordingly.
+Amazon Cognito stands out for its use in Amazon environments, although it is still a strong option beyond Amazon apps. It’s also ideal for managing authentication across multiple internally facing or internally used tools. \textbf{This means that Cognito is often the first choice for internal applications built on Amazon infrastructure.} It also provides a range of sign-on capabilities, including integration with 3rd-party ID providers like Microsoft Active Directory.
+\textbf{Auth0 provides stronger support and features for smaller-scale teams and companies.} For instance, it provides excellent documentation, as well as a mix of prebuilt and customizable authentication methods. \textbf{Auth0 is also much cheaper to initially get off the ground, with a robust free version that can suffice for very small use cases.} It also provides a more user-friendly administrative interface relative to other authentication providers.
+\subsection*{Limitations}
+Amazon Cognito and Auth0 also each have some limitations worth considering.
+\textbf{Amazon Cognito is less accessible to smaller or less advanced developers.} While it offers more advanced features, those capabilities lack sufficient documentation for some reviewers, which can create a \textbf{longer and more intensive learning curve}, even among skilled developers. It also makes customization more complex to develop and implement than comparable authentication products.
+
+On the other hand, Auth0 is less scalable for midsize and large companies. Its customizability at higher levels is much more limited, both in terms of functionality and branding or design. Auth0’s pricing structure also makes it less ideal for companies as they scale up. Companies should ensure that the initial pricing structure is efficient for their needs, and make sure that they don’t scale out of cost-efficiency as they grow.
+
+\subsection{Single Sign-On with Auth0 and authentication flow}
+Single Sign-On (SSO) authentication is now more in demand than ever. Nowadays, almost every website requires some form of authentication to access its features and content. With the number of websites and services rising, a centralized login system has become a necessity.
+Sooner or later web development teams face one problem: you have developed an application at domain X and now you want your new deployment at domain Y to use the same login information as the other domain. In fact, you want more: you want users who are already logged in at domain X to be already logged in at domain Y. This is what SSO is all about.
+Whenever users go to a domain that requires authentication, they are redirected to the authentication domain. As users are already logged in at that domain, they can be immediately redirected to the original domain with the necessary authentication token.
+The Auth0 Single Sign-On (SSO) solution works as a bridge between different SSO frameworks: Figure \ref{fig:SSOn} describes the mechanism. For more details, see the official article \href{https://auth0.com/blog/what-is-and-how-does-single-sign-on-work/}{\textit{What Is and How Does Single Sign-On Authentication Work?}}
+
+\begin{figure}[ht]
+    \centering
+    \includegraphics[width=0.9\textwidth]{auth0.png}
+
+    \caption{Auth0 Single Sign-On (SSO) solution}
+    \label{fig:SSOn}
+\end{figure}
+
+\subsection*{What happens when an application is composed of a React front-end and an API server}
+When, as in this project, the application is a \textit{Single-Page Application} communicating with an API server, the authentication system is different from the classical one in which the server communicates with the verifier to authenticate the user. With machine-to-machine (M2M) applications, such as CLIs, daemons, or services running on your back-end, the system authenticates and authorizes the app rather than a user. For this scenario, typical authentication schemes like username + password or social logins don't make sense. Instead, M2M apps use the Client Credentials Flow (defined in OAuth 2.0 RFC 6749, section 4.4), in which they pass along their Client ID and Client Secret to authenticate themselves and get a token. Figure \ref{fig:ClientCredentialFlowAuth0} shows the Client Credentials Flow in Auth0: for more details see \href{https://auth0.com/docs/flows/client-credentials-flow}{\textit{Client Credentials Flow}}. An important detail to point out is that the API server contacts Auth0 in order to verify the authenticity and integrity of the token received from the front-end app: for more details see \href{https://auth0.com/docs/tokens/json-web-tokens/validate-json-web-tokens}{\textit{Validate JSON Web Tokens}}.
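+
+As a concrete illustration of this last point, the token check on the API server could be implemented with a Passport JWT strategy backed by the Auth0 JWKS endpoint. The following sketch is written in TypeScript for NestJS and uses the \texttt{passport-jwt} and \texttt{jwks-rsa} packages; the tenant domain and audience values are placeholders, and the strategy actually used in the project's authentication module may differ in its details.
+\begin{lstlisting}[language=JavaScript]
+// Sketch only: validate Auth0-issued JWTs against the tenant's JWKS.
+// The domain and audience below are placeholders, not real configuration.
+import { Injectable } from '@nestjs/common';
+import { PassportStrategy } from '@nestjs/passport';
+import { ExtractJwt, Strategy } from 'passport-jwt';
+import { passportJwtSecret } from 'jwks-rsa';
+
+@Injectable()
+export class JwtStrategy extends PassportStrategy(Strategy) {
+  constructor() {
+    super({
+      // Fetch and cache the signing key published by Auth0
+      secretOrKeyProvider: passportJwtSecret({
+        cache: true,
+        rateLimit: true,
+        jwksRequestsPerMinute: 5,
+        jwksUri: 'https://YOUR_TENANT.eu.auth0.com/.well-known/jwks.json',
+      }),
+      jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
+      audience: 'YOUR_API_AUDIENCE',
+      issuer: 'https://YOUR_TENANT.eu.auth0.com/',
+      algorithms: ['RS256'],
+    });
+  }
+
+  // The decoded payload (see the claims discussed below) becomes request.user
+  validate(payload: unknown) {
+    return payload;
+  }
+}
+\end{lstlisting}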
+
+\begin{figure}[ht]
+    \centering
+    \includegraphics[width=0.9\textwidth]{auth-sequence-client-credentials.png}
+
+    \caption{Client Credentials Flow in Auth0}
+    \label{fig:ClientCredentialFlowAuth0}
+\end{figure}
+
+\subsection*{What does an Auth0 JWT contain?}
+JSON Web Token (JWT) claims are pieces of information asserted about a subject. In a JWT, a claim appears as a name/value pair where the name is always a string and the value can be any JSON value. More on this subject can be found on the \href{https://auth0.com/docs/tokens/json-web-tokens/json-web-token-claims}{\textit{JSON Web Token Claims}} webpage.
+
+By default, a token received by the front-end application will contain at least the following claims:
+\begin{lstlisting}[language=json]
+{
+  "name": "John Doe",
+  "nickname": "john.doe",
+  "picture": "https://myawesomeavatar.com/avatar.png",
+  "updated_at": "2017-03-30T15:13:40.474Z",
+  "email": "john.doe@test.com",
+  "email_verified": false,
+  "sub": "auth0|USER-ID"
+}
+\end{lstlisting}
+Among these, three claims are important for our purposes:
+\begin{itemize}
+    \item \texttt{email}: the email address used to sign up or log in. During the registration phase its value can be used as a default. Please note that there is no constraint preventing a user from later changing their email address: in that case the new address will be used whenever it is necessary to contact the user, but authentication will still use the address provided during sign-up;
+    \item \texttt{email\_verified}: indicates whether the user has verified the email address provided during sign-up. This value can be used by the client and the API server as an additional check that the user is a real one, for example by refusing to proceed with registration if the value is \texttt{false};
+    \item \texttt{sub}: used as the user identifier inside our system.
+\end{itemize}
+
+\subsection{How this impacts our system}
+When a user logs in, the system must understand whether they are a member or an applicant. This means that this information has to be available at login time and then confirmed by looking into the database. Because the login form is not entirely under our control, this is not trivial.
+After considering several possibilities, the easiest and fastest one is to impose the following rules on user registration:
+\begin{itemize}
+    \item Member users can sign up only by using their official HKN email address, either through the ``sign up with Google'' option or by entering the email address directly. Any other email address associated with the registration of a member will lead to an error;
+    \item Applicants can use any supported method. A registration of an applicant with an HKN email address will lead to an error;
+    \item Since it is now possible for the same person to be associated with two different accounts with different email addresses, and therefore with two different entries in the person table, the uniqueness constraint on the \texttt{phone\_no} attribute must be changed: two different members or two different applicants cannot share the same \texttt{phone\_no}.
+\end{itemize}
+In this way, the system is able to determine whether a user is a member or an applicant by looking only at the email address, which is contained inside the JSON Web Token (JWT) exchanged with Auth0.
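+
+As a minimal illustration of this rule, the role of a logged-in user could be derived from the token with a helper along the following lines (TypeScript sketch; the helper name, types and return values are illustrative assumptions, not the actual implementation):
+\begin{lstlisting}[language=JavaScript]
+// Sketch only: classify a logged-in user from the JWT email claim.
+type JwtClaims = { email: string; email_verified: boolean; sub: string };
+
+const HKN_DOMAIN = '@hknpolito.org';
+
+export function classifyUser(claims: JwtClaims): 'member' | 'applicant' {
+  // Members are recognised by the association email domain; the result
+  // must then be confirmed against the database, as explained above.
+  return claims.email.endsWith(HKN_DOMAIN) ? 'member' : 'applicant';
+}
+\end{lstlisting}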
\ No newline at end of file diff --git a/documentation/chapters/specifications.tex b/documentation/chapters/specifications.tex new file mode 100644 index 0000000..933533f --- /dev/null +++ b/documentation/chapters/specifications.tex @@ -0,0 +1,17 @@ +%File: specifications.tex +%Data creazione: 31/12/2020 +%Data ultima modifica: 31/12/2020 + +\chapter{Technical specifications} + +\section{Information model} +\input{chapters/specifications/models.tex} + +\section{Database schema} + +\section{RESTful APIs} +\input{chapters/specifications/routes.tex} + +\section{Software architecture} + +\section{Integration with Google RESTful APIs} diff --git a/documentation/chapters/specifications/models.tex b/documentation/chapters/specifications/models.tex new file mode 100644 index 0000000..5ae4b9c --- /dev/null +++ b/documentation/chapters/specifications/models.tex @@ -0,0 +1,30 @@ +\subsection{Enums} + +\subsubsection{ApplicationState} +\begin{itemize} + \item New: "new" + \item Accepted: "accepted" + \item Rejected: "rejected" + \item Confirmed: "confirmed" + \item Finalized: "finalized" + \item RefusedByApplicant: "refused\_by\_applicant" +\end{itemize} + + +\subsubsection{ApplicationType} +\begin{itemize} + \item BSC: "bsc" + \item MSC: "msc" + \item PHD: "phd" +\end{itemize} + + +\subsubsection{LangLevel} +\begin{itemize} + \item B2: "B2" + \item C1: "C1" + \item C2: "C2" + \item NativeSpeaker: "native\_speaker" +\end{itemize} + +\subsection{DTOs} diff --git a/documentation/chapters/specifications/routes.tex b/documentation/chapters/specifications/routes.tex new file mode 100644 index 0000000..99c5e93 --- /dev/null +++ b/documentation/chapters/specifications/routes.tex @@ -0,0 +1,88 @@ +% GET /applications + +\subsection{\texttt{\textcolor{blue}{GET} /v1/applications}} + +Returns a list of all applications. +If any query parameters are provided, applications are filtered accordingly. +The response includes only applications and their details that are authorized for the requesting user. + +\subsubsection{Query Parameters} +\begin{itemize} + \item \textbf{submittedFrom} [Date] (optional): Start date of the time period for filtering applications. + \item \textbf{submittedUntil} [Date] (optional) : End date of the time period for filtering applications. + \item \textbf{state} [ApplicationState] (optional): Retrieve only applications with this state. +\end{itemize} + +\subsubsection{Path Variables} +None + +\subsubsection{Request Body} +None + +\subsubsection{Response Body} +Array of \textbf{ApplicationResponseDto} objects. + +% GET /applications/:application_id + +\subsection{\texttt{\textcolor{blue}{GET} /v1/applications/:application\_id }} + +Returns the details of a specific application identified by its ID. + +\subsubsection{Query Parameters} +None + +\subsubsection{Path Variables} +\begin{itemize} + \item \textbf{application\_id} [Integer]: The ID of the application to retrieve. +\end{itemize} + +\subsubsection{Request Body} +None + +\subsubsection{Response Body} +An \textbf{ApplicationResponseDto} object representing the application details. + +% POST /applications + +\subsection{\texttt{\textcolor{blue}{POST} /v1/applications}} + +Submits a new application for the logged-in user. +The newly created application state is set to \textbf{ApplicationState.New}. +% TODO: A new Interview is created and associated to the application. +The operation will fail if the applicant already has a pending application. 
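+
+As a hedged illustration of how the API might accept such a submission (the request body and file fields are detailed in the subsections below), a NestJS controller could rely on Multer's \texttt{FileFieldsInterceptor}; the controller and parameter names here are illustrative, not the project's actual implementation.
+\begin{lstlisting}[language=JavaScript]
+// Sketch only: accept a multipart/form-data submission with a required "cv"
+// file and an optional "grades" file, plus the application fields in the body.
+import { Body, Controller, Post, UploadedFiles, UseInterceptors } from '@nestjs/common';
+import { FileFieldsInterceptor } from '@nestjs/platform-express';
+
+@Controller('v1/applications')
+export class ApplicationsControllerSketch {
+  @Post()
+  @UseInterceptors(
+    FileFieldsInterceptor([
+      { name: 'cv', maxCount: 1 },
+      { name: 'grades', maxCount: 1 },
+    ]),
+  )
+  create(
+    @Body() dto: Record<string, unknown>, // CreateApplicationDto in the real API
+    @UploadedFiles()
+    files: { cv?: Express.Multer.File[]; grades?: Express.Multer.File[] },
+  ) {
+    // The service layer would validate the DTO, store the files and create
+    // the application with state ApplicationState.New.
+    return { receivedFiles: Object.keys(files) };
+  }
+}
+\end{lstlisting}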
+
+\subsubsection{Query Parameters}
+None
+
+\subsubsection{Path Variables}
+None
+
+\subsubsection{Request Body}
+A \textbf{CreateApplicationDto} object.
+The body of the request must be encoded as \textbf{multipart/form-data}.
+The CV file and the optional grades file must be uploaded as two form fields named ``cv'' and ``grades'' respectively.
+
+\subsubsection{Response Body}
+The newly created \textbf{Application} object.
+
+% PATCH /v1/applications/:application_id
+
+\subsection{\texttt{\textcolor{blue}{PATCH} /v1/applications/:application\_id}}
+
+Updates the details of an existing application identified by its ID.
+If the user is an applicant, they can only update the application state to \textbf{ApplicationState.RefusedByApplicant}.
+Other registered users can update the application state and notes.
+
+\subsubsection{Query Parameters}
+None
+
+\subsubsection{Path Variables}
+\begin{itemize}
+    \item \textbf{application\_id} [Integer]: The ID of the application to update.
+\end{itemize}
+
+\subsubsection{Request Body}
+An \textbf{UpdateApplicationDto} object.
+
+\subsubsection{Response Body}
+An \textbf{ApplicationResponseDto} object representing the updated application.
\ No newline at end of file
diff --git a/documentation/chapters/testing.tex b/documentation/chapters/testing.tex
new file mode 100644
index 0000000..3325fb3
--- /dev/null
+++ b/documentation/chapters/testing.tex
@@ -0,0 +1,7 @@
+%File: testing.tex
+%Created: 31/12/2020
+%Last modified: 31/12/2020
+
+\chapter{Testing}
+\section{Techniques and tools}
+\section{Test coverage}
\ No newline at end of file
diff --git a/documentation/ext_sections/preliminary.tex b/documentation/ext_sections/preliminary.tex
new file mode 100644
index 0000000..0a7dd5a
--- /dev/null
+++ b/documentation/ext_sections/preliminary.tex
@@ -0,0 +1,13 @@
+%File: preliminary.tex
+%Created: 31/12/2020
+%Last modified: 31/12/2020
+
+\chapter*{Preliminary notes}
+Copyright (C) 2021 HKNPolito.
+
+Permission is granted to copy, distribute and/or modify this document
+under the terms of the GNU Free Documentation License, Version 1.3
+or any later version published by the Free Software Foundation;
+with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts.
+A copy of the license is included in Appendix~\ref{appendix:GNU} entitled ``GNU
+Free Documentation License''.
\ No newline at end of file diff --git a/documentation/ext_sections/versions.tex b/documentation/ext_sections/versions.tex new file mode 100644 index 0000000..516702a --- /dev/null +++ b/documentation/ext_sections/versions.tex @@ -0,0 +1,18 @@ +%File: versioni.tex +%Data creazione: 31/12/2020 +%Data ultima modifica: 31/12/2020 + +\chapter*{Versions and revisions of this document } + +\begin{table}[hb] + \centering + \begin{tabular}{llp{0.2\textwidth}p{0.4\textwidth}} + \toprule + Version & Date & Edited by & Change description \\ + \midrule + 1.0 & 31/12/2020 & Riccardo Zaccone & Document creation \\ + \bottomrule + \end{tabular} + \caption{Project specification version history} + \label{tab:change_history} +\end{table} \ No newline at end of file diff --git a/documentation/hkrecruitment-documentation.tex b/documentation/hkrecruitment-documentation.tex new file mode 100644 index 0000000..9876579 --- /dev/null +++ b/documentation/hkrecruitment-documentation.tex @@ -0,0 +1,123 @@ +\documentclass[11pt]{report} +\usepackage[english]{babel} +\usepackage[T1]{fontenc} +\usepackage[utf8]{inputenc} +\usepackage{graphicx} +\usepackage{fancyhdr} +\usepackage{vmargin} +\usepackage{hyperref} +\usepackage{subfig} +\usepackage{booktabs} +\usepackage{longtable} +\setmarginsrb{2.5 cm}{2.5 cm}{2.5 cm}{2.5 cm}{1 cm}{1.5 cm}{1 cm}{1.5 cm} +\graphicspath{{images/}} + +\usepackage[dvipsnames]{xcolor} +\usepackage{listings} +\lstdefinelanguage{json}{ + basicstyle=\scriptsize\ttfamily, + string=[s]{"}{"}, + stringstyle=\color{blue}, + comment=[l]{:}, + commentstyle=\color{black}, +} + +\lstdefinelanguage{JavaScript}{ + basicstyle=\scriptsize\ttfamily, + keywords={typeof, new, true, false, catch, function, return, null, catch, switch, var, if, in, while, do, else, case, break, const, let}, + keywordstyle=\color{blue}\bfseries, + ndkeywords={class, export, boolean, throw, implements, import, this}, + ndkeywordstyle=\color{darkgray}\bfseries, + identifierstyle=\color{black}, + sensitive=false, + comment=[l]{//}, + morecomment=[s]{/*}{*/}, + commentstyle=\color{ForestGreen}\ttfamily, + stringstyle=\color{red}\ttfamily, + morestring=[b]', + morestring=[b]" +} + +\title{HKRecruitment Project Specifications} % Title +%\author{Riccardo Zaccone} % Author +\author{Riccardo Zaccone \\ Arianna Ravera} % Authors +\date{\today} % Date +%\date{08 novembre 2020} % Date + +\makeatletter +\let\thetitle\@title +\let\theauthor\@author +\let\thedate\@date +\makeatother + +\hypersetup{ + pdftitle={\thetitle}, + pdfauthor={\theauthor}, + pdfsubject={}, + pdfkeywords={}, + hidelinks} + +\pagestyle{fancy} +\fancyhf{} +\lhead{\thetitle} +\cfoot{\thepage} + +\begin{document} + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +\begin{titlepage} + \centering + \vspace*{0.5 cm} + \includegraphics[scale = 0.2]{hkn_logo.pdf}\\[1.0 cm] % HKN Logo + \textsc{\LARGE HKN PoliTo $\mid$ Mu Nu Chapter of IEEE-HKN}\\[-0.2 cm] % Name + \rule{\linewidth}{0.2 mm} \\ + \textsc{\large Politecnico di Torino IEEE Student Branch}\\[1.0 cm] % Branch Name + \textsc{\Large Area IT}\\[0.5 cm] % Area + \rule{\linewidth}{0.2 mm} \\[0.4 cm] + { \huge \bfseries \thetitle}\\ + \rule{\linewidth}{0.2 mm} \\%[1 cm] + \textsc{\Large Design and implementation of a RESTful web-application for recruitment process management}\\[0 cm] + \textsc{}\\[0.4 cm] + + \begin{minipage}{0.4\textwidth} + \begin{flushleft} \large + \emph{Autore:}\\ + \theauthor + \end{flushleft} + \end{minipage}~ + \begin{minipage}{0.4\textwidth} + 
\begin{flushright} \large
+        \emph{Affiliazione:} \\
+        Member \\
+        Member \\
+        %Member \\
+    \end{flushright}
+    \end{minipage}\\[2.0 cm]
+
+    {\large Versione 1.0 \\ \thedate}\\[2 cm]
+    \vfill
+
+\end{titlepage}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\include{ext_sections/preliminary}
+\tableofcontents
+\pagebreak
+
+
+\include{ext_sections/versions}
+\include{chapters/overview}
+\include{chapters/objectives}
+\include{chapters/security}
+\include{chapters/specifications}
+\include{chapters/testing}
+
+
+\appendix
+\include{appendix/GNUFreeDocumentationLicense}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+\end{document}
\ No newline at end of file
diff --git a/documentation/images/DAM.png b/documentation/images/DAM.png
new file mode 100644
index 0000000..d64a3c2
Binary files /dev/null and b/documentation/images/DAM.png differ
diff --git a/documentation/images/TS-JS types.jpg b/documentation/images/TS-JS types.jpg
new file mode 100644
index 0000000..8745ec3
Binary files /dev/null and b/documentation/images/TS-JS types.jpg differ
diff --git a/documentation/images/auth-sequence-client-credentials.png b/documentation/images/auth-sequence-client-credentials.png
new file mode 100644
index 0000000..78d5e76
Binary files /dev/null and b/documentation/images/auth-sequence-client-credentials.png differ
diff --git a/documentation/images/auth0.png b/documentation/images/auth0.png
new file mode 100644
index 0000000..1d0b9d8
Binary files /dev/null and b/documentation/images/auth0.png differ
diff --git a/frontend/src/ApiRequests.tsx b/frontend/src/ApiRequests.tsx
new file mode 100644
index 0000000..e8b19c3
--- /dev/null
+++ b/frontend/src/ApiRequests.tsx
@@ -0,0 +1,44 @@
+// Generic helper: performs a request against the API and parses the JSON response.
+async function apiRequest(endpoint: string, how: string, params?: any) {
+  const body = how === "GET" ? undefined : JSON.stringify(params);
+  const response = await fetch(endpoint, { method: how, body: body });
+  const data = await response.json();
+  return data;
+}
+
+export async function getApplicants() {
+  return await apiRequest("/v1/applications", "GET");
+}
+
+export async function getUsers() {
+  return await apiRequest("/v1/users", "GET");
+}
+
+// Assumptions
+
+export async function getInterviewsByDates(startDate: string, endDate: string) {
+  return await apiRequest(
+    `/v1/interviews?startDate=${startDate}&endDate=${endDate}`,
+    "GET"
+  );
+}
+
+export async function getInterviewsByDate(date: string) {
+  return await apiRequest(`/v1/interviews?date=${date}`, "GET");
+}
+
+/*
+const GetApplicants = ({ name }) => {
+  const [count, setCount] = React.useState(0);
+
+  return (
+
+

Hello, {name}!

+

Count: {count}

+ +
+ ); +}; + +export default GetApplicants; +*/ diff --git a/frontend/src/App.css b/frontend/src/App.css index cee9b92..7c12824 100644 --- a/frontend/src/App.css +++ b/frontend/src/App.css @@ -23,6 +23,10 @@ font-size: calc(10px + 2vmin); color: black; } +.active { + background: green !important; + color: white !important; +} .App-link { color: #61dafb; @@ -48,17 +52,17 @@ background-color: #f9f9f9; } .table { - --bs-table-bg: rgba(55, 81, 113, 0.8) !important; + --bs-table-bg: rgba(55, 81, 113, 0.8) !important ; --bs-table-accent-bg: transparent; --bs-table-striped-color: white !important; - --bs-table-striped-bg: rgba(55, 81, 113, 0.8) !important; + --bs-table-striped-bg: rgba(55, 81, 113, 0.8) !important ; --bs-table-active-color: white !important; --bs-table-active-bg: rgba(0, 0, 0, 0.1); - --bs-table-hover-color: white !important; + --bs-table-hover-color: white !important ; --bs-table-hover-bg: rgba(0, 0, 0, 0.075); width: 100%; margin-bottom: 1rem; - color: white !important; + color: white important!; vertical-align: top; border-color: #dee2e6; } diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index c02f654..709f43b 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -10,6 +10,10 @@ import { useAuth0 } from "@auth0/auth0-react"; import AvaiabilitiesTable from "./AvaiabilitiesTable"; function App() { + function parseJwt(token) { + return JSON.parse(Buffer.from(token.split(".")[1], "base64").toString()); + } + const { isLoading, isAuthenticated, @@ -20,22 +24,34 @@ function App() { getAccessTokenSilently, } = useAuth0(); + const [accessToken, setAccessToken] = useState(""); + useEffect(() => { if (isAuthenticated) { getAccessTokenSilently({ audience: import.meta.env.VITE_AUTH0_AUDIENCE, grant_type: "client_credentials", }).then((token) => { - console.log(`Token: ${token}`); + setAccessToken(parseJwt(token)); }); } }, [isAuthenticated]); + if (accessToken === "") { + return
Loading...
; + } + return ( } + element={ + + } /> } /> @@ -51,7 +67,7 @@ function AfterLogin(props) { {props.isAuthenticated && !props.user.email.endsWith("@hknpolito.org") && } {props.isAuthenticated && props.user.email.endsWith("@hknpolito.org") && ( - + )} ); diff --git a/frontend/src/AvaiabilitiesCell.jsx b/frontend/src/AvaiabilitiesCell.jsx index 1d46dca..d48b762 100644 --- a/frontend/src/AvaiabilitiesCell.jsx +++ b/frontend/src/AvaiabilitiesCell.jsx @@ -1,12 +1,19 @@ import React, { useState, useEffect } from "react"; function AvaiabilitiesCell(props) { - const [className, setClassName] = useState(""); + const [className, setClassName] = useState("name"); return ( -
setClassName("active")}> + + className == "name active" + ? setClassName("name") + : setClassName("name active") + } + className={className} + > {props.name} -
+ ); } export default AvaiabilitiesCell; diff --git a/frontend/src/AvaiabilitiesTable.jsx b/frontend/src/AvaiabilitiesTable.tsx similarity index 74% rename from frontend/src/AvaiabilitiesTable.jsx rename to frontend/src/AvaiabilitiesTable.tsx index 319f60e..712b46b 100644 --- a/frontend/src/AvaiabilitiesTable.jsx +++ b/frontend/src/AvaiabilitiesTable.tsx @@ -4,14 +4,48 @@ import Row from "react-bootstrap/Row"; import Col from "react-bootstrap/Col"; import AvaiabilitiesCell from "./AvaiabilitiesCell"; import moment from "moment"; - +import { useState, useEffect } from "react"; import { createUserSchema } from "@hkrecruitment/shared"; +import React from "react"; +import { getApplicants, getUsers, getInterviewsByDates } from "./ApiRequests"; function AvaiabilitiesTable(props) { - let start = "2014-09-08T08:02:17"; - let duration = 45; - let end = "20.00"; + //const start = "2014-09-08T08:00:00"; + const step = 45; + //const end = "20.00"; + + /* Abbiamo queste entità + * - application (id, applicantId, submission, state, lastModified, notes, cv, itaLevel) + * - bscapplication (bscStudyPath, bscAcademicYear, bscGradesAvg, cfu, grades) + * - mscapplication (mscStudyPath, mscGradesAvg, mscAcademicYear) + * - phdapplication (phdDescription) + * - user (oauthId, firstName, lastName, sex, email, phone_no, telegramId, role) + */ + + // All'apertura di questa pagina, viene impostata la data ad oggi + + const [startDate, setStartDate] = useState(new Date()); + + // Suppongo che interviews restituisca un array di colloqui fissati in un certo periodo, ciascuno con data e ora + const [interviews, setInterviews] = useState(null); + useEffect(() => { + if (startDate !== null) { + setInterviews( + getInterviewsByDates(startDate.getDate(), startDate.getDate() + 7) + ); + } + }, [startDate]); + + // Crea una matrice 16x7 a partire dalle interviste + let fill = [[], []]; + if (interviews !== null) { + for (let interview in interviews.sort()) { + // TO-DO + } + } + + /* let fill = [ [ "persona1, persona2,...", @@ -142,11 +176,14 @@ function AvaiabilitiesTable(props) { "persona1, persona2,...", ], ]; - + let timestamp = moment(start); for (let row of fill) { - row.unshift(moment(start).format("hh:mm")); + row.unshift(timestamp.format("HH:mm")); + timestamp = timestamp.add(step, "m"); } + */ + return ( @@ -168,9 +205,7 @@ function AvaiabilitiesTable(props) { {fill.map((names, index) => ( {names.map((name, index) => ( - - - + ))} ))} diff --git a/frontend/src/LoginButton.jsx b/frontend/src/LoginButton.tsx similarity index 100% rename from frontend/src/LoginButton.jsx rename to frontend/src/LoginButton.tsx diff --git a/frontend/src/index.css b/frontend/src/index.css index 4a1df4d..7183f7b 100644 --- a/frontend/src/index.css +++ b/frontend/src/index.css @@ -11,3 +11,7 @@ code { font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace; } + +.active { + color: green; +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5227d75..c25b86d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,5 +1,9 @@ lockfileVersion: '6.0' +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + importers: .: {} @@ -17,7 +21,7 @@ importers: version: link:../shared '@nestjs/common': specifier: ^9.0.0 - version: 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + version: 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) '@nestjs/config': specifier: ^2.2.0 version: 2.2.0(@nestjs/common@9.2.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) @@ -32,7 +36,7 @@ importers: version: 
9.2.1(@nestjs/common@9.2.1)(@nestjs/core@9.2.1) '@nestjs/swagger': specifier: ^6.1.3 - version: 6.1.3(@fastify/static@6.6.0)(@nestjs/common@9.2.1)(@nestjs/core@9.2.1)(reflect-metadata@0.1.13) + version: 6.1.3(@fastify/static@6.6.0)(@nestjs/common@9.2.1)(@nestjs/core@9.2.1)(class-transformer@0.5.1)(reflect-metadata@0.1.13) '@nestjs/typeorm': specifier: ^9.0.1 version: 9.0.1(@nestjs/common@9.2.1)(@nestjs/core@9.2.1)(reflect-metadata@0.1.13)(rxjs@7.5.7)(typeorm@0.3.11) @@ -42,9 +46,18 @@ importers: '@types/passport-jwt': specifier: ^3.0.7 version: 3.0.7 + class-transformer: + specifier: ^0.5.1 + version: 0.5.1 dotenv: specifier: ^16.0.3 version: 16.0.3 + google-auth-library: + specifier: ^8.7.0 + version: 8.7.0 + googleapis: + specifier: ^118.0.0 + version: 118.0.0 joi: specifier: ^17.7.0 version: 17.7.0 @@ -87,7 +100,7 @@ importers: version: 9.1.5(@swc/core@1.3.56) '@nestjs/schematics': specifier: ^9.0.0 - version: 9.0.3(chokidar@3.5.3)(typescript@4.8.4) + version: 9.0.3(typescript@4.5.2) '@nestjs/testing': specifier: ^9.0.0 version: 9.2.1(@nestjs/common@9.2.1)(@nestjs/core@9.2.1)(@nestjs/platform-express@9.2.1) @@ -103,6 +116,9 @@ importers: '@types/jest': specifier: 28.1.8 version: 28.1.8 + '@types/multer': + specifier: ^1.4.7 + version: 1.4.7 '@types/node': specifier: ^16.11.10 version: 16.18.4 @@ -230,6 +246,9 @@ importers: '@casl/ability': specifier: ^6.3.3 version: 6.3.3 + '@joi/date': + specifier: ^2.1.0 + version: 2.1.0 joi: specifier: ^17.7.0 version: 17.7.0 @@ -1112,6 +1131,7 @@ packages: /@gar/promisify@1.1.3: resolution: {integrity: sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==} + requiresBuild: true optional: true /@golevelup/ts-jest@0.3.6: @@ -1396,6 +1416,12 @@ packages: chalk: 4.1.2 dev: true + /@joi/date@2.1.0: + resolution: {integrity: sha512-2zN5m0LgxZp/cynHGbzEImVmFIa+n+IOb/Nlw5LX/PLJneeCwG1NbiGw7MvPjsAKUGQK8z31Nn6V6lEN+4fZhg==} + dependencies: + moment: 2.29.4 + dev: false + /@jridgewell/gen-mapping@0.1.1: resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} engines: {node: '>=6.0.0'} @@ -1487,7 +1513,7 @@ packages: - webpack-cli dev: true - /@nestjs/common@9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7): + /@nestjs/common@9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7): resolution: {integrity: sha512-nZuo3oDsSSlC5mti/M2aCWTEIfHPGDXmBwWgPeCpRbrNz3IWd109rkajll+yxgidVjznAdBS9y00JkAVJblNYw==} peerDependencies: cache-manager: <=5 @@ -1503,6 +1529,7 @@ packages: class-validator: optional: true dependencies: + class-transformer: 0.5.1 iterare: 1.2.1 reflect-metadata: 0.1.13 rxjs: 7.5.7 @@ -1516,7 +1543,7 @@ packages: reflect-metadata: ^0.1.13 rxjs: ^6.0.0 || ^7.2.0 dependencies: - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) dotenv: 16.0.1 dotenv-expand: 8.0.3 lodash: 4.17.21 @@ -1543,7 +1570,7 @@ packages: '@nestjs/websockets': optional: true dependencies: - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) '@nestjs/platform-express': 9.2.1(@nestjs/common@9.2.1)(@nestjs/core@9.2.1) '@nuxtjs/opencollective': 0.3.2 fast-safe-stringify: 2.1.1 @@ -1557,7 +1584,7 @@ packages: transitivePeerDependencies: - encoding - /@nestjs/mapped-types@1.2.0(@nestjs/common@9.2.1)(reflect-metadata@0.1.13): + 
/@nestjs/mapped-types@1.2.0(@nestjs/common@9.2.1)(class-transformer@0.5.1)(reflect-metadata@0.1.13): resolution: {integrity: sha512-NTFwPZkQWsArQH8QSyFWGZvJ08gR+R4TofglqZoihn/vU+ktHEJjMqsIsADwb7XD97DhiD+TVv5ac+jG33BHrg==} peerDependencies: '@nestjs/common': ^7.0.8 || ^8.0.0 || ^9.0.0 @@ -1570,7 +1597,8 @@ packages: class-validator: optional: true dependencies: - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) + class-transformer: 0.5.1 reflect-metadata: 0.1.13 dev: false @@ -1580,7 +1608,7 @@ packages: '@nestjs/common': ^8.0.0 || ^9.0.0 passport: ^0.4.0 || ^0.5.0 || ^0.6.0 dependencies: - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) passport: 0.6.0 dev: false @@ -1590,7 +1618,7 @@ packages: '@nestjs/common': ^9.0.0 '@nestjs/core': ^9.0.0 dependencies: - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) '@nestjs/core': 9.2.1(@nestjs/common@9.2.1)(@nestjs/platform-express@9.2.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) body-parser: 1.20.1 cors: 2.8.5 @@ -1615,7 +1643,22 @@ packages: - chokidar dev: true - /@nestjs/swagger@6.1.3(@fastify/static@6.6.0)(@nestjs/common@9.2.1)(@nestjs/core@9.2.1)(reflect-metadata@0.1.13): + /@nestjs/schematics@9.0.3(typescript@4.5.2): + resolution: {integrity: sha512-kZrU/lrpVd2cnK8I3ibDb3Wi1ppl3wX3U3lVWoL+DzRRoezWKkh8upEL4q0koKmuXnsmLiu3UPxFeMOrJV7TSA==} + peerDependencies: + typescript: ^4.3.5 + dependencies: + '@angular-devkit/core': 14.2.1(chokidar@3.5.3) + '@angular-devkit/schematics': 14.2.1(chokidar@3.5.3) + fs-extra: 10.1.0 + jsonc-parser: 3.2.0 + pluralize: 8.0.0 + typescript: 4.5.2 + transitivePeerDependencies: + - chokidar + dev: true + + /@nestjs/swagger@6.1.3(@fastify/static@6.6.0)(@nestjs/common@9.2.1)(@nestjs/core@9.2.1)(class-transformer@0.5.1)(reflect-metadata@0.1.13): resolution: {integrity: sha512-H9C/yRgLFb5QrAt6iGrYmIX9X7Q0zXkgZaTNUATljUBra+RCWrEUbLHBcGjTAOtcIyGV/vmyCLv68YSVcZoE0Q==} peerDependencies: '@fastify/static': ^6.0.0 @@ -1627,9 +1670,9 @@ packages: optional: true dependencies: '@fastify/static': 6.6.0 - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) '@nestjs/core': 9.2.1(@nestjs/common@9.2.1)(@nestjs/platform-express@9.2.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) - '@nestjs/mapped-types': 1.2.0(@nestjs/common@9.2.1)(reflect-metadata@0.1.13) + '@nestjs/mapped-types': 1.2.0(@nestjs/common@9.2.1)(class-transformer@0.5.1)(reflect-metadata@0.1.13) js-yaml: 4.1.0 lodash: 4.17.21 path-to-regexp: 3.2.0 @@ -1653,7 +1696,7 @@ packages: '@nestjs/platform-express': optional: true dependencies: - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) '@nestjs/core': 9.2.1(@nestjs/common@9.2.1)(@nestjs/platform-express@9.2.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) '@nestjs/platform-express': 9.2.1(@nestjs/common@9.2.1)(@nestjs/core@9.2.1) tslib: 2.4.1 @@ -1668,7 +1711,7 @@ packages: rxjs: ^7.2.0 typeorm: ^0.3.0 dependencies: - '@nestjs/common': 9.2.1(reflect-metadata@0.1.13)(rxjs@7.5.7) + '@nestjs/common': 9.2.1(class-transformer@0.5.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) '@nestjs/core': 
9.2.1(@nestjs/common@9.2.1)(@nestjs/platform-express@9.2.1)(reflect-metadata@0.1.13)(rxjs@7.5.7) reflect-metadata: 0.1.13 rxjs: 7.5.7 @@ -1699,6 +1742,7 @@ packages: /@npmcli/fs@1.1.1: resolution: {integrity: sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==} + requiresBuild: true dependencies: '@gar/promisify': 1.1.3 semver: 7.3.8 @@ -1708,6 +1752,7 @@ packages: resolution: {integrity: sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==} engines: {node: '>=10'} deprecated: This functionality has been moved to @npmcli/fs + requiresBuild: true dependencies: mkdirp: 1.0.4 rimraf: 3.0.2 @@ -1933,6 +1978,7 @@ packages: /@tootallnate/once@1.1.2: resolution: {integrity: sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==} engines: {node: '>= 6'} + requiresBuild: true optional: true /@tsconfig/node10@1.0.9: @@ -2066,6 +2112,12 @@ packages: /@types/mime@3.0.1: resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} + /@types/multer@1.4.7: + resolution: {integrity: sha512-/SNsDidUFCvqqcWDwxv2feww/yqhNeTRL5CVoL3jU4Goc4kKEL10T7Eye65ZqPNi4HRx8sAEX59pV1aEH7drNA==} + dependencies: + '@types/express': 4.17.14 + dev: true + /@types/node@16.18.4: resolution: {integrity: sha512-9qGjJ5GyShZjUfx2ArBIGM+xExdfLvvaCyQR0t6yRXKPcWCVYF/WemtX/uIU3r7FYECXRXkIiw2Vnhn6y8d+pw==} @@ -2495,6 +2547,7 @@ packages: /agentkeepalive@4.3.0: resolution: {integrity: sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg==} engines: {node: '>= 8.0.0'} + requiresBuild: true dependencies: debug: 4.3.4 depd: 2.0.0 @@ -2506,6 +2559,7 @@ packages: /aggregate-error@3.1.0: resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} engines: {node: '>=8'} + requiresBuild: true dependencies: clean-stack: 2.2.0 indent-string: 4.0.0 @@ -2613,6 +2667,7 @@ packages: /are-we-there-yet@3.0.1: resolution: {integrity: sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + requiresBuild: true dependencies: delegates: 1.0.0 readable-stream: 3.6.0 @@ -2638,6 +2693,11 @@ packages: engines: {node: '>=8'} dev: true + /arrify@2.0.1: + resolution: {integrity: sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==} + engines: {node: '>=8'} + dev: false + /asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} dev: true @@ -2728,6 +2788,10 @@ packages: /base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + /bignumber.js@9.1.1: + resolution: {integrity: sha512-pHm4LsMJ6lzgNGVfZHjMoO8sdoRhOzOH4MLmY65Jg70bpxCKu5iOHNJyfF6OyvYw7t8Fpf35RuzUyqnQsj8Vig==} + dev: false + /binary-extensions@2.2.0: resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} engines: {node: '>=8'} @@ -2870,6 +2934,7 @@ packages: /cacache@15.3.0: resolution: {integrity: sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==} engines: {node: '>= 10'} + requiresBuild: true dependencies: '@npmcli/fs': 1.1.1 '@npmcli/move-file': 1.1.2 @@ -2993,6 +3058,9 @@ packages: resolution: {integrity: 
sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==} dev: true + /class-transformer@0.5.1: + resolution: {integrity: sha512-SQa1Ws6hUbfC98vKGxZH3KFY0Y1lm5Zm0SY8XX9zbK7FJCyVEac3ATW0RIpwzW+oOfmHE5PMPufDG9hCfoEOMw==} + /classnames@2.3.2: resolution: {integrity: sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==} dev: false @@ -3007,6 +3075,7 @@ packages: /clean-stack@2.2.0: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} + requiresBuild: true optional: true /cli-cursor@3.1.0: @@ -3464,10 +3533,12 @@ packages: /env-paths@2.2.1: resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} engines: {node: '>=6'} + requiresBuild: true optional: true /err-code@2.0.3: resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} + requiresBuild: true optional: true /error-ex@1.3.2: @@ -3786,6 +3857,10 @@ packages: transitivePeerDependencies: - supports-color + /extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + dev: false + /external-editor@3.1.0: resolution: {integrity: sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==} engines: {node: '>=4'} @@ -4015,6 +4090,7 @@ packages: /gauge@4.0.4: resolution: {integrity: sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + requiresBuild: true dependencies: aproba: 2.0.0 color-support: 1.1.3 @@ -4026,6 +4102,30 @@ packages: wide-align: 1.1.5 optional: true + /gaxios@5.1.0: + resolution: {integrity: sha512-aezGIjb+/VfsJtIcHGcBSerNEDdfdHeMros+RbYbGpmonKWQCOVOes0LVZhn1lDtIgq55qq0HaxymIoae3Fl/A==} + engines: {node: '>=12'} + dependencies: + extend: 3.0.2 + https-proxy-agent: 5.0.1 + is-stream: 2.0.1 + node-fetch: 2.6.7 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + + /gcp-metadata@5.2.0: + resolution: {integrity: sha512-aFhhvvNycky2QyhG+dcfEdHBF0FRbYcf39s6WNHUDysKSrbJ5vuFbjydxBcmewtXeV248GP8dWT3ByPNxsyHCw==} + engines: {node: '>=12'} + dependencies: + gaxios: 5.1.0 + json-bigint: 1.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + /gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} @@ -4121,6 +4221,58 @@ packages: slash: 3.0.0 dev: true + /google-auth-library@8.7.0: + resolution: {integrity: sha512-1M0NG5VDIvJZEnstHbRdckLZESoJwguinwN8Dhae0j2ZKIQFIV63zxm6Fo6nM4xkgqUr2bbMtV5Dgo+Hy6oo0Q==} + engines: {node: '>=12'} + dependencies: + arrify: 2.0.1 + base64-js: 1.5.1 + ecdsa-sig-formatter: 1.0.11 + fast-text-encoding: 1.0.6 + gaxios: 5.1.0 + gcp-metadata: 5.2.0 + gtoken: 6.1.2 + jws: 4.0.0 + lru-cache: 6.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + + /google-p12-pem@4.0.1: + resolution: {integrity: sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==} + engines: {node: '>=12.0.0'} + hasBin: true + dependencies: + node-forge: 1.3.1 + dev: false + + /googleapis-common@6.0.4: + resolution: {integrity: 
sha512-m4ErxGE8unR1z0VajT6AYk3s6a9gIMM6EkDZfkPnES8joeOlEtFEJeF8IyZkb0tjPXkktUfYrE4b3Li1DNyOwA==} + engines: {node: '>=12.0.0'} + dependencies: + extend: 3.0.2 + gaxios: 5.1.0 + google-auth-library: 8.7.0 + qs: 6.11.0 + url-template: 2.0.8 + uuid: 9.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + + /googleapis@118.0.0: + resolution: {integrity: sha512-Ny6zJOGn5P/YDT6GQbJU6K0lSzEu4Yuxnsn45ZgBIeSQ1RM0FolEjUToLXquZd89DU9wUfqA5XYHPEctk1TFWg==} + engines: {node: '>=12.0.0'} + dependencies: + google-auth-library: 8.7.0 + googleapis-common: 6.0.4 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + /graceful-fs@4.2.10: resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} @@ -4128,6 +4280,18 @@ packages: resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} dev: true + /gtoken@6.1.2: + resolution: {integrity: sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==} + engines: {node: '>=12.0.0'} + dependencies: + gaxios: 5.1.0 + google-p12-pem: 4.0.1 + jws: 4.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + /has-flag@3.0.0: resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} engines: {node: '>=4'} @@ -4184,6 +4348,7 @@ packages: /http-cache-semantics@4.1.1: resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==} + requiresBuild: true optional: true /http-errors@2.0.0: @@ -4199,6 +4364,7 @@ packages: /http-proxy-agent@4.0.1: resolution: {integrity: sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==} engines: {node: '>= 6'} + requiresBuild: true dependencies: '@tootallnate/once': 1.1.2 agent-base: 6.0.2 @@ -4228,6 +4394,7 @@ packages: /humanize-ms@1.2.1: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + requiresBuild: true dependencies: ms: 2.1.3 optional: true @@ -4241,6 +4408,7 @@ packages: /iconv-lite@0.6.3: resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} + requiresBuild: true dependencies: safer-buffer: 2.1.2 optional: true @@ -4281,10 +4449,12 @@ packages: /indent-string@4.0.0: resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} engines: {node: '>=8'} + requiresBuild: true optional: true /infer-owner@1.0.4: resolution: {integrity: sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==} + requiresBuild: true optional: true /inflight@1.0.6: @@ -4349,6 +4519,7 @@ packages: /ip@2.0.0: resolution: {integrity: sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==} + requiresBuild: true optional: true /ipaddr.js@1.9.1: @@ -4400,6 +4571,7 @@ packages: /is-lambda@1.0.1: resolution: {integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==} + requiresBuild: true optional: true /is-number@7.0.0: @@ -4415,7 +4587,6 @@ packages: /is-stream@2.0.1: resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} engines: {node: '>=8'} - dev: true 
/is-unicode-supported@0.1.0: resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} @@ -4427,6 +4598,7 @@ packages: /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + requiresBuild: true /istanbul-lib-coverage@3.2.0: resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} @@ -4955,6 +5127,12 @@ packages: hasBin: true dev: true + /json-bigint@1.0.0: + resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==} + dependencies: + bignumber.js: 9.1.1 + dev: false + /json-parse-even-better-errors@2.3.1: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} @@ -5021,6 +5199,14 @@ packages: safe-buffer: 5.2.1 dev: false + /jwa@2.0.0: + resolution: {integrity: sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==} + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + dev: false + /jwks-rsa@3.0.0: resolution: {integrity: sha512-x9qNrP/kD6tOfrLzBVC5HaneBTR+fCEGIjwk/xSdl+KA7Tzf+R3oiY9ibrONKVLF9fR0V03enkitYPZkO65fAQ==} engines: {node: '>=14'} @@ -5042,6 +5228,13 @@ packages: safe-buffer: 5.2.1 dev: false + /jws@4.0.0: + resolution: {integrity: sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==} + dependencies: + jwa: 2.0.0 + safe-buffer: 5.2.1 + dev: false + /kleur@3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} @@ -5207,6 +5400,7 @@ packages: /make-fetch-happen@9.1.0: resolution: {integrity: sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==} engines: {node: '>= 10'} + requiresBuild: true dependencies: agentkeepalive: 4.3.0 cacache: 15.3.0 @@ -5312,6 +5506,7 @@ packages: /minipass-collect@1.0.2: resolution: {integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==} engines: {node: '>= 8'} + requiresBuild: true dependencies: minipass: 3.3.6 optional: true @@ -5319,6 +5514,7 @@ packages: /minipass-fetch@1.4.1: resolution: {integrity: sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==} engines: {node: '>=8'} + requiresBuild: true dependencies: minipass: 3.3.6 minipass-sized: 1.0.3 @@ -5330,6 +5526,7 @@ packages: /minipass-flush@1.0.5: resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==} engines: {node: '>= 8'} + requiresBuild: true dependencies: minipass: 3.3.6 optional: true @@ -5337,6 +5534,7 @@ packages: /minipass-pipeline@1.2.4: resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==} engines: {node: '>=8'} + requiresBuild: true dependencies: minipass: 3.3.6 optional: true @@ -5344,6 +5542,7 @@ packages: /minipass-sized@1.0.3: resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==} engines: {node: '>=8'} + requiresBuild: true dependencies: minipass: 3.3.6 optional: true @@ -5471,6 +5670,11 @@ packages: dependencies: whatwg-url: 5.0.0 + /node-forge@1.3.1: + resolution: {integrity: 
sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==} + engines: {node: '>= 6.13.0'} + dev: false + /node-gyp@8.4.1: resolution: {integrity: sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==} engines: {node: '>= 10.12.0'} @@ -5564,6 +5768,7 @@ packages: /npmlog@6.0.2: resolution: {integrity: sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + requiresBuild: true dependencies: are-we-there-yet: 3.0.1 console-control-strings: 1.1.0 @@ -5676,6 +5881,7 @@ packages: /p-map@4.0.0: resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} engines: {node: '>=10'} + requiresBuild: true dependencies: aggregate-error: 3.1.0 optional: true @@ -5946,6 +6152,7 @@ packages: /promise-inflight@1.0.1: resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + requiresBuild: true peerDependencies: bluebird: '*' peerDependenciesMeta: @@ -5960,6 +6167,7 @@ packages: /promise-retry@2.0.1: resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==} engines: {node: '>=10'} + requiresBuild: true dependencies: err-code: 2.0.3 retry: 0.12.0 @@ -6265,6 +6473,7 @@ packages: /retry@0.12.0: resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} engines: {node: '>= 4'} + requiresBuild: true optional: true /reusify@1.0.4: @@ -6457,11 +6666,13 @@ packages: /smart-buffer@4.2.0: resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + requiresBuild: true optional: true /socks-proxy-agent@6.2.1: resolution: {integrity: sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==} engines: {node: '>= 10'} + requiresBuild: true dependencies: agent-base: 6.0.2 debug: 4.3.4 @@ -6473,6 +6684,7 @@ packages: /socks@2.7.1: resolution: {integrity: sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==} engines: {node: '>= 10.13.0', npm: '>= 3.0.0'} + requiresBuild: true dependencies: ip: 2.0.0 smart-buffer: 4.2.0 @@ -6538,6 +6750,7 @@ packages: /ssri@8.0.1: resolution: {integrity: sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==} engines: {node: '>= 8'} + requiresBuild: true dependencies: minipass: 3.3.6 optional: true @@ -7151,12 +7364,14 @@ packages: /unique-filename@1.1.1: resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==} + requiresBuild: true dependencies: unique-slug: 2.0.2 optional: true /unique-slug@2.0.2: resolution: {integrity: sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==} + requiresBuild: true dependencies: imurmurhash: 0.1.4 optional: true @@ -7196,6 +7411,10 @@ packages: dependencies: punycode: 2.1.1 + /url-template@2.0.8: + resolution: {integrity: sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==} + dev: false + /util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} diff --git a/shared/package.json 
b/shared/package.json index 21b7b06..9688ff6 100644 --- a/shared/package.json +++ b/shared/package.json @@ -21,6 +21,7 @@ }, "dependencies": { "@casl/ability": "^6.3.3", + "@joi/date": "^2.1.0", "joi": "^17.7.0" }, "jest": { diff --git a/shared/src/abilities.ts b/shared/src/abilities.ts index 915da9b..56a3e5d 100644 --- a/shared/src/abilities.ts +++ b/shared/src/abilities.ts @@ -6,6 +6,9 @@ import { subject, } from "@casl/ability"; import { applyAbilitiesForPerson, Person, Role } from "./person"; +import { Application, applyAbilitiesOnApplication } from "./application"; +import { applyAbilitiesOnAvailability, Availability } from "./availability"; +import { TimeSlot } from "./timeslot"; export interface UserAuth { sub: string; @@ -19,8 +22,12 @@ export enum Action { Update = "update", Delete = "delete", } -type SubjectsTypes = Partial<Person>; -type SubjectNames = "Person"; +type SubjectsTypes = + | Partial<Person> + | Partial<Application> + | Partial<Availability> + | Partial<TimeSlot>; +type SubjectNames = "Person" | "Application" | "Availability" | "TimeSlot"; export type Subjects = SubjectsTypes | SubjectNames; export type AppAbility = PureAbility<[Action, Subjects]>; @@ -35,6 +42,8 @@ export const abilityForUser = (user: UserAuth): AppAbility => { const builder = new AbilityBuilder(createMongoAbility); applyAbilitiesForPerson(user, builder); + applyAbilitiesOnApplication(user, builder); + applyAbilitiesOnAvailability(user, builder); const { build } = builder; return build(); @@ -44,11 +53,15 @@ export const checkAbility = ( ability: AppAbility, action: Action, subjectObj: SubjectsTypes, - subjectName: SubjectNames + subjectName: SubjectNames, + conditions: String[] = [] ): boolean => { const subj = subject(subjectName, subjectObj); + return ( ability.can(action, subj) && - Object.keys(subj).every((field) => ability.can(action, subj, field)) + Object.keys(subj) + .filter((field) => !conditions.includes(field)) + .every((field) => ability.can(action, subj, field)) ); }; diff --git a/shared/src/application.spec.ts b/shared/src/application.spec.ts new file mode 100644 index 0000000..5ec6783 --- /dev/null +++ b/shared/src/application.spec.ts @@ -0,0 +1,496 @@ +import { + Application, + createApplicationSchema, + updateApplicationSchema, + applyAbilitiesOnApplication, + LangLevel, + ApplicationState, + ApplicationType, + applicationsConfig, +} from "./application"; +import { createMockAbility } from "./abilities.spec"; +import { Action, UserAuth, checkAbility } from "./abilities"; +import { Role } from "./person"; + +describe("Application", () => { + const mockApplication: Partial<Application> = { + notes: "Notes", + cv: { + encoding: "7bit", + mimetype: "application/pdf", + size: 0, + }, + itaLevel: LangLevel.B2, + }; + + const mockBscApplication: Partial<Application> = { + ...mockApplication, + type: ApplicationType.BSC, + bscApplication: { + bscStudyPath: "Electronic Engineering", + bscAcademicYear: 1, + bscGradesAvg: 27.8, + cfu: 50, + }, + }; + + const mockMscApplication: Partial<Application> = { + ...mockApplication, + type: ApplicationType.MSC, + mscApplication: { + bscStudyPath: "Electronic Engineering", + bscGradesAvg: 28.9, + mscStudyPath: "Electronic Engineering II", + mscAcademicYear: 1, + mscGradesAvg: 28.6, + cfu: 65, + }, + }; + + const mockPhdApplication: Partial<Application> = { + ...mockApplication, + type: ApplicationType.PHD, + phdApplication: { + mscStudyPath: "Electronic Engineering II", + phdDescription: "Electronic Engineering III", + }, + }; + + const mockApplications = [ + mockBscApplication, + mockMscApplication, + mockPhdApplication, + ] + // Define toString method
for each mockApplication, so as to print it in the test name + .map((account) => + Object.assign(account, { + toString: function () { + return this.type; + }, + }) + ); + + describe("createApplicationSchema", () => { + it.each(mockApplications)( + `should allow a valid %s application`, + (mockApplication) => { + expect( + createApplicationSchema.validate(mockApplication) + ).not.toHaveProperty("error"); + } + ); + + it("should allow to not set optional fields", () => { + const application: Partial<Application> = { + ...mockMscApplication, + notes: undefined, + }; + expect(createApplicationSchema.validate(application)).not.toHaveProperty( + "error" + ); + }); + + it.each(Object.values(ApplicationType))( + `should require application-specific field for %s applications`, + (applicationType) => { + const { error } = createApplicationSchema.validate({ + ...mockApplication, + type: applicationType, + }); + expect(error).toBeDefined(); + expect(error.message).toMatch( + `\"${applicationType}Application\" is required` + ); + } + ); + + it("should require all required fields", () => { + const application: Partial<Application> = {}; + const requiredFields = ["type", "itaLevel"]; + const { error } = createApplicationSchema.validate(application); + expect(error).toBeDefined(); + for (let requiredField of requiredFields) + expect(error.message).toMatch(`\"${requiredField}\" is required`); + }); + + it("should require required bscApplication-specific fields", () => { + const application = { + ...mockApplication, + type: ApplicationType.BSC, + bscApplication: {}, + }; + const requiredFields = [ + "bscStudyPath", + "bscAcademicYear", + "bscGradesAvg", + "cfu", + ]; + const { error } = createApplicationSchema.validate(application); + expect(error).toBeDefined(); + for (let requiredField of requiredFields) + expect(error.message).toMatch( + new RegExp(`\"bscApplication.${requiredField}\\" is required`) + ); + }); + + it("should require required mscApplication-specific fields", () => { + const application = { + ...mockApplication, + type: ApplicationType.MSC, + mscApplication: {}, + }; + const requiredFields = [ + "mscStudyPath", + "mscGradesAvg", + "mscAcademicYear", + "cfu", + ]; + const optionalFields = ["bscStudyPath", "bscGradesAvg"]; + const { error } = createApplicationSchema.validate(application); + expect(error).toBeDefined(); + for (let requiredField of requiredFields) + expect(error.message).toMatch( + new RegExp(`\"mscApplication.${requiredField}\\" is required`) + ); + for (let optionalField of optionalFields) + expect(error.message).not.toMatch( + new RegExp(`\"mscApplication.${optionalField}\\" is required`) + ); + }); + + it("should not accept Bsc cfu, years, and grades lower than minimum threshold", () => { + const application: Partial<Application> = { + ...mockApplication, + type: ApplicationType.BSC, + bscApplication: { + bscStudyPath: "Electronic Engineering", + bscAcademicYear: -1, + bscGradesAvg: -1, + cfu: -1, + }, + }; + const expectedMinValues = { + bscAcademicYear: applicationsConfig.BSC.MIN_ACADEMIC_YEAR, + bscGradesAvg: applicationsConfig.BSC.MIN_GRADE, + cfu: applicationsConfig.BSC.MIN_CFU, + }; + const { error } = createApplicationSchema.validate(application); + expect(error).toBeDefined(); + for (let [field, minValue] of Object.entries(expectedMinValues)) + expect(error.message).toMatch( + new RegExp( + `\"bscApplication.${field}\\" must be greater than or equal to ${minValue}` + ) + ); + }); + + it("should not accept Msc cfu, years, and grades lower than minimum threshold", () => { + const application: Partial<Application> = { +
...mockApplication, + type: ApplicationType.MSC, + mscApplication: { + bscStudyPath: "Electronic Engineering", + mscStudyPath: "Electronic Engineering II", + bscGradesAvg: -1, + mscGradesAvg: -1, + mscAcademicYear: -1, + cfu: -1, + }, + }; + const expectedMinValues = { + bscGradesAvg: applicationsConfig.BSC.MIN_GRADE, + mscGradesAvg: applicationsConfig.MSC.MIN_GRADE, + mscAcademicYear: applicationsConfig.MSC.MIN_ACADEMIC_YEAR, + cfu: applicationsConfig.MSC.MIN_CFU, + }; + const { error } = createApplicationSchema.validate(application); + expect(error).toBeDefined(); + for (let [field, minValue] of Object.entries(expectedMinValues)) + expect(error.message).toMatch( + new RegExp( + `\"mscApplication.${field}\\" must be greater than or equal to ${minValue}` + ) + ); + }); + + it("should not accept Bsc cfu, years, and grades higher than maximum threshold", () => { + const application: Partial<Application> = { + ...mockApplication, + type: ApplicationType.BSC, + bscApplication: { + bscStudyPath: "Electronic Engineering", + bscAcademicYear: 999, + bscGradesAvg: 999, + cfu: 999, + }, + }; + const expectedMaxValues = { + bscAcademicYear: applicationsConfig.BSC.MAX_ACADEMIC_YEAR, + bscGradesAvg: applicationsConfig.BSC.MAX_GRADE, + cfu: applicationsConfig.BSC.MAX_CFU, + }; + const { error } = createApplicationSchema.validate(application); + expect(error).toBeDefined(); + for (let [field, maxValue] of Object.entries(expectedMaxValues)) + expect(error.message).toMatch( + new RegExp( + `\"bscApplication.${field}\\" must be less than or equal to ${maxValue}` + ) + ); + }); + + it("should not accept Msc cfu, years, and grades higher than maximum threshold", () => { + const application: Partial<Application> = { + ...mockApplication, + type: ApplicationType.MSC, + mscApplication: { + bscStudyPath: "Electronic Engineering", + mscStudyPath: "Electronic Engineering II", + bscGradesAvg: 999, + mscGradesAvg: 999, + mscAcademicYear: 999, + cfu: 999, + }, + }; + const expectedMaxValues = { + bscGradesAvg: applicationsConfig.BSC.MAX_GRADE, + mscGradesAvg: applicationsConfig.MSC.MAX_GRADE, + mscAcademicYear: applicationsConfig.MSC.MAX_ACADEMIC_YEAR, + cfu: applicationsConfig.MSC.MAX_CFU, + }; + const { error } = createApplicationSchema.validate(application); + expect(error).toBeDefined(); + for (let [field, maxValue] of Object.entries(expectedMaxValues)) + expect(error.message).toMatch( + new RegExp( + `\"mscApplication.${field}\\" must be less than or equal to ${maxValue}` + ) + ); + }); + + describe("updateApplicationSchema", () => { + it("should allow a valid update", () => { + const mockUpdate: Partial<Application> = { + notes: "NOTES", + state: ApplicationState.Finalized, + }; + expect(updateApplicationSchema.validate(mockUpdate)).not.toHaveProperty( + "error" + ); + }); + + it("should allow to not set optional fields", () => { + const mockUpdate: Partial<Application> = {}; + expect(updateApplicationSchema.validate(mockUpdate)).not.toHaveProperty( + "error" + ); + }); + }); + + describe("applyAbilitiesOnApplication", () => { + const mockAbilityForApplication = (user: UserAuth) => + createMockAbility((builder) => { + applyAbilitiesOnApplication(user, builder); + }); + + it("should allow admins to perform all operations (except delete) on applications", () => { + const mockAbility = mockAbilityForApplication({ + role: Role.Admin, + sub: "123", + }); + + const application = { + ...mockBscApplication, + applicantId: "456", + }; + + const expectedAllowedActions = Object.values(Action).filter( + (action) => action != Action.Delete + ); + for (const action of
expectedAllowedActions) + expect( + checkAbility(mockAbility, action, application, "Application") + ).toBe(true); + }); + + it("should allow to read own application", () => { + const mockAbility = mockAbilityForApplication({ + role: Role.Applicant, + sub: "123", + }); + + const application = { + ...mockBscApplication, + applicantId: "123", + }; + + expect( + checkAbility(mockAbility, Action.Read, application, "Application") + ).toBe(true); + }); + + it("should not allow non-members to read not own applications", () => { + const nonMemberRoles = [Role.None, Role.Applicant]; + for (const role of nonMemberRoles) { + const mockAbility = mockAbilityForApplication({ + role: role, + sub: "123", + }); + + const application = { + ...mockBscApplication, + applicantId: "567", + }; + + expect( + checkAbility(mockAbility, Action.Read, application, "Application") + ).toBe(false); + } + }); + + it("should allow only applicants to submit new applications", () => { + for (const role of Object.values(Role)) { + const mockAbility = mockAbilityForApplication({ + role: role, + sub: "123", + }); + + const application = { + type: ApplicationType.PHD, + itaLevel: LangLevel.B2, + phdApplication: { + mscStudyPath: "mscStudyPath", + phdDescription: "phdDescription", + }, + }; + + const expected = role == Role.Applicant || role == Role.Admin; + expect( + checkAbility(mockAbility, Action.Create, application, "Application") + ).toBe(expected); + } + }); + + it("should not allow applicants to submit invalid applications data", () => { + const mockAbility = mockAbilityForApplication({ + role: Role.Applicant, + sub: "123", + }); + + const application = { + ...mockBscApplication, + // Invalid application fields + state: ApplicationState.Finalized, + }; + + expect( + checkAbility(mockAbility, Action.Create, application, "Application") + ).toBe(false); + }); + + it("should allow to update own application", () => { + const mockAbility = mockAbilityForApplication({ + role: Role.Applicant, + sub: "123", + }); + + const application = { + state: ApplicationState.RefusedByApplicant, + applicantId: "123", + }; + + expect( + checkAbility(mockAbility, Action.Update, application, "Application", [ + "applicantId", + ]) + ).toBe(true); + }); + + it("should not allow to update other's application", () => { + const mockAbility = mockAbilityForApplication({ + role: Role.Applicant, + sub: "123", + }); + + const application = { + state: ApplicationState.RefusedByApplicant, + applicantId: "456", + }; + + expect( + checkAbility(mockAbility, Action.Update, application, "Application", [ + "applicantId", + ]) + ).toBe(false); + }); + + it("should allow members to update an application", () => { + const memberRoles = [Role.Clerk, Role.Member, Role.Supervisor]; + for (const memberRole of memberRoles) { + const mockAbility = mockAbilityForApplication({ + role: memberRole, + sub: "123", + }); + + const application = { + state: ApplicationState.Accepted, + notes: "No comments", + }; + + expect( + checkAbility( + mockAbility, + Action.Update, + application, + "Application", + ["applicantId"] + ) + ).toBe(true); + } + }); + + it("should not allow members to update invalid fields of an application", () => { + const memberRoles = [Role.Clerk, Role.Member, Role.Supervisor]; + for (const memberRole of memberRoles) { + const mockAbility = mockAbilityForApplication({ + role: memberRole, + sub: "123", + }); + + const application = { + itaLevel: LangLevel.B2, + type: ApplicationType.PHD, + }; + + expect( + checkAbility( + mockAbility, + Action.Update, + 
application, + "Application", + ["applicantId"] + ) + ).toBe(false); + } + }); + + it("should not allow anyone to delete applications", () => { + for (const role of Object.values(Role)) { + const mockAbility = mockAbilityForApplication({ + role: role, + sub: "123", + }); + + const application = { + ...mockBscApplication, + applicantId: "567", + }; + + expect( + checkAbility(mockAbility, Action.Delete, application, "Application") + ).toBe(false); + } + }); + }); + }); +}); diff --git a/shared/src/application.ts b/shared/src/application.ts new file mode 100644 index 0000000..c5683cf --- /dev/null +++ b/shared/src/application.ts @@ -0,0 +1,207 @@ +import { Action, ApplyAbilities } from "./abilities"; +import { Role } from "./person"; +import * as Joi from "joi"; + +export const applicationsConfig = { + BSC: { + MIN_GRADE: 18, + MAX_GRADE: 30, + MIN_CFU: 48, + MAX_CFU: 180, + MIN_ACADEMIC_YEAR: 1, + MAX_ACADEMIC_YEAR: 3, + }, + MSC: { + MIN_GRADE: 18, + MAX_GRADE: 30, + MIN_CFU: 20, + MAX_CFU: 120, + MIN_ACADEMIC_YEAR: 1, + MAX_ACADEMIC_YEAR: 2, + }, + PHD: { + DESC_LENGTH: 255, + }, +}; + +export enum ApplicationState { + New = "new", + Accepted = "accepted", + Rejected = "rejected", + Confirmed = "confirmed", + Finalized = "finalized", + RefusedByApplicant = "refused_by_applicant", +} + +export enum ApplicationType { + BSC = "bsc", + MSC = "msc", + PHD = "phd", +} + +export enum LangLevel { + B2 = "B2", + C1 = "C1", + C2 = "C2", + NativeSpeaker = "native_speaker", +} + +export interface Application { + type: ApplicationType; + id: number; + state: ApplicationState; + notes?: string; + cv: any; // CV file + grades?: any; // Grades file + itaLevel: LangLevel; + // TODO: Add slot + bscApplication?: BscApplication; + mscApplication?: MscApplication; + phdApplication?: PhdApplication; +} + +export interface BscApplication { + bscStudyPath: string; + bscAcademicYear: number; + bscGradesAvg: number; + cfu: number; +} + +export interface MscApplication { + bscStudyPath: string; + bscGradesAvg: number; + mscStudyPath: string; + mscGradesAvg: number; + mscAcademicYear: number; + cfu: number; +} + +export interface PhdApplication { + mscStudyPath: string; + phdDescription: string; +} + +/* Validation schemas */ + +const BaseApplication = Joi.object({ + type: Joi.string() + .valid(...Object.values(ApplicationType)) + .required(), + notes: Joi.string().optional(), + itaLevel: Joi.string() + .valid(...Object.values(LangLevel)) + .required(), +}); + +const createBscApplication = Joi.object({ + bscStudyPath: Joi.string().max(255).required(), + bscGradesAvg: Joi.number() + .min(applicationsConfig.BSC.MIN_GRADE) + .max(applicationsConfig.BSC.MAX_GRADE) + .required(), + bscAcademicYear: Joi.number() + .integer() + .min(applicationsConfig.BSC.MIN_ACADEMIC_YEAR) + .max(applicationsConfig.BSC.MAX_ACADEMIC_YEAR) + .required(), + cfu: Joi.number() + .integer() + .min(applicationsConfig.BSC.MIN_CFU) + .max(applicationsConfig.BSC.MAX_CFU) + .required(), +}); + +const createMscApplication = Joi.object({ + bscStudyPath: Joi.string().optional(), + bscGradesAvg: Joi.number() + .min(applicationsConfig.BSC.MIN_GRADE) + .max(applicationsConfig.BSC.MAX_GRADE) + .optional(), + mscStudyPath: Joi.string().required(), + mscGradesAvg: Joi.number() + .min(applicationsConfig.MSC.MIN_GRADE) + .max(applicationsConfig.MSC.MAX_GRADE) + .required(), + mscAcademicYear: Joi.number() + .integer() + .min(applicationsConfig.MSC.MIN_ACADEMIC_YEAR) + .max(applicationsConfig.MSC.MAX_ACADEMIC_YEAR) + .required(), + cfu: Joi.number() + .integer() 
+ .min(applicationsConfig.MSC.MIN_CFU) + .max(applicationsConfig.MSC.MAX_CFU) + .required(), +}); + +const createPhdApplication = Joi.object({ + mscStudyPath: Joi.string().required(), + phdDescription: Joi.string() + .max(applicationsConfig.PHD.DESC_LENGTH) + .required(), +}); + +export const createApplicationSchema = BaseApplication.keys({ + bscApplication: createBscApplication.when("type", { + is: "bsc", + then: Joi.required(), + otherwise: Joi.forbidden(), + }), + mscApplication: createMscApplication.when("type", { + is: "msc", + then: Joi.required(), + otherwise: Joi.forbidden(), + }), + phdApplication: createPhdApplication.when("type", { + is: "phd", + then: Joi.required(), + otherwise: Joi.forbidden(), + }), +}) + .label("type") + .options({ + stripUnknown: true, + abortEarly: false, + presence: "required", + }); + +export const updateApplicationSchema = Joi.object({ + notes: Joi.string().optional(), + state: Joi.string() + .valid(...Object.values(ApplicationState)) + .optional(), +}).options({ + stripUnknown: true, + abortEarly: false, + presence: "required", +}); + +/* Abilities */ + +export const applyAbilitiesOnApplication: ApplyAbilities = ( + user, + { can, cannot } +) => { + if (user.role === Role.Admin) { + can(Action.Manage, "Application"); // Admin can do anything on any application + } else if (user.role === Role.Applicant) { + can(Action.Read, "Application", { applicantId: user.sub }); + + can(Action.Create, "Application", [ + "type", + "notes", + "itaLevel", + "bscApplication", + "mscApplication", + "phdApplication", + ]); + + can(Action.Update, "Application", ["state"], { applicantId: user.sub }); + } else if (user.role !== Role.None) { + // Every other authenticated user can read and update applications + can(Action.Read, "Application"); + can(Action.Update, "Application", ["state", "notes"], {}); + } + + cannot(Action.Delete, "Application"); // No one can delete applications +}; diff --git a/shared/src/availability.spec.ts b/shared/src/availability.spec.ts new file mode 100644 index 0000000..212a245 --- /dev/null +++ b/shared/src/availability.spec.ts @@ -0,0 +1,40 @@ +import { + AvailabilityState, + AvailabilityType, + Availability, + updateAvailabilitySchema, +} from "./availability"; +import { createMockAbility } from "./abilities.spec"; +import { Action, UserAuth, checkAbility } from "./abilities"; +import { Role } from "./person"; + +describe("Availability", () => { + describe("updateAvailabilitySchema", () => { + it("should allow a valid update", () => { + const updateAvailability = { + state: AvailabilityState.Confirmed, + timeSlotId: 123, + }; + const { error } = updateAvailabilitySchema.validate(updateAvailability); + expect(error).toBeUndefined(); + }); + + it("should not allow updating with an invalid state", () => { + const updateAvailability = { + state: "Non_Existent_State", + timeSlotId: 123, + }; + const { error } = updateAvailabilitySchema.validate(updateAvailability); + expect(error).toBeDefined(); + }); + + it("should not allow updating with an invalid timeSlotId", () => { + const updateAvailability = { + state: AvailabilityState.Confirmed, + timeSlotId: -321, + }; + const { error } = updateAvailabilitySchema.validate(updateAvailability); + expect(error).toBeDefined(); + }); + }); +}); diff --git a/shared/src/availability.ts b/shared/src/availability.ts new file mode 100644 index 0000000..1cd0e38 --- /dev/null +++ b/shared/src/availability.ts @@ -0,0 +1,61 @@ +import { Action, ApplyAbilities } from "./abilities"; +import { Person, Role } from
"./person"; +import * as Joi from "joi"; + +export enum AvailabilityState { + Subscribed = "subscribed", + Confirmed = "confirmed", + Cancelled = "cancelled", +} + +export enum AvailabilityType { + Available = "available", + Unavailable = "unavailable", +} + +export interface Availability { + state: AvailabilityState; + timeSlotId: number; + member: Person; + // assignedAt?: Date; + // confirmedAt?: Date; + // cancelledAt?: Date; +} + +/* Validation schemas */ + +export const updateAvailabilitySchema = Joi.object({ + state: Joi.string() + .valid(...Object.values(AvailabilityType)) + .required(), + timeSlotId: Joi.number().positive().required(), +}).options({ + stripUnknown: true, + abortEarly: false, + presence: "required", +}); + +/* Abilities */ + +export const applyAbilitiesOnAvailability: ApplyAbilities = ( + user, + { can, cannot } +) => { + switch (user.role) { + case Role.Admin: + case Role.Supervisor: + can(Action.Manage, "Availability"); + break; + case Role.Member: + case Role.Clerk: + can(Action.Read, "Availability"); + can(Action.Update, "Availability", { userId: user.sub }); + break; + case Role.Applicant: + can(Action.Read, "Availability", { userId: user.sub }); + can(Action.Update, "Availability", { userId: user.sub }); + break; + default: + cannot(Action.Manage, "Availability"); + } +}; diff --git a/shared/src/index.ts b/shared/src/index.ts index 482f0ff..3402b34 100644 --- a/shared/src/index.ts +++ b/shared/src/index.ts @@ -1,2 +1,6 @@ export * from "./person"; export * from "./abilities"; +export * from "./application"; +export * from "./availability"; +export * from "./timeslot"; +export * from "./slot"; diff --git a/shared/src/slot.ts b/shared/src/slot.ts new file mode 100644 index 0000000..5b31ebd --- /dev/null +++ b/shared/src/slot.ts @@ -0,0 +1,16 @@ +import { TimeSlot } from "./timeslot"; + +export enum SlotState { + Free = "free", + Assigned = "assigned", + Rejected = "rejected", + Reserved = "reserved", +} + +export interface Slot { + state: SlotState; + timeSlot: TimeSlot; + calendarId?: string; +} + +/* Validation schemas */ diff --git a/shared/src/timeslot.ts b/shared/src/timeslot.ts new file mode 100644 index 0000000..fea1951 --- /dev/null +++ b/shared/src/timeslot.ts @@ -0,0 +1,47 @@ +import { Action, ApplyAbilities } from "./abilities"; +import { Role } from "./person"; +import DateExtension from "@joi/date"; +import * as Joi from "joi"; +const JoiDate = Joi.extend(DateExtension); + +// import BaseJoi from "joi"; +// const Joi = BaseJoi.extend(JoiDate); + +export interface TimeSlot { + start: Date; + end: Date; +} + +/* Validation schemas */ + +export const createTimeSlotSchema = Joi.object({ + start: JoiDate.date().format("YYYY-MM-DD HH:mm").required(), + end: JoiDate.date().format("YYYY-MM-DD HH:mm").required(), +}).options({ + stripUnknown: true, + abortEarly: false, + presence: "required", +}); + +/* Abilities */ + +export const applyAbilitiesOnTimeSlot: ApplyAbilities = ( + user, + { can, cannot } +) => { + can(Action.Manage, "TimeSlot"); + // switch (user.role) { + // case Role.Admin: + // case Role.Supervisor: + // case Role.Clerk: + // // TODO: Decide who can create/delete timeslots + // can(Action.Manage, "TimeSlot"); + // break; + // case Role.Member: + // case Role.Applicant: + // can(Action.Read, "TimeSlot"); + // break; + // default: + // cannot(Action.Manage, "TimeSlot"); + // } +};