diff --git a/.atlassian/OWNER b/.atlassian/OWNER new file mode 100644 index 0000000..60c0106 --- /dev/null +++ b/.atlassian/OWNER @@ -0,0 +1 @@ +sbland2 \ No newline at end of file diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000..9d35886 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,12 @@ +# Required to ignore subpackage's node_modules folders. +**/node_modules/**/* + +# Ignore all built artefacts. +**/dist/**/* + +# Sidekick container in bitbucket pipelines (authless pipelines) +.artifactory/* + +# UI +/ui/build/**/* +/ui/public/**/* diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000..a2df2b7 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,52 @@ +module.exports = { + extends: ['airbnb-base', 'plugin:@typescript-eslint/recommended', 'prettier'], + settings: { + 'import/extensions': ['.js', '.jsx', '.ts', '.tsx'], + 'import/resolver': { + node: { + extensions: ['.js', '.jsx', '.ts', '.tsx'], + }, + }, + 'import/external-module-folders': ['node_modules'], + }, + parser: '@typescript-eslint/parser', + plugins: ['prettier', '@typescript-eslint'], + rules: { + 'max-len': [ + 'warn', + { + code: 120, + }, + ], + 'import/extensions': 'off', + 'no-shadow': 'off', + '@typescript-eslint/no-shadow': ['error'], + 'no-restricted-syntax': 'off', + 'no-underscore-dangle': 'off', + 'no-await-in-loop': 'off', // https://softwareteams.atlassian.net/browse/COMPASS-2945 + 'import/no-extraneous-dependencies': [ + 'error', + { + devDependencies: ['**/*.test.ts', '**/*.test.tsx', '**/__tests__/**/*'], + }, + ], + 'import/no-unresolved': 'off', // https://softwareteams.atlassian.net/browse/COMPASS-2948 + 'react/react-in-jsx-scope': 'off', + 'react/jsx-filename-extension': 'off', + 'react/require-default-props': 'off', + 'import/prefer-default-export': 'off', + 'no-console': 'off', + 'prettier/prettier': [ + 'error', + { + singleQuote: true, + trailingComma: 'all', + tabWidth: 2, + jsxSingleQuote: true, + printWidth: 120, + }, + ], + 
'arrow-body-style': 'off', + 'prefer-arrow-callback': 'off', + }, +}; diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c3c6e6f --- /dev/null +++ b/.gitignore @@ -0,0 +1,56 @@ +# These are some examples of commonly ignored file patterns. +# You should customize this list as applicable to your project. +# Learn more about .gitignore: +# https://www.atlassian.com/git/tutorials/saving-changes/gitignore + +# Node artifact files +node_modules/ +dist/ + +# Compiled Java class files +*.class + +# Compiled Python bytecode +*.py[cod] + +# Log files +*.log + +# Package files +*.jar + +# Maven +target/ +dist/ + +# IDE +.idea/ +.vscode/ + +# Unit test reports +TEST*.xml + +# Generated by MacOS +.DS_Store + +# Generated by Windows +Thumbs.db + +# Applications +*.app +*.exe +*.war + +# Large media files +*.mp4 +*.tiff +*.avi +*.flv +*.mov +*.wmv + +# Env files +.env* + +# Test reports +test-results/ diff --git a/.husky/.gitignore b/.husky/.gitignore new file mode 100644 index 0000000..31354ec --- /dev/null +++ b/.husky/.gitignore @@ -0,0 +1 @@ +_ diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 0000000..d2ae35e --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/bin/sh +. 
"$(dirname "$0")/_/husky.sh" + +yarn lint-staged diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000..28c34d2 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +14.19.0 diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..fcd421e --- /dev/null +++ b/.prettierignore @@ -0,0 +1,4 @@ +.artifactory/* +ui/build +ui/public +src/generated diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 0000000..f59ef7e --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,7 @@ +{ + "singleQuote": true, + "trailingComma": "all", + "tabWidth": 2, + "jsxSingleQuote": true, + "printWidth": 120 +} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..1c8292d --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,27 @@ +# Contributor Code of Conduct + +As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. + +We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality. 
+ +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, such as physical or electronic addresses, without explicit permission +* Submitting contributions or comments that you know to violate the intellectual property or privacy rights of others +* Other unethical or unprofessional conduct + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. +By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer. Complaints will result in a response and be reviewed and investigated in a way that is deemed necessary and appropriate to the circumstances. Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident. 
+ +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.3.0, available at [http://contributor-covenant.org/version/1/3/0/][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/3/0/ \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..9ddd8dd --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing to gitlab-for-compass + +Thank you for considering a contribution to [Project name]! Pull requests, issues and comments are welcome. For pull requests, please: + +* Add tests for new features and bug fixes +* Follow the existing style +* Separate unrelated changes into multiple pull requests + +See the existing issues for things to start contributing. + +For bigger changes, please make sure you start a discussion first by creating an issue and explaining the intended change. + +Atlassian requires contributors to sign a Contributor License Agreement, known as a CLA. This serves as a record stating that the contributor is entitled to contribute the code/documentation/translation to the project and is willing to have it used in distributions and derivative works (or is willing to transfer ownership). + +Prior to accepting your contributions we ask that you please follow the appropriate link below to digitally sign the CLA. The Corporate CLA is for those who are contributing as a member of an organization and the individual CLA is for those contributing as an individual. + +* [CLA for corporate contributors](https://opensource.atlassian.com/corporate) +* [CLA for individuals](https://opensource.atlassian.com/individual) \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..778b07e --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 Atlassian Pty Ltd + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..fc50f06 --- /dev/null +++ b/README.md @@ -0,0 +1,87 @@ +# Compass GitLab Integration + +[![Atlassian license](https://img.shields.io/badge/license-Apache%202.0-blue.svg?style=flat-square)](LICENSE) [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](CONTRIBUTING.md) + +This Forge app seamlessly integrates Compass with GitLab, a web-based Git repository that provides free open and private repositories, issue-following capabilities, and wikis. 
The integration currently supports the following operations: + +- import components from GitLab projects to track them in Compass +- automate component management from an external tool with config-as-code +- sync component data, such as deployment activity, from GitLab to Compass +- automatically calculate metrics associated with component activity + +## Usage + +To integrate Compass with GitLab, you must first install the GitLab app in Compass. Then, you must create and retrieve your GitLab group access token to connect Compass to the GitLab account to finish setup and begin importing components from GitLab and/or managing components via config-as-code. + +## Installation + +Install the dependencies: +```bash + nvm use + yarn + + npm install -g @forge/cli # if you don't have it already +``` + +Set up the Custom UI Frontend +```bash + # in a new tab + yarn ui:install + + # build the frontend + yarn ui:build + + # watch the frontend + yarn ui:start +``` + +Set up the Forge App +```bash + # login to Forge (will require an API token) + forge login + + # register the app (this will change the app ID in the manifest) + forge register + + # deploy the app + forge deploy [-f] + # -f, or --no-verify , allows you to include modules in your manifest that aren't officially published in Forge yet + + # install the app on your site + forge install [--upgrade] + # pick "Compass" and enter your site. <*.atlassian.net> + # --upgrade will attempt to upgrade existing installations if the scopes or permissions have changed + + # run the tunnel which will listen for changes + forge tunnel +``` + +### Notes + +- Use the `forge deploy` command when you want to persist code changes. +- Use the `forge install` command when you want to install the app on a new site. +- Once the app is installed on a site, the site picks up the new app changes you deploy without needing to rerun the install command. 
+ + +## Documentation + +Documentation for the Compass GitLab integration can be found [here](https://developer.atlassian.com/cloud/compass/integrations/integrate-Compass-with-Gitlab/). For more information about building integrations on Compass, see [here](https://developer.atlassian.com/cloud/compass/integrations/get-started-integrating-with-Compass/). + + +## Tests + +Use `yarn ui:test` for UI tests and `yarn test` for all other tests. + +## Contributions + +Contributions to the Compass GitLab Integration are welcome! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for details. + +## License + +Copyright (c) 2022 Atlassian and others. +Apache 2.0 licensed, see [LICENSE](LICENSE) file. + +
+ + +[![With ❤️ from Atlassian](https://raw.githubusercontent.com/atlassian-internal/oss-assets/master/banner-cheers.png)](https://www.atlassian.com) \ No newline at end of file diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 0000000..2410b66 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,17 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + roots: ['src'], + preset: 'ts-jest', + testEnvironment: 'node', + testPathIgnorePatterns: ['/node_modules/', '/typings/', '/support/', '/dist/', '/fixtures/', '/helpers/'], + collectCoverageFrom: ['src/**/*'], + globals: { + 'ts-jest': { + tsconfig: 'tsconfig.json', + }, + }, + transform: { + '^.+\\.(ts|tsx)$': 'ts-jest', + }, + coverageDirectory: 'coverage', +}; diff --git a/manifest.yml b/manifest.yml new file mode 100644 index 0000000..8ece830 --- /dev/null +++ b/manifest.yml @@ -0,0 +1,83 @@ +modules: + compass:componentImporter: + - key: import-page-ui + resolver: + function: import-resolver + resource: main + title: GitLab + icon: https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png + compass:adminPage: + - key: admin-page-ui + resolver: + function: admin-resolver + resource: main + title: GitLab + icon: https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png + compass:configValidator: + - key: config-validator + function: config-validator-fn + compass:dataProvider: + - key: data-provider + function: data-provider-fn + callback: + function: callback-fn + domains: + - 'gitlab.com' + linkTypes: + - repository + compass:preUninstall: + - key: pre-uninstall-ep + function: pre-uninstall + webtrigger: + - key: gitlab-event-webtrigger + function: process-gitlab-event + function: + - key: admin-resolver + handler: index.resolver + - key: import-resolver + handler: index.resolver + - key: process-gitlab-event + handler: index.processGitlabEvent + - key: import-projects + handler: import-queue-resolver.run + - key: pre-uninstall + handler: 
index.preUninstall + - key: data-provider-fn + handler: index.dataProvider + - key: callback-fn + handler: index.callback + - key: config-validator-fn + handler: index.configValidator + consumer: + - key: import-consumer + queue: import-queue + resolver: + function: import-projects + method: import +app: + id: ari:cloud:ecosystem::app/883d6844-ebba-4a7a-b816-a4f5cc162ddb +resources: + - key: main + path: ui/build + tunnel: + port: 3001 +permissions: + # For more info on manifest permissions, see https://developer.atlassian.com/platform/forge/manifest-reference/permissions/ + scopes: + - storage:app + - read:component:compass + - write:component:compass + - read:event:compass + - write:event:compass + - read:scorecard:compass + - write:scorecard:compass + - write:metric:compass + - read:metric:compass + external: + fetch: + backend: + - '*.services.atlassian.com' + - 'https://gitlab.com' + content: + styles: + - 'unsafe-inline' diff --git a/package.json b/package.json new file mode 100644 index 0000000..7ebdca3 --- /dev/null +++ b/package.json @@ -0,0 +1,68 @@ +{ + "name": "insert-name-here", + "version": "1.0.0", + "main": "index.ts", + "repository": "insert-repo-here", + "author": "Atlassian", + "license": "MIT", + "private": true, + "devDependencies": { + "@forge/cli": "^4.3.2", + "@types/jest": "^27.4.1", + "@types/js-yaml": "^4.0.5", + "@types/lodash": "^4.14.182", + "@types/url-parse": "^1.4.8", + "@typescript-eslint/eslint-plugin": "^5.14.0", + "@typescript-eslint/parser": "^5.14.0", + "eslint": "^8.11.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-prettier": "^8.5.0", + "eslint-plugin-import": "^2.25.4", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-no-only-tests": "^2.6.0", + "eslint-plugin-prettier": "^4.0.0", + "eslint-plugin-react": "^7.29.4", + "eslint-plugin-react-hooks": "^4.3.0", + "husky": "^7.0.4", + "jest": "^27.5.1", + "jest-fetch-mock": "^3.0.3", + "jest-mock": "^28.1.0", + "lint-staged": "^12.3.5", + "prettier": 
"^2.6.2", + "ts-jest": "^27.1.3", + "typescript": "~4.5.5" + }, + "dependencies": { + "@atlassian/forge-graphql": "^8.7.6", + "@forge/api": "^2.6.1", + "@forge/bridge": "^2.3.0", + "@forge/events": "^0.5.3", + "@forge/resolver": "^1.4.2", + "@forge/ui": "^1.1.0", + "exponential-backoff": "^3.1.0", + "js-yaml": "^4.1.0", + "lodash": "^4.17.21", + "react-router-dom": "^6.3.0", + "url-parse": "^1.5.10" + }, + "scripts": { + "compile": "tsc --noEmit", + "prepare": "husky install", + "test": "jest", + "lint": "yarn lint:eslint && yarn lint:prettier --check", + "fix:prettier:and:lint": "yarn lint:prettier:fix && yarn lint:eslint:fix", + "lint:eslint": "eslint '**/*.{tsx,js,ts}'", + "lint:eslint:fix": "yarn lint:eslint --fix", + "lint:prettier": "prettier '**/*.{tsx,js,ts}'", + "lint:prettier:fix": "yarn lint:prettier --write", + "ui:install": "cd ui && yarn install", + "ui:start": "cd ui && yarn start", + "ui:build": "cd ui && yarn build", + "ui:test": "cd ui && yarn test --watchAll=false --passWithNoTests" + }, + "lint-staged": { + "**/*.{tsx,js,ts}": [ + "yarn run fix:prettier:and:lint" + ] + } +} diff --git a/src/__tests__/contract/__snapshots__/process-gitlab-event-webtrigger.test.ts.snap b/src/__tests__/contract/__snapshots__/process-gitlab-event-webtrigger.test.ts.snap new file mode 100644 index 0000000..c85966c --- /dev/null +++ b/src/__tests__/contract/__snapshots__/process-gitlab-event-webtrigger.test.ts.snap @@ -0,0 +1,25 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Process gitlab webtrigger successfully parse webhook event 1`] = ` +Object { + "body": "{\\"message\\":\\"Processed webhook event\\",\\"success\\":true}", + "headers": Object { + "Content-Type": Array [ + "application/json", + ], + }, + "statusCode": 200, +} +`; + +exports[`Process gitlab webtrigger successfully parse webhook pipeline event 1`] = ` +Object { + "body": "{\\"message\\":\\"Processed webhook event\\",\\"success\\":true}", + "headers": Object { + "Content-Type": Array [ + 
"application/json", + ], + }, + "statusCode": 200, +} +`; diff --git a/src/__tests__/contract/process-gitlab-event-webtrigger.test.ts b/src/__tests__/contract/process-gitlab-event-webtrigger.test.ts new file mode 100644 index 0000000..0d1ed19 --- /dev/null +++ b/src/__tests__/contract/process-gitlab-event-webtrigger.test.ts @@ -0,0 +1,43 @@ +/* eslint-disable import/order */ +import { storage, mockForgeApi } from '../helpers/forge-helper'; +/* eslint-disable import/first */ +mockForgeApi(); + +import { processGitlabEvent } from '../../entry/webtriggers'; +import { pipelineWebhookFixture } from '../fixtures/build-webhook-payload'; + +const MOCK_TOKEN_SECRET = 'kokokokokokokokok'; + +describe('Process gitlab webtrigger', () => { + beforeEach(() => { + jest.resetAllMocks(); + }); + + test('successfully parse webhook event', async () => { + storage.get.mockResolvedValue(MOCK_TOKEN_SECRET); + const resp = await processGitlabEvent( + { + body: JSON.stringify({ body: 'some data' }), + headers: { 'x-gitlab-token': [MOCK_TOKEN_SECRET] }, + queryParameters: { groupId: [12345] }, + }, + { installContext: 'ari:cloud:compass::site/00000000-0000-0000-0000-000000000000', principal: undefined }, + ); + + expect(resp).toMatchSnapshot(); + }); + + test('successfully parse webhook pipeline event', async () => { + storage.get.mockResolvedValue(MOCK_TOKEN_SECRET); + const resp = await processGitlabEvent( + { + body: JSON.stringify({ body: pipelineWebhookFixture }), + headers: { 'x-gitlab-token': [MOCK_TOKEN_SECRET] }, + queryParameters: { groupId: [12345] }, + }, + { installContext: 'ari:cloud:compass::site/00000000-0000-0000-0000-000000000000', principal: undefined }, + ); + + expect(resp).toMatchSnapshot(); + }); +}); diff --git a/src/__tests__/fixtures/build-webhook-payload.ts b/src/__tests__/fixtures/build-webhook-payload.ts new file mode 100644 index 0000000..076df38 --- /dev/null +++ b/src/__tests__/fixtures/build-webhook-payload.ts @@ -0,0 +1,136 @@ +import { PipelineEvent } 
from 'src/types'; + +export const latestDate = new Date().toDateString(); + +export const pipelineWebhookFixture = { + object_kind: 'pipeline', + object_attributes: { + id: 563913989, + ref: 'main', + tag: false, + sha: 'c5b40d0e1885276eb93f1a6e5e3c84fe672288f0', + before_sha: '7bd9e29915aa5e4e50d528c92173b8be5c40b01b', + source: 'push', + status: 'success', + detailed_status: 'passed', + stages: ['build', 'test', 'deploy'], + created_at: '2022-06-14 22:45:56 UTC', + finished_at: '2022-06-14 22:46:49 UTC', + duration: 51, + queued_duration: null as any, + variables: [] as any[], + }, + merge_request: null as any, + user: { + id: 10793434, + name: 'Patrick Brady', + username: 'pbrady2', + avatar_url: 'https://gitlab.com/uploads/-/system/user/avatar/10793434/avatar.png', + email: '[REDACTED]', + }, + project: { + id: 36867443, + name: 'stringray', + description: + 'Stingrays are a group of sea rays, which are cartilaginous fish related to sharks. They are classified in the suborder Myliobatoidei of the order Myliobatiformes and consist of eight families: Hexatrygonidae (sixgill stingray), Plesiobatidae (deepwat', + web_url: + 'https://gitlab.com/gitlab-com/alliances/atlassian/shared-projects/compass-electromagnets-testing/patrick-subgroup/stringray', + avatar_url: null as any, + git_ssh_url: + 'git@gitlab.com:gitlab-com/alliances/atlassian/shared-projects/compass-electromagnets-testing/patrick-subgroup/stringray.git', + git_http_url: + 'https://gitlab.com/gitlab-com/alliances/atlassian/shared-projects/compass-electromagnets-testing/patrick-subgroup/stringray.git', + namespace: 'patrick-subgroup', + visibility_level: 0, + path_with_namespace: + 'gitlab-com/alliances/atlassian/shared-projects/compass-electromagnets-testing/patrick-subgroup/stringray', + default_branch: 'main', + ci_config_path: '', + }, + commit: { + id: 'c5b40d0e1885276eb93f1a6e5e3c84fe672288f0', + message: 'Update .gitlab-ci.yml file', + title: 'Update .gitlab-ci.yml file', + timestamp: 
'2022-06-14T22:45:54+00:00', + url: 'https://gitlab.com/gitlab-com/alliances/atlassian/shared-projects/compass-electromagnets-testing/patrick-subgroup/stringray/-/commit/c5b40d0e1885276eb93f1a6e5e3c84fe672288f0', + author: { name: 'Patrick Brady', email: 'pbrady@atlassian.com' }, + }, + builds: [ + { + id: 2591053758, + stage: 'build', + name: 'build-job', + status: 'success', + created_at: '2022-06-14 22:45:56 UTC', + started_at: '2022-06-14 22:45:56 UTC', + finished_at: '2022-06-14 22:46:11 UTC', + duration: 14.853819, + queued_duration: 0.092718, + when: 'on_success', + manual: false, + allow_failure: false, + user: null, + runner: null, + artifacts_file: null, + environment: null, + }, + { + id: 2591053759, + stage: 'test', + name: 'unit-test-job', + status: 'success', + created_at: '2022-06-14 22:45:56 UTC', + started_at: '2022-06-14 22:46:12 UTC', + finished_at: '2022-06-14 22:46:36 UTC', + duration: 23.887026, + queued_duration: 0.564249, + when: 'on_success', + manual: false, + allow_failure: false, + user: null, + runner: null, + artifacts_file: null, + environment: null, + }, + { + id: 2591053761, + stage: 'test', + name: 'lint-test-job', + status: 'success', + created_at: '2022-06-14 22:45:56 UTC', + started_at: '2022-06-14 22:46:12 UTC', + finished_at: '2022-06-14 22:46:30 UTC', + duration: 18.361969, + queued_duration: 0.82745, + when: 'on_success', + manual: false, + allow_failure: false, + user: null, + runner: null, + artifacts_file: null, + environment: null, + }, + { + id: 2591053762, + stage: 'deploy', + name: 'deploy-job', + status: 'success', + created_at: '2022-06-14 22:45:56 UTC', + started_at: '2022-06-14 22:46:36 UTC', + finished_at: latestDate, + duration: 12.437945, + queued_duration: 0.383398, + when: 'on_success', + manual: false, + allow_failure: false, + user: null, + runner: null, + artifacts_file: null, + environment: { + name: 'patrick production', + action: 'start', + deployment_tier: 'production', + }, + }, + ], +} as 
PipelineEvent; diff --git a/src/__tests__/fixtures/data-provider-deployment-payload.ts b/src/__tests__/fixtures/data-provider-deployment-payload.ts new file mode 100644 index 0000000..73d7eb8 --- /dev/null +++ b/src/__tests__/fixtures/data-provider-deployment-payload.ts @@ -0,0 +1,28 @@ +import { + CompassDeploymentEventEnvironmentCategory, + CompassDeploymentEventState, + DataProviderDeploymentEvent, +} from '@atlassian/forge-graphql'; + +export const createMockDataProviderDeployment = ( + daysFromNow: number, + environment = 'production', +): DataProviderDeploymentEvent => ({ + environment: { + category: environment.toUpperCase() as CompassDeploymentEventEnvironmentCategory, + displayName: environment, + environmentId: '1234', + }, + pipeline: { + displayName: 'koko pipeline', + pipelineId: '2345', + url: 'https://koko.momo', + }, + sequenceNumber: 1, + state: CompassDeploymentEventState.Successful, + description: 'koko deployment', + displayName: 'koko', + lastUpdated: new Date(new Date('2022-01-29T01:15:42.960Z').valueOf() - 1000 * 86400 * daysFromNow).toISOString(), + updateSequenceNumber: new Date(new Date('2022-01-29T01:15:42.960Z').valueOf() - 1000 * 86400 * daysFromNow).getTime(), + url: 'https://koko.momo', +}); diff --git a/src/__tests__/fixtures/gitlab-data.ts b/src/__tests__/fixtures/gitlab-data.ts new file mode 100644 index 0000000..edfa67d --- /dev/null +++ b/src/__tests__/fixtures/gitlab-data.ts @@ -0,0 +1,42 @@ +import { MergeRequest } from '../../types'; + +export const TEST_COMPONENT_ID = + 'ari:cloud:compass:4958bb5d-3970-4a13-bebc-62bbca57f370:component/5ce8c075-7b72-4455-9be9-7f0a1c6e6db4/23b718b0-26a9-4654-9a48-4390a3e811dd'; + +export const TEST_TOKEN = 'glpat-geTHYDSDGHJJ'; + +export const TEST_GET_PROJECT_BY_ID_RESPONSE = { + id: 1, + name: 'koko', + description: 'description', + default_branch: 'default_branch', + topics: ['koko', 'momo'], + web_url: 'web_url', + namespace: { + id: 1, + full_path: 'path/group/koko', + path: 'group/koko', 
+ name: 'group/koko', + }, + created_at: expect.anything(), +}; + +export const mergeRequests = [ + { + created_at: '2022-02-10T15:49:16Z', + merged_at: '2022-02-10T15:57:02Z', + }, + { + created_at: '2022-02-08T15:49:37Z', + merged_at: '2022-02-08T16:06:44Z', + }, +] as MergeRequest[]; + +export const MOCK_CLOUD_ID = '0a44684d-52c3-4c0c-99f8-9d89ec294759'; + +export const MOCK_GROUP_DATA = { + name: 'koko', + id: 123, + full_name: 'GitLab/koko', + path: 'koko/momo', +}; diff --git a/src/__tests__/fixtures/successful-deployment-payload.ts b/src/__tests__/fixtures/successful-deployment-payload.ts new file mode 100644 index 0000000..45a4407 --- /dev/null +++ b/src/__tests__/fixtures/successful-deployment-payload.ts @@ -0,0 +1,20 @@ +import { Deployment } from '../../types'; + +export const createMockDeployment = (daysFromNow: number, environment = 'production'): Deployment => ({ + id: 1, + updated_at: new Date(new Date('2022-01-29T01:15:42.960Z').valueOf() - 1000 * 86400 * daysFromNow).toISOString(), + created_at: '2021-12-08T17:10:12.034483Z', + deployable: { + status: 'success', + finished_at: '2021-12-08T17:30:12.034483Z', + pipeline: { + id: 1, + web_url: 'https://koko.momo', + }, + }, + environment: { + name: environment, + id: 1, + }, + status: 'success', +}); diff --git a/src/__tests__/helpers/forge-helper.ts b/src/__tests__/helpers/forge-helper.ts new file mode 100644 index 0000000..c788a12 --- /dev/null +++ b/src/__tests__/helpers/forge-helper.ts @@ -0,0 +1,52 @@ +import fetch, { enableFetchMocks } from 'jest-fetch-mock'; + +export const storage = { + set: jest.fn(), + get: jest.fn(), + delete: jest.fn(), + query: jest.fn(), + setSecret: jest.fn(), + getSecret: jest.fn(), + deleteSecret: jest.fn(), +}; + +export const startsWith = jest.fn().mockImplementation(() => { + return { + condition: 'STARTS_WITH', + value: '', + }; +}); + +export const webTrigger = { + getUrl: jest.fn(), +}; + +// This function is used to mock Forge's fetch API by using the mocked 
version +// of `fetch` provided in the jest-fetch-mock library. +// eslint-disable-next-line import/prefer-default-export +export const mockForgeApi = (): void => { + const requestGraph = jest.fn(); + + // Global API mock + (global as any).api = { + asApp: () => ({ + requestGraph, + }), + }; + + jest.mock('@forge/api', () => ({ + __esModule: true, + default: 'mockedDefaultExport', + fetch, // assign the fetch import to return the jest-fetch-mock version of fetch + storage, + webTrigger, + startsWith, + })); + enableFetchMocks(); // enable jest-fetch-mock +}; + +export const mockForgeEvents = (): void => { + jest.mock('@forge/events', () => ({ + Queue: jest.fn(), + })); +}; diff --git a/src/__tests__/helpers/gitlab-helper.ts b/src/__tests__/helpers/gitlab-helper.ts new file mode 100644 index 0000000..ab819b5 --- /dev/null +++ b/src/__tests__/helpers/gitlab-helper.ts @@ -0,0 +1,335 @@ +import { + CompassBuildEventState, + CompassComponentType, + Component, + ComponentPayload, + DataProviderBuildEvent, + Link, +} from '@atlassian/forge-graphql'; +import { pipelineWebhookFixture } from '../fixtures/build-webhook-payload'; +import { + CommitFileDiff, + DeploymentEvent, + Environment, + EnvironmentTier, + GitlabAPIProject, + MergeRequestEvent, + Metric, + PipelineEvent, + ProjectReadyForImport, + PushEvent, +} from '../../types'; +import { TEST_COMPONENT_ID } from '../fixtures/gitlab-data'; + +export const generatePushEvent = (overrideEvent: Partial = {}): PushEvent => { + return { + object_kind: 'push', + before: 'before', + after: 'after', + ref: 'refs/heads/main', + checkout_sha: 'checkout_sha', + project: { + id: 1, + name: 'test', + default_branch: 'main', + web_url: 'https://test', + }, + commits: { + added: [], + modified: [], + removed: [], + }, + ...overrideEvent, + }; +}; + +export const generatePipelineEvent = (overrideEvent: Partial = {}): PipelineEvent => { + return { + ...pipelineWebhookFixture, + ...overrideEvent, + }; +}; + +export const 
generateMergeRequestEvent = (overrideEvent: Partial = {}): MergeRequestEvent => { + return { + object_kind: 'merge_request', + project: { + id: 1, + name: 'test', + default_branch: 'main', + web_url: 'https://test', + }, + object_attributes: { + target_branch: 'main', + }, + ...overrideEvent, + }; +}; + +export const generateDeploymentEvent = (overrideEvent: Partial = {}): DeploymentEvent => { + return { + object_kind: 'deployment', + deployment_id: 1, + project: { + id: 1, + name: 'test', + default_branch: 'main', + web_url: 'https://test', + }, + environment: 'production', + ...overrideEvent, + }; +}; + +export const generateEnvironmentEvent = (tier: Partial = EnvironmentTier.PRODUCTION): Environment => ({ + id: 1, + name: 'production', + tier, +}); + +export const generateMetric = (metricAri: string, value = 13): Metric => ({ + metricAri, + value, + timestamp: expect.anything(), +}); + +export const generateMetricInput = (metrics: Metric[], projectID = '1') => ({ + projectID, + metrics, +}); +export const createCommitFileDiff = (overrideCommitFileDiff: Partial = {}): CommitFileDiff => ({ + diff: 'diff', + new_path: 'new/path', + old_path: 'old/path', + new_file: false, + renamed_file: false, + deleted_file: false, + ...overrideCommitFileDiff, +}); + +export const generateComponent = (overrideComponent: Partial = {}): ComponentPayload => { + return { + component: { + id: TEST_COMPONENT_ID, + name: 'koko', + type: CompassComponentType.Service, + typeId: 'service', + changeMetadata: {}, + ...overrideComponent, + }, + }; +}; + +export const generateProjectsWithStatuses = ( + hasComponent: boolean, + isManaged: boolean, + override: Partial = {}, +) => [ + { + id: 1, + name: 'koko', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: TEST_COMPONENT_ID, + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Service, + hasComponent, + 
isCompassFilePrOpened: false, + isManaged, + groupFullPath: 'path/group/koko', + groupName: 'group/koko', + groupPath: 'group/koko', + ...override, + }, +]; + +export const generateGitlabProject = (override: Partial = {}): GitlabAPIProject => ({ + id: 1, + description: 'description', + name: 'name', + topics: ['topic'], + default_branch: 'main', + web_url: 'web_url', + namespace: { + id: 1, + full_path: 'full_path', + name: 'name', + path: 'path', + }, + created_at: expect.anything(), + ...override, +}); + +export const builds = [ + { + description: 'Pipeline run 571288088 for project subgroup-project', + displayName: 'subgroup-project pipeline 571288088', + state: CompassBuildEventState.Successful, + startedAt: '2022-06-23T12:55:47.054Z', + lastUpdated: '2022-06-23T12:57:29.654Z', + updateSequenceNumber: 1655989049654, + url: '', + }, + { + description: 'Pipeline run 571274787 for project subgroup-project', + displayName: 'subgroup-project pipeline 571274787', + state: CompassBuildEventState.Successful, + startedAt: '2022-06-23T12:42:12.898Z', + lastUpdated: '2022-06-23T12:43:13.773Z', + updateSequenceNumber: 1655988193773, + url: '', + }, + { + description: 'Pipeline run 571269170 for project subgroup-project', + displayName: 'subgroup-project pipeline 571269170', + state: CompassBuildEventState.Successful, + startedAt: '2022-06-23T12:36:29.405Z', + lastUpdated: '2022-06-23T12:37:32.957Z', + updateSequenceNumber: 1655987852957, + url: '', + }, +] as unknown as DataProviderBuildEvent[]; + +export const unsortedProjects = [ + { + id: 1, + name: 'b', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'koko', + groupName: 'koko', + groupPath: 'koko', + }, + { + id: 2, + name: 'a', + 
description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'koko', + groupName: 'koko', + groupPath: 'koko', + }, + { + id: 3, + name: 'b', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'momo', + groupName: 'momo', + groupPath: 'momo', + }, + { + id: 4, + name: 'a', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'momo', + groupName: 'momo', + groupPath: 'momo', + }, +]; + +export const sortedProjects = [ + { + id: 2, + name: 'a', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'koko', + groupName: 'koko', + groupPath: 'koko', + }, + { + id: 1, + name: 'b', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + 
groupFullPath: 'koko', + groupName: 'koko', + groupPath: 'koko', + }, + { + id: 4, + name: 'a', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'momo', + groupName: 'momo', + groupPath: 'momo', + }, + { + id: 3, + name: 'b', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'momo', + groupName: 'momo', + groupPath: 'momo', + }, +]; diff --git a/src/__tests__/helpers/mock-agg.ts b/src/__tests__/helpers/mock-agg.ts new file mode 100644 index 0000000..bfa287b --- /dev/null +++ b/src/__tests__/helpers/mock-agg.ts @@ -0,0 +1,37 @@ +import { ApiPayload } from '@atlassian/forge-graphql'; +import { mockForgeApi } from './forge-helper'; + +const defaultImpl = async (): Promise> => ({ + success: true, + errors: [], + data: {}, +}); + +export const mockDeleteExternalAlias = jest.fn(defaultImpl); +export const mockDetachDataManager = jest.fn(defaultImpl); +export const mockGetComponent = jest.fn(defaultImpl); +export const mockCreateExternalAlias = jest.fn(defaultImpl); +export const mockUpdateComponent = jest.fn(defaultImpl); +export const mockUpdateComponentDataManager = jest.fn(defaultImpl); +export const mockCreateEvent = jest.fn(defaultImpl); +export const mockInsertMetricValueByExternalId = jest.fn(defaultImpl); + +export function mockAgg() { + mockForgeApi(); + + jest.mock('@atlassian/forge-graphql', () => ({ + ...(jest.requireActual('@atlassian/forge-graphql') as any), + compass: { + asApp: () => ({ + getComponent: 
mockGetComponent, + deleteExternalAlias: mockDeleteExternalAlias, + detachDataManager: mockDetachDataManager, + createExternalAlias: mockCreateExternalAlias, + updateComponent: mockUpdateComponent, + updateDataManager: mockUpdateComponentDataManager, + createEvent: mockCreateEvent, + insertMetricValueByExternalId: mockInsertMetricValueByExternalId, + }), + }, + })); +} diff --git a/src/client/compass.ts b/src/client/compass.ts new file mode 100644 index 0000000..9c03410 --- /dev/null +++ b/src/client/compass.ts @@ -0,0 +1,149 @@ +import graphqlGateway, { + CompassLinkType, + CompassCreateEventInput, + Component, + ComponentPayload, + CreateCompassComponentExternalAliasInput, + DeleteCompassComponentExternalAliasInput, + DetachCompassComponentDataManagerInput, + GetComponentByExternalAliasInput, + UpdateCompassComponentDataManagerMetadataInput, + UpdateComponentInput, + SdkError, +} from '@atlassian/forge-graphql'; +import { ImportableProject, COMPASS_GATEWAY_MESSAGES, Metric } from '../types'; +import { EXTERNAL_SOURCE, IMPORT_LABEL } from '../constants'; +import { UNKNOWN_EXTERNAL_ALIAS_ERROR_MESSAGE } from '../models/error-messages'; +import { AggClientError, GraphqlGatewayError } from '../models/errors'; + +const throwIfErrors = function throwIfSdkErrors(method: string, errors: SdkError[]) { + // Checking if any invalid config errors to report. + // Certain client errors cannot be validated before calling the compass API, but should be raised as user errors. 
+ const clientErrors = errors.filter((e) => e?.statusCode === 400); + if (clientErrors.length > 0) { + console.warn({ message: 'invalid request error', method, errors: clientErrors.map((e) => e.message) }); + throw new AggClientError(clientErrors.map((e) => e.message)); + } + if (errors.length > 0) { + console.warn({ message: 'GraphqlGateway request error', method, errors: errors.map((e) => e.message) }); + throw new GraphqlGatewayError(method, errors); + } +}; + +export const createComponent = async (cloudId: string, project: ImportableProject): Promise => { + const { name, description, type, labels, url } = project; + const formattedLabels = labels.map((label) => label.split(' ').join('-').toLowerCase()); + const component = { + name, + description, + type, + labels: [IMPORT_LABEL, ...formattedLabels], + links: [ + { + type: CompassLinkType.Repository, + url, + }, + ], + externalAlias: { + externalId: project.id.toString(), + externalSource: EXTERNAL_SOURCE, + }, + cloudId, + }; + + const { data, errors } = await graphqlGateway.compass.asApp().createComponent(component); + + throwIfErrors('createComponent', errors); + + return data.component; +}; + +export async function getComponent(componentId: string): Promise { + const { data, errors } = await graphqlGateway.compass + .asApp() + .getComponent({ componentId, options: { includeLinks: true } }); + + throwIfErrors('getComponent', errors); + return data.component; +} + +export async function updateComponent(input: UpdateComponentInput): Promise { + const { data, errors } = await graphqlGateway.compass.asApp().updateComponent(input); + + throwIfErrors('updateComponent', errors); + return data.component; +} + +export async function createExternalAlias(input: CreateCompassComponentExternalAliasInput): Promise { + const { errors } = await graphqlGateway.compass.asApp().createExternalAlias(input); + throwIfErrors('createExternalAlias', errors); +} + +export async function updateDataManager(input: 
UpdateCompassComponentDataManagerMetadataInput): Promise { + const { errors } = await graphqlGateway.compass.asApp().updateDataManager(input); + throwIfErrors('updateDataManager', errors); +} + +export async function detachDataManager(input: DetachCompassComponentDataManagerInput): Promise { + const { errors } = await graphqlGateway.compass.asApp().detachDataManager(input); + throwIfErrors('detachDataManager', errors); +} + +export async function deleteExternalAlias(input: DeleteCompassComponentExternalAliasInput): Promise { + const { errors } = await graphqlGateway.compass.asApp().deleteExternalAlias(input); + + if (errors.length === 1 && errors[0].message === UNKNOWN_EXTERNAL_ALIAS_ERROR_MESSAGE) { + console.log('Could not find external alias to delete.'); + return; + } + + throwIfErrors('deleteExternalAlias', errors); +} + +export async function unlinkCompassComponents(cloudId: string, ecosystemAppId: string): Promise { + const { errors } = await graphqlGateway.compass.asApp().unlinkExternalSource({ + cloudId, + ecosystemAppId, + externalSource: EXTERNAL_SOURCE, + }); + + throwIfErrors('unlinkExternalSource', errors); +} + +export async function getComponentByExternalAlias(input: GetComponentByExternalAliasInput): Promise { + const { errors, data } = await graphqlGateway.compass + .asApp() + .getComponentByExternalAlias({ ...input, externalSource: EXTERNAL_SOURCE }); + + if (errors[0]?.message === COMPASS_GATEWAY_MESSAGES.COMPONENT_NOT_FOUND) { + return { component: null }; + } + + throwIfErrors('getComponentByExternalAlias', errors); + + return data; +} + +export async function sendEvents(eventPayload: CompassCreateEventInput | CompassCreateEventInput[]): Promise { + const { errors, data } = await graphqlGateway.compass.asApp().createEvent(eventPayload); + + throwIfErrors('createEvent', errors); + + return data; +} + +export async function insertMetricValueByExternalId(cloudId: string, projectID: string, metric: Metric): Promise { + const { errors, data } = 
await graphqlGateway.compass.asApp().insertMetricValueByExternalId({ + cloudId, + externalMetricSourceId: projectID, + metricDefinitionId: metric.metricAri, + value: { + value: metric.value, + timestamp: metric.timestamp ?? new Date().toISOString(), + }, + }); + + throwIfErrors('insertMetricValueByExternalId', errors); + + return data; +} diff --git a/src/client/gitlab.ts b/src/client/gitlab.ts new file mode 100644 index 0000000..94c89b3 --- /dev/null +++ b/src/client/gitlab.ts @@ -0,0 +1,434 @@ +import { fetch } from '@forge/api'; +import yaml from 'js-yaml'; + +import { BASE_URL } from '../constants'; +import { + GitlabAPIGroup, + GroupAccessToken, + RegisterWebhookPayload, + CommitFileDiff, + CompassYaml, + GitlabAPIProject, + ProjectBranch, + MergeRequestState, + MergeRequest, + GitlabApiPipeline, + Deployment, + Environment, + GitlabPipelineStates, +} from '../types'; +import { GitlabHttpMethodError, InvalidConfigFileError } from '../models/errors'; +import { INVALID_YAML_ERROR } from '../models/error-messages'; +import { queryParamsGenerator } from '../utils/url-utils'; + +export enum HttpMethod { + GET = 'GET', + POST = 'POST', + PUT = 'PUT', + PATCH = 'PATCH', + DELETE = 'DELETE', +} + +export enum GitLabContentType { + JSON = 'application/json', + RAW = 'text/plain; charset=utf-8', +} +type CallGitLabConfig = { + method?: HttpMethod; + contentType?: GitLabContentType; +}; + +export enum GitLabHeaders { + PAGINATION_TOTAL = 'x-total', +} + +export type GitlabPaginatedFetch = ( + page: number, + perPage: number, + fetchFnParameters: Record<'groupToken', string> & P, +) => Promise<{ data: D[]; headers: Headers }>; + +export enum MergeRequestWorkInProgressFilterOptions { + ONLY_WIP = 'yes', + FILTER_OUT_WIP = 'no', +} + +export enum MergeRequestResultViewOptions { + SIMPLE = 'simple', + DEFAULT = 'default', +} + +export const callGitlab = async ( + path: string, + authToken: string, + config?: CallGitLabConfig, + body?: string, +): Promise => { + const resp = 
await fetch(`${BASE_URL}${path}`, { + method: config?.method || HttpMethod.GET, + headers: { + 'PRIVATE-TOKEN': authToken, + Accept: config?.contentType || GitLabContentType.JSON, + }, + body, + }); + + if (resp.status === 204) { + // no content, we can just return here + return null; + } + + if (resp.status >= 300) { + console.warn(`Gitlab client received a status code of ${resp.status} while fetching ${path}`); + throw new GitlabHttpMethodError(resp.status, resp.statusText); + } + + if (config?.contentType === GitLabContentType.RAW) { + return resp.text(); + } + + return { data: await resp.json(), headers: resp.headers }; +}; + +export const getGroupsData = async ( + groupAccessToken: string, + owned?: string, + minAccessLevel?: number, +): Promise => { + const params = { + ...(owned ? { owned } : {}), + ...(minAccessLevel ? { min_access_level: minAccessLevel.toString() } : {}), + }; + + const queryParams = queryParamsGenerator(params); + + const { data } = await callGitlab(`/api/v4/groups?${queryParams}`, groupAccessToken); + + return data; +}; + +export const registerGroupWebhook = async (payload: RegisterWebhookPayload): Promise => { + const { groupId, token: groupToken, url, signature } = payload; + const { + data: { id }, + } = await callGitlab( + `/api/v4/groups/${groupId}/hooks`, + groupToken, + { method: HttpMethod.POST }, + JSON.stringify({ + url, + token: signature, + push_events: true, + merge_requests_events: true, + pipeline_events: true, + deployment_events: true, + }), + ); + + return id; +}; + +export const deleteGroupWebhook = async (groupId: number, hookId: number, groupToken: string): Promise => { + try { + await callGitlab(`/api/v4/groups/${groupId}/hooks/${hookId}`, groupToken, { method: HttpMethod.DELETE }); + } catch (e) { + if (e.message.includes('Not Found')) { + return; + } + throw e; + } +}; + +export const getGroupWebhook = async ( + groupId: number, + hookId: number, + groupToken: string, +): Promise<{ id: number } | null> => { + try 
{ + const { data: webhook } = await callGitlab(`/api/v4/groups/${groupId}/hooks/${hookId}`, groupToken); + + return webhook; + } catch (e) { + if (e.message.includes('Not Found')) { + return null; + } + throw e; + } +}; + +export const getGroupAccessTokens = async (groupToken: string, groupId: number): Promise => { + const { data: groupAccessTokenList } = await callGitlab(`/api/v4/groups/${groupId}/access_tokens`, groupToken); + + return groupAccessTokenList; +}; + +export const getCommitDiff = async (groupToken: string, projectId: number, sha: string): Promise => { + const { data: diff } = await callGitlab(`/api/v4/projects/${projectId}/repository/commits/${sha}/diff`, groupToken); + + return diff; +}; + +export const getFileContent = async ( + groupToken: string, + projectId: number, + filePath: string, + ref: string, +): Promise => { + const params = { + ref, + }; + + const queryParams = queryParamsGenerator(params); + + const fileRaw = await callGitlab( + `/api/v4/projects/${projectId}/repository/files/${encodeURIComponent(filePath)}/raw?${queryParams}`, + groupToken, + { contentType: GitLabContentType.RAW }, + ); + + try { + return yaml.load(fileRaw); + } catch (e) { + console.warn({ message: 'Error parsing yaml file', error: e }); + throw new InvalidConfigFileError([INVALID_YAML_ERROR]); + } +}; + +export const getProjects = async ( + groupToken: string, + groupId: number, + page: number, + perPage: number, + search?: string, +): Promise<{ data: GitlabAPIProject[]; headers: Headers }> => { + const params = { + include_subgroups: 'true', + page: page.toString(), + per_page: perPage.toString(), + ...(search ? 
{ search } : {}), + }; + + const queryParams = queryParamsGenerator(params); + const { data, headers } = await callGitlab(`/api/v4/groups/${groupId}/projects?${queryParams}`, groupToken); + + return { data, headers }; +}; + +export const getProjectById = async (groupToken: string, projectId: number): Promise => { + const { data: project } = await callGitlab(`/api/v4/projects/${projectId}`, groupToken); + + return project; +}; + +export const searchFileByPath = async (groupToken: string, projectId: number, path: string, branch: string) => { + const params = { + ref: branch, + }; + + const queryParams = queryParamsGenerator(params); + + const file = await callGitlab(`/api/v4/projects/${projectId}/repository/files/${path}?${queryParams}`, groupToken); + + return file; +}; + +export const getProjectLanguages = async (groupToken: string, projectId: number) => { + const { data: languages } = await callGitlab(`/api/v4/projects/${projectId}/languages`, groupToken); + + return languages; +}; + +export const getProjectVariable = async ( + groupToken: string, + projectId: number, + variable: string, +): Promise => { + const { + data: { value }, + } = await callGitlab(`/api/v4/projects/${projectId}/variables/${variable}`, groupToken); + return value; +}; + +export const getProjectBranch = async ( + groupToken: string, + projectId: number, + branchName: string, +): Promise => { + const { data: branch } = await callGitlab( + `/api/v4/projects/${projectId}/repository/branches/${branchName}`, + groupToken, + ); + return branch; +}; + +export const getOwnedProjectsBySearchCriteria = async ( + search: string, + groupToken: string, +): Promise => { + const params = { + owned: 'true', + search, + }; + + const queryParams = queryParamsGenerator(params); + + const { data } = await callGitlab(`/api/v4/projects?${queryParams}`, groupToken); + + return data; +}; + +export const getProjectRecentDeployments: GitlabPaginatedFetch< + Deployment, + { + projectId: number; + dateAfter: string; // 
in iso date time format, e.g. 2019-03-15T08:00:00Z + environmentName: string; + dateBefore?: string; // in iso date time format, e.g. 2019-03-15T08:00:00Z + } +> = async (page, perPage, fetchParameters) => { + const { groupToken, projectId, dateAfter, dateBefore, environmentName } = fetchParameters; + const params = { + updated_after: dateAfter, + environment: environmentName, + page: page.toString(), + per_page: perPage.toString(), + ...(dateBefore ? { updated_before: dateBefore } : {}), + }; + + const queryParams = queryParamsGenerator(params); + const path = `/api/v4/projects/${projectId}/deployments?${queryParams}`; + + const { data, headers } = await callGitlab(path, groupToken); + + return { data, headers }; +}; + +export const getMergeRequests: GitlabPaginatedFetch< + MergeRequest, + { + groupToken: string; + projectId: number; + state: MergeRequestState; + scope: string; + targetBranch?: string; + orderBy?: string; + wip?: MergeRequestWorkInProgressFilterOptions; + isSimpleView?: boolean; + sourceBranch?: string; + } +> = async (page, perPage, fetchParameters) => { + const { state, scope, targetBranch, orderBy, wip, isSimpleView, projectId, groupToken, sourceBranch } = + fetchParameters; + + const params = { + state, + page: page.toString(), + per_page: perPage.toString(), + scope, + ...(targetBranch ? { target_branch: targetBranch } : {}), + ...(orderBy ? { order_by: orderBy } : {}), + ...(wip ? { wip } : {}), + ...(isSimpleView ? { view: 'simple' } : {}), + ...(sourceBranch ? 
{ source_branch: sourceBranch } : {}), + }; + + const queryParams = queryParamsGenerator(params); + const path = `/api/v4/projects/${projectId}/merge_requests?${queryParams}`; + + const { data, headers } = await callGitlab(path, groupToken); + + return { data, headers }; +}; + +export const getProjectDeploymentById = async (projectId: number, deploymentId: number, groupToken: string) => { + const { data } = await callGitlab(`/api/v4/projects/${projectId}/deployments/${deploymentId}`, groupToken); + + return data; +}; + +export const getEnvironments = async (projectId: number, groupToken: string): Promise => { + const { data } = await callGitlab(`/api/v4/projects/${projectId}/environments`, groupToken); + + return data; +}; + +export const getProjectRecentPipelines: GitlabPaginatedFetch< + GitlabApiPipeline, + { + projectId: number; + dateAfter?: string; // in iso date time format, e.g. 2019-03-15T08:00:00Z + branchName: string; + status?: GitlabPipelineStates; + } +> = async (page, perPage, fetchParameters) => { + const { groupToken, projectId, dateAfter, branchName, status } = fetchParameters; + const params = { + ref: branchName, + page: page.toString(), + per_page: perPage.toString(), + ...(status ? { status } : {}), + ...(dateAfter ? 
{ updated_after: dateAfter } : {}), + }; + + const queryParams = queryParamsGenerator(params); + const path = `/api/v4/projects/${projectId}/pipelines?${queryParams}`; + + const { data, headers } = await callGitlab(path, groupToken); + + return { data, headers }; +}; + +export const createFileInProject = async ( + groupToken: string, + projectId: number, + filePath: string, + branchName: string, + startBranchName: string, + encoding: string, + content: string, + commitMessage: string, +) => { + const path = `/api/v4/projects/${projectId}/repository/files/${filePath}`; + + const { data } = await callGitlab( + path, + groupToken, + { method: HttpMethod.POST }, + JSON.stringify({ + branch: branchName, + start_branch: startBranchName, + encoding, + content, + commit_message: commitMessage, + }), + ); + + return data; +}; + +export const createMergeRequest = async ( + groupToken: string, + projectId: number, + sourceBranch: string, + targetBranch: string, + title: string, + description: string, + removeSourceBranch: boolean, +) => { + const path = `/api/v4/projects/${projectId}/merge_requests`; + + const { data } = await callGitlab( + path, + groupToken, + { method: HttpMethod.POST }, + JSON.stringify({ + source_branch: sourceBranch, + target_branch: targetBranch, + title, + description, + remove_source_branch: removeSourceBranch, + }), + ); + + return data; +}; diff --git a/src/constants.ts b/src/constants.ts new file mode 100644 index 0000000..35494e4 --- /dev/null +++ b/src/constants.ts @@ -0,0 +1,48 @@ +import { IBackOffOptions } from 'exponential-backoff'; + +export const BASE_URL = 'https://gitlab.com'; +export const GITLAB_EVENT_WEBTRIGGER = 'gitlab-event-webtrigger'; + +export const STORAGE_KEYS = { + GROUP_KEY_PREFIX: 'group-', + WEBHOOK_KEY_PREFIX: 'webhook-id-', + WEBHOOK_SIGNATURE_PREFIX: 'webhook-sign-id-', + LAST_SYNC_TIME: 'lastSyncTime', + CURRENT_IMPORT_TOTAL_PROJECTS: 'currentImportTotalProjects', + CURRENT_IMPORT_QUEUE_JOB_IDS: 
'currentImportQueueJobIds', + CURRENT_IMPORT_FAILED_PROJECT_PREFIX: 'currentImportFailedProject-', +}; + +export const STORAGE_SECRETS = { + GROUP_TOKEN_KEY_PREFIX: 'groupToken-', +}; + +export const REQUIRED_SCOPES = ['api', 'write_repository']; +export const MAX_NAME_LENGTH = 100; +export const MAX_DESCRIPTION_LENGTH = 1000; +export const EXTERNAL_SOURCE = 'gitlab-importer'; +export const IMPORT_LABEL = 'source:gitlab'; +export const MAX_LINKS_OF_TYPE = 5; +export const BACK_OFF: Partial = { + startingDelay: 300, + timeMultiple: 3, + numOfAttempts: 4, + jitter: 'full', +}; +export const NON_DEFAULT_BRANCH_VARIABLE_KEY = 'COMPASS_BRANCH'; + +export const CLEAR_STORAGE_CHUNK_SIZE = 8; +export const CLEAR_STORAGE_DELAY = 1000; + +export const DAYS_TO_CALC = 28; +export const MILLISEC_IN_DAY = 86400000; +export const DAYS_IN_WEEK = 7; + +export const COMPASS_YML_BRANCH = 'compass-gitlab-importer'; +export const MR_TITLE = 'Add a compass.yml file to manage this repository as a Compass component'; +export const MR_DESCRIPTION = `This MR adds the compass.yml file and sets up config-as-code for your component. \ +Upon merging, you'll be able to maintain this component's data via the compass.yml file that sits alongside its source code in the repository. \ +[Learn more about managing components via config-as-code](https://developer.atlassian.com/cloud/compass/config-as-code/manage-components-with-config-as-code/).\ +This PR is automatically generated by the integration of Compass with GitLab. 
+`; +export const COMMIT_MESSAGE = 'Compass.yml file for config-as-code'; diff --git a/src/entry/config-validator/index.ts b/src/entry/config-validator/index.ts new file mode 100644 index 0000000..5242000 --- /dev/null +++ b/src/entry/config-validator/index.ts @@ -0,0 +1,11 @@ +import { ConfigValidatorResult, ConfigValidatorResponse } from '@atlassian/forge-graphql'; + +import { getConnectedGroups } from '../../services/group'; + +export const configValidator = async (): Promise => { + const connectedGroups = await getConnectedGroups(); + const appConfigured = connectedGroups && connectedGroups.length > 0; + + const response = new ConfigValidatorResponse(appConfigured); + return response.build(); +}; diff --git a/src/entry/config-validator/test.ts b/src/entry/config-validator/test.ts new file mode 100644 index 0000000..5e5c9a3 --- /dev/null +++ b/src/entry/config-validator/test.ts @@ -0,0 +1,34 @@ +/* eslint-disable import/first, import/order */ +import { mockAgg } from '../../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { configValidator } from './index'; +import * as getConnectedGroups from '../../services/group'; + +const getConnectedGroupsSpy = jest.spyOn(getConnectedGroups, 'getConnectedGroups'); + +const MOCK_GROUP_DATA = { + name: 'koko', + id: 123, + full_name: 'GitLab/koko', + path: 'koko/momo', +}; + +describe('configValidator module', () => { + it('successfully returns true when app is configured', async () => { + getConnectedGroupsSpy.mockResolvedValue([MOCK_GROUP_DATA]); + + const result = await configValidator(); + + expect(result.result.appConfigured).toBeTruthy(); + }); + + it('successfully returns false when app is not configured', async () => { + getConnectedGroupsSpy.mockResolvedValue([]); + + const result = await configValidator(); + + expect(result.result.appConfigured).toBeFalsy(); + }); +}); diff --git a/src/entry/config-validator/types.ts b/src/entry/config-validator/types.ts new file mode 100644 index 0000000..897e056 --- /dev/null 
+++ b/src/entry/config-validator/types.ts @@ -0,0 +1,3 @@ +export interface ForgeTriggerContext { + installContext: string; +} diff --git a/src/entry/data-provider/__snapshots__/test.ts.snap b/src/entry/data-provider/__snapshots__/test.ts.snap new file mode 100644 index 0000000..3a60e85 --- /dev/null +++ b/src/entry/data-provider/__snapshots__/test.ts.snap @@ -0,0 +1,71 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`dataProvider module successfully returns events and metrics in the expected format 1`] = ` +Object { + "builtIn": Object { + "ari:cloud:compass::metric-definition/builtin/build-success-rate": Object { + "derived": true, + "initialValue": null, + }, + "ari:cloud:compass::metric-definition/builtin/build-time-avg-last-10": Object { + "derived": true, + "initialValue": null, + }, + "ari:cloud:compass::metric-definition/builtin/open-pull-requests": Object { + "initialValue": 3, + }, + "ari:cloud:compass::metric-definition/builtin/pull-request-cycle-time-avg-last-10": Object { + "initialValue": 1, + }, + "ari:cloud:compass::metric-definition/builtin/weekly-deployment-frequency-28d": Object { + "derived": true, + "initialValue": null, + }, + }, + "custom": undefined, +} +`; + +exports[`dataProvider module successfully returns events and metrics in the expected format 2`] = ` +Object { + "builds": Object { + "initialValues": Array [ + Object { + "completedAt": "completed-at", + "description": "mock description", + "displayName": "mock display name", + "lastUpdated": "updated", + "pipeline": Object { + "pipelineId": "pipeline-id", + }, + "startedAt": "started-at", + "state": "SUCCESSFUL", + "updateSequenceNumber": 1, + "url": "url", + }, + ], + }, + "deployments": Object { + "initialValues": Array [ + Object { + "displayName": "name", + "environment": Object { + "category": "PRODUCTION", + "displayName": "prod", + "environmentId": "id", + }, + "lastUpdated": "mock", + "pipeline": Object { + "displayName": "pipeline", + "pipelineId": "1", + "url": 
"url", + }, + "sequenceNumber": 1, + "state": "SUCCESSFUL", + "updateSequenceNumber": "1", + "url": "url", + }, + ], + }, +} +`; diff --git a/src/entry/data-provider/callback.ts b/src/entry/data-provider/callback.ts new file mode 100644 index 0000000..2beb89f --- /dev/null +++ b/src/entry/data-provider/callback.ts @@ -0,0 +1,16 @@ +import { CallbackPayload } from './types'; +import { serverResponse } from '../../utils/webtrigger-utils'; + +export const callback = (input: CallbackPayload) => { + const { success, url, errorMessage } = input; + + if (!success) { + console.error({ + message: 'Error processing dataProvider module', + url, + errorMessage, + }); + } + + return serverResponse('Callback finished'); +}; diff --git a/src/entry/data-provider/index.ts b/src/entry/data-provider/index.ts new file mode 100644 index 0000000..0bec084 --- /dev/null +++ b/src/entry/data-provider/index.ts @@ -0,0 +1,73 @@ +import { + BuiltinMetricDefinitions, + DataProviderEventTypes, + DataProviderResponse, + DataProviderResult, +} from '@atlassian/forge-graphql'; + +import { DataProviderPayload } from './types'; +import { getProjectDataFromUrl } from '../../services/data-provider-link-parser'; +import { getTrackingBranchName } from '../../services/get-tracking-branch'; +import { getBackfillData } from '../../services/get-backfill-data'; +import { parse } from '../../utils/parse-ari'; + +export const dataProvider = async (request: DataProviderPayload): Promise => { + try { + parse(request.ctx.cloudId); + } catch { + console.error('Invalid cloudId.'); + return null; + } + + const { + project: { id: projectId, default_branch: defaultBranch, name: projectName }, + groupToken, + } = await getProjectDataFromUrl(request.url); + + if (!projectId) { + console.warn('Cannot get GitLab project data by provided link.'); + return null; + } + + const trackingBranch = await getTrackingBranchName(groupToken, projectId, defaultBranch); + + const { + builds, + deployments, + metrics: { mrCycleTime, 
openMergeRequestsCount }, + } = await getBackfillData(groupToken, projectId, projectName, trackingBranch); + + const response = new DataProviderResponse(projectId.toString(), { + eventTypes: [DataProviderEventTypes.BUILDS, DataProviderEventTypes.DEPLOYMENTS], + builtInMetricDefinitions: [ + { + name: BuiltinMetricDefinitions.WEEKLY_DEPLOYMENT_FREQUENCY_28D, + derived: true, + }, + { + name: BuiltinMetricDefinitions.PULL_REQUEST_CYCLE_TIME_AVG_LAST_10, + derived: false, + }, + { + name: BuiltinMetricDefinitions.BUILD_TIME_AVG_LAST_10, + derived: true, + }, + { + name: BuiltinMetricDefinitions.OPEN_PULL_REQUESTS, + derived: false, + }, + { + name: BuiltinMetricDefinitions.BUILD_SUCCESS_RATE, + derived: true, + }, + ], + customMetricDefinitions: [], + }); + + return response + .addBuilds(builds) + .addDeployments(deployments) + .addBuiltInMetricValue(BuiltinMetricDefinitions.PULL_REQUEST_CYCLE_TIME_AVG_LAST_10, mrCycleTime) + .addBuiltInMetricValue(BuiltinMetricDefinitions.OPEN_PULL_REQUESTS, openMergeRequestsCount) + .build(); +}; diff --git a/src/entry/data-provider/test.ts b/src/entry/data-provider/test.ts new file mode 100644 index 0000000..9350173 --- /dev/null +++ b/src/entry/data-provider/test.ts @@ -0,0 +1,108 @@ +/* eslint-disable import/first, import/order */ +import { mockAgg } from '../../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { + DataProviderBuildEvent, + DataProviderDeploymentEvent, + CompassBuildEventState, + CompassDeploymentEventEnvironmentCategory, + CompassDeploymentEventState, +} from '@atlassian/forge-graphql'; +import { dataProvider } from './index'; +import * as getBackfillEvents from '../../services/get-backfill-data'; +import * as getProjectDataFromUrl from '../../services/data-provider-link-parser'; +import * as getTrackingBranchName from '../../services/get-tracking-branch'; +import { GitlabAPIProject } from 'src/types'; + +const getEventsSpy = jest.spyOn(getBackfillEvents, 'getBackfillData'); +const projectDataSpy = 
jest.spyOn(getProjectDataFromUrl, 'getProjectDataFromUrl'); +const trackingBranchSpy = jest.spyOn(getTrackingBranchName, 'getTrackingBranchName'); + +const MOCK_BUILD_EVENT: DataProviderBuildEvent = { + pipeline: { + pipelineId: 'pipeline-id', + }, + startedAt: 'started-at', + completedAt: 'completed-at', + state: CompassBuildEventState.Successful, + description: 'mock description', + displayName: 'mock display name', + updateSequenceNumber: 1, + lastUpdated: 'updated', + url: 'url', +}; + +export const MOCK_DEPLOY_EVENT: DataProviderDeploymentEvent = { + displayName: 'name', + lastUpdated: 'mock', + updateSequenceNumber: '1', + url: 'url', + environment: { + category: CompassDeploymentEventEnvironmentCategory.Production, + displayName: 'prod', + environmentId: 'id', + }, + pipeline: { pipelineId: '1', displayName: 'pipeline', url: 'url' }, + sequenceNumber: 1, + state: CompassDeploymentEventState.Successful, +}; + +const MOCK_EVENTS_RESPONSE: { + builds: DataProviderBuildEvent[]; + deployments: DataProviderDeploymentEvent[]; + metrics: { + mrCycleTime: number; + buildDuration: number; + openMergeRequestsCount: number; + }; +} = { + builds: [MOCK_BUILD_EVENT], + deployments: [MOCK_DEPLOY_EVENT], + metrics: { + mrCycleTime: 1, + buildDuration: 2, + openMergeRequestsCount: 3, + }, +}; +const MOCK_PROJECT_URL = 'https://gitlab.com/test/repo-name?testParam=test'; +const MOCK_PROJECT: GitlabAPIProject = { + id: 1, + description: 'description', + name: 'name', + topics: ['topic'], + default_branch: 'main', + web_url: 'web_url', + namespace: { + id: 2, + full_path: 'full_path', + name: 'name', + path: 'path', + }, + created_at: 'abc', +}; + +describe('dataProvider module', () => { + it('successfully returns events and metrics in the expected format', async () => { + getEventsSpy.mockResolvedValue(MOCK_EVENTS_RESPONSE); + projectDataSpy.mockResolvedValue({ + project: MOCK_PROJECT, + groupToken: 'mock-group-token', + }); + + trackingBranchSpy.mockResolvedValue('branch'); + 
+ const result = await dataProvider({ + url: MOCK_PROJECT_URL, + ctx: { + cloudId: 'ari:cloud:compass:122345:component/12345/12345', + extensionId: 'mock-extension-id', + }, + }); + + expect(result.externalSourceId).toEqual(MOCK_PROJECT.id.toString()); + expect(result.metrics).toMatchSnapshot(); + expect(result.events).toMatchSnapshot(); + }); +}); diff --git a/src/entry/data-provider/types.ts b/src/entry/data-provider/types.ts new file mode 100644 index 0000000..3ab4d2f --- /dev/null +++ b/src/entry/data-provider/types.ts @@ -0,0 +1,15 @@ +type DataProviderPayload = { + url: string; + ctx: { + cloudId: string; + extensionId: string; + }; +}; + +type CallbackPayload = { + success: boolean; + url: string; + errorMessage?: string; +}; + +export { DataProviderPayload, CallbackPayload }; diff --git a/src/entry/extension-points/pre-uninstall.ts b/src/entry/extension-points/pre-uninstall.ts new file mode 100644 index 0000000..76a17d8 --- /dev/null +++ b/src/entry/extension-points/pre-uninstall.ts @@ -0,0 +1,21 @@ +import { InvocationContext } from '../../types'; +import { disconnectGroup } from '../../services/disconnect-group'; +import { getForgeAppId } from '../../utils/get-forge-app-id'; +import { getGroupIds } from '../../utils/storage-utils'; + +export default async function preUninstall( + payload: Record, + { installContext }: InvocationContext, +): Promise { + console.log(`Performing preUninstall for site ${installContext}`); + + const cloudId = installContext.split('/')[1]; + const forgeAppId = getForgeAppId(); + const groupIds = await getGroupIds(); + + try { + await Promise.all(groupIds.map((groupId) => disconnectGroup(groupId, cloudId, forgeAppId))); + } catch (e) { + console.error({ message: 'Error performing preUninstall', error: e }); + } +} diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-deployment-event.test.ts b/src/entry/webtriggers/gitlab-event-handlers/handle-deployment-event.test.ts new file mode 100644 index 0000000..95cbfcf --- 
/dev/null +++ b/src/entry/webtriggers/gitlab-event-handlers/handle-deployment-event.test.ts @@ -0,0 +1,112 @@ +/* eslint-disable import/first, import/order */ +import { mocked } from 'jest-mock'; +import { mockForgeApi } from '../../../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { + CompassCreateEventInput, + CompassDeploymentEventEnvironmentCategory, + CompassDeploymentEventState, +} from '@atlassian/forge-graphql'; +import { generateDeploymentEvent, generateEnvironmentEvent } from '../../../__tests__/helpers/gitlab-helper'; +import { MOCK_CLOUD_ID, TEST_TOKEN } from '../../../__tests__/fixtures/gitlab-data'; +import { getEnvironments } from '../../../client/gitlab'; +import { getDeployment, getRecentDeployments } from '../../../services/deployment'; +import { sendEventToCompass } from '../../../services/send-compass-events'; +import { createMockDeployment } from '../../../__tests__/fixtures/successful-deployment-payload'; +import { insertMetricValues } from '../../../services/insert-metric-values'; +import { handleDeploymentEvent } from './handle-deployment-event'; +import { EnvironmentTier } from '../../../types'; +import { hasDeploymentAfter28Days } from '../../../utils/has-deployment-after-28days'; + +jest.mock('../../../services/send-compass-events'); +jest.mock('../../../services/deployment', () => { + return { + // eslint-disable-next-line @typescript-eslint/ban-types + ...(jest.requireActual('../../../services/deployment') as {}), + getDeployment: jest.fn(), + getRecentDeployments: jest.fn(), + }; +}); +jest.mock('../../../client/compass'); +jest.mock('../../../client/gitlab'); +jest.mock('../../../services/insert-metric-values'); +jest.mock('../../../utils/has-deployment-after-28days'); + +const mockedGetEnvironments = mocked(getEnvironments); +const mockedGetDeployment = mocked(getDeployment); +const mockedSendEventsToCompass = mocked(sendEventToCompass); +const mockedGetRecentDeployments = mocked(getRecentDeployments); +const 
mockedInsertMetricValues = mocked(insertMetricValues); +const mockedHasDeploymentAfter28Days = mocked(hasDeploymentAfter28Days); + +const MOCK_DEPLOYMENT_EVENT = generateDeploymentEvent(); +const MOCK_ENVIRONMENTS_EVENT = generateEnvironmentEvent(); +const PROJECT_ID = 123; +const MOCK_DATE = Date.parse('2022-01-29T01:15:42.960Z'); + +const MOCK_DEPLOYMENT_EVENT_INPUT: CompassCreateEventInput = { + cloudId: MOCK_CLOUD_ID, + event: { + deployment: { + description: 'description', + displayName: 'production', + deploymentProperties: { + environment: { + category: 'PRODUCTION' as CompassDeploymentEventEnvironmentCategory, + displayName: 'production', + environmentId: '123', + }, + pipeline: { + pipelineId: '123', + url: 'https://test', + displayName: 'production pipeline', + }, + state: CompassDeploymentEventState.Successful, + sequenceNumber: 134, + }, + externalEventSourceId: PROJECT_ID.toString(), + lastUpdated: expect.anything(), + updateSequenceNumber: expect.anything(), + url: 'https://example', + }, + }, +}; + +const MOCK_DEPLOYMENT = createMockDeployment(1); +const MOCK_RECENT_DEPLOYMENTS = { + deployments: [MOCK_DEPLOYMENT], + headers: { + get: jest.fn().mockResolvedValue('x-total'), + } as unknown as Headers, +}; + +describe('GitLab deployment event', () => { + beforeEach(() => { + jest.resetAllMocks(); + const dateNowStub = jest.fn(() => MOCK_DATE); + global.Date.now = dateNowStub; + }); + + it('sends deployment event successfully and does not send metric value', async () => { + mockedGetEnvironments.mockResolvedValue([MOCK_ENVIRONMENTS_EVENT]); + mockedGetDeployment.mockResolvedValue(MOCK_DEPLOYMENT_EVENT_INPUT); + mockedGetRecentDeployments.mockResolvedValue(MOCK_RECENT_DEPLOYMENTS.deployments); + mockedHasDeploymentAfter28Days.mockResolvedValue(false); + + await handleDeploymentEvent(MOCK_DEPLOYMENT_EVENT, TEST_TOKEN, MOCK_CLOUD_ID); + + expect(mockedSendEventsToCompass).toHaveBeenCalledWith(MOCK_DEPLOYMENT_EVENT_INPUT); + 
expect(mockedInsertMetricValues).not.toHaveBeenCalled(); + }); + + it('do not send deployment event when environment is not production', async () => { + const MOCK_STAGING_ENVIRONMENTS_EVENT = generateEnvironmentEvent(EnvironmentTier.STAGING); + mockedGetEnvironments.mockResolvedValue([MOCK_STAGING_ENVIRONMENTS_EVENT]); + + await handleDeploymentEvent(MOCK_DEPLOYMENT_EVENT, TEST_TOKEN, MOCK_CLOUD_ID); + + expect(mockedSendEventsToCompass).not.toHaveBeenCalled(); + }); +}); diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-deployment-event.ts b/src/entry/webtriggers/gitlab-event-handlers/handle-deployment-event.ts new file mode 100644 index 0000000..af90b13 --- /dev/null +++ b/src/entry/webtriggers/gitlab-event-handlers/handle-deployment-event.ts @@ -0,0 +1,23 @@ +import { DeploymentEvent, EnvironmentTier } from '../../../types'; +import { getDeployment } from '../../../services/deployment'; +import { getEnvironmentTier, getProjectEnvironments } from '../../../services/environment'; +import { sendEventToCompass } from '../../../services/send-compass-events'; + +export const handleDeploymentEvent = async ( + event: DeploymentEvent, + groupToken: string, + cloudId: string, +): Promise => { + const { + environment, + project: { id: projectId }, + } = event; + const environments = await getProjectEnvironments(projectId, groupToken); + + const environmentTier = await getEnvironmentTier(environments, environment); + + if (environmentTier === EnvironmentTier.PRODUCTION) { + const deployment = await getDeployment(event, groupToken, environmentTier, cloudId); + await sendEventToCompass(deployment); + } +}; diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-merge-request-event.test.ts b/src/entry/webtriggers/gitlab-event-handlers/handle-merge-request-event.test.ts new file mode 100644 index 0000000..6732c55 --- /dev/null +++ b/src/entry/webtriggers/gitlab-event-handlers/handle-merge-request-event.test.ts @@ -0,0 +1,48 @@ +/* eslint-disable 
import/first, import/order */ + +import { mocked } from 'jest-mock'; +import { mockForgeApi } from '../../../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { BuiltinMetricDefinitions } from '@atlassian/forge-graphql'; +import { insertMetricValues } from '../../../services/insert-metric-values'; +import { getLastMergedMergeRequests, getOpenMergeRequests } from '../../../services/mergeRequest'; +import { getTrackingBranchName } from '../../../services/get-tracking-branch'; +import { mergeRequests, MOCK_CLOUD_ID, TEST_TOKEN } from '../../../__tests__/fixtures/gitlab-data'; +import { handleMergeRequestEvent } from './handle-merge-request-event'; +import { + generateMergeRequestEvent, + generateMetric, + generateMetricInput, +} from '../../../__tests__/helpers/gitlab-helper'; + +jest.mock('../../../services/get-tracking-branch'); +jest.mock('../../../services/mergeRequest', () => ({ + getLastMergedMergeRequests: jest.fn(), + getOpenMergeRequests: jest.fn(), +})); +jest.mock('../../../services/insert-metric-values'); + +const mockedGetTrackingBranchName = mocked(getTrackingBranchName); +const mockedGetLastMergedMergeRequests = mocked(getLastMergedMergeRequests); +const mockedGetOpenMergeRequests = mocked(getOpenMergeRequests); +const mockedInsertMetricValues = mocked(insertMetricValues); + +const MOCK_MERGE_REQUEST_EVENT = generateMergeRequestEvent(); +const MOCK_METRIC_INPUT = generateMetricInput([ + generateMetric(BuiltinMetricDefinitions.PULL_REQUEST_CYCLE_TIME_AVG_LAST_10), + generateMetric(BuiltinMetricDefinitions.OPEN_PULL_REQUESTS, 2), +]); + +describe('Gitlab merge request', () => { + it('handles merge request event', async () => { + mockedGetTrackingBranchName.mockResolvedValue(MOCK_MERGE_REQUEST_EVENT.project.default_branch); + mockedGetLastMergedMergeRequests.mockResolvedValue(mergeRequests); + mockedGetOpenMergeRequests.mockResolvedValue(mergeRequests); + + await handleMergeRequestEvent(MOCK_MERGE_REQUEST_EVENT, TEST_TOKEN, MOCK_CLOUD_ID); + + 
expect(mockedInsertMetricValues).toHaveBeenCalledWith(MOCK_METRIC_INPUT, MOCK_CLOUD_ID); + }); +}); diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-merge-request-event.ts b/src/entry/webtriggers/gitlab-event-handlers/handle-merge-request-event.ts new file mode 100644 index 0000000..4b958d9 --- /dev/null +++ b/src/entry/webtriggers/gitlab-event-handlers/handle-merge-request-event.ts @@ -0,0 +1,42 @@ +import { BuiltinMetricDefinitions } from '@atlassian/forge-graphql'; +import { getTrackingBranchName } from '../../../services/get-tracking-branch'; +import { MergeRequestEvent } from '../../../types'; +import { insertMetricValues } from '../../../services/insert-metric-values'; +import { getMRCycleTime, getOpenMergeRequestsCount } from '../../../services/compute-event-and-metrics'; + +export const handleMergeRequestEvent = async ( + event: MergeRequestEvent, + groupToken: string, + cloudId: string, +): Promise => { + console.log('Merge request event received'); + const { + project: { id, default_branch: defaultBranch }, + object_attributes: { target_branch: targetBranch }, + } = event; + const trackingBranch = await getTrackingBranchName(groupToken, id, defaultBranch); + + if (trackingBranch === targetBranch) { + const [cycleTime, openMergeRequestsCount] = await Promise.all([ + getMRCycleTime(groupToken, id, trackingBranch), + getOpenMergeRequestsCount(groupToken, id, trackingBranch), + ]); + + const metricInput = { + projectID: id.toString(), + metrics: [ + { + metricAri: BuiltinMetricDefinitions.PULL_REQUEST_CYCLE_TIME_AVG_LAST_10, + value: cycleTime, + timestamp: new Date().toISOString(), + }, + { + metricAri: BuiltinMetricDefinitions.OPEN_PULL_REQUESTS, + value: openMergeRequestsCount, + timestamp: new Date().toISOString(), + }, + ], + }; + await insertMetricValues(metricInput, cloudId); + } +}; diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-pipeline-event.test.ts 
b/src/entry/webtriggers/gitlab-event-handlers/handle-pipeline-event.test.ts new file mode 100644 index 0000000..ea0b4e8 --- /dev/null +++ b/src/entry/webtriggers/gitlab-event-handlers/handle-pipeline-event.test.ts @@ -0,0 +1,76 @@ +/* eslint-disable import/first, import/order */ +import { mocked } from 'jest-mock'; + +import { mockAgg } from '../../../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { generatePipelineEvent } from '../../../__tests__/helpers/gitlab-helper'; +import { handlePipelineEvent } from './handle-pipeline-event'; +import { getTrackingBranchName } from '../../../services/get-tracking-branch'; +import { sendEventToCompass } from '../../../services/send-compass-events'; +import { TEST_TOKEN, MOCK_CLOUD_ID } from '../../../__tests__/fixtures/gitlab-data'; +import { webhookPipelineEventToCompassBuildEvent } from '../../../services/builds'; +import { insertMetricValues } from '../../../services/insert-metric-values'; + +jest.mock('../../../services/get-tracking-branch'); +jest.mock('../../../services/send-compass-events'); +jest.mock('../../../client/gitlab'); +jest.mock('../../../services/compute-event-and-metrics'); +jest.mock('../../../services/insert-metric-values'); + +describe('Gitlab events', () => { + const event = generatePipelineEvent(); + + const eventWithIncorrectRef = generatePipelineEvent({ + object_attributes: { + ...generatePipelineEvent().object_attributes, + ref: 'wrong', + }, + }); + const getTrackingBranchNameMock = mocked(getTrackingBranchName); + const sendEventToCompassMock = mocked(sendEventToCompass); + const insertMetricValuesMock = mocked(insertMetricValues); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('ignores event if the branch is not default and non-default branch wasn`t set', async () => { + getTrackingBranchNameMock.mockResolvedValue(eventWithIncorrectRef.project.default_branch); + + await handlePipelineEvent(eventWithIncorrectRef, TEST_TOKEN, MOCK_CLOUD_ID); + + 
expect(sendEventToCompassMock).not.toBeCalled(); + expect(insertMetricValuesMock).not.toBeCalled(); + }); + + it('ingests build event for main branch', async () => { + getTrackingBranchNameMock.mockResolvedValue(event.project.default_branch); + + await handlePipelineEvent(event, TEST_TOKEN, MOCK_CLOUD_ID); + + expect(sendEventToCompassMock).toBeCalledTimes(1); + expect(sendEventToCompassMock).toBeCalledWith(webhookPipelineEventToCompassBuildEvent(event, MOCK_CLOUD_ID)); + expect(insertMetricValuesMock).not.toBeCalled(); + }); + + it('ingests build events from a non-default branch which was set via project variable', async () => { + const BRANCH_NAME = 'koko'; + getTrackingBranchNameMock.mockResolvedValue(BRANCH_NAME); + const nonDefaultBranchEvent = generatePipelineEvent({ + object_attributes: { + ...generatePipelineEvent().object_attributes, + ref: BRANCH_NAME, + }, + }); + + await handlePipelineEvent(nonDefaultBranchEvent, TEST_TOKEN, MOCK_CLOUD_ID); + + expect(sendEventToCompassMock).toBeCalledTimes(1); + expect(sendEventToCompassMock).toBeCalledWith( + webhookPipelineEventToCompassBuildEvent(nonDefaultBranchEvent, MOCK_CLOUD_ID), + ); + expect(insertMetricValuesMock).not.toBeCalled(); + }); +}); diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-pipeline-event.ts b/src/entry/webtriggers/gitlab-event-handlers/handle-pipeline-event.ts new file mode 100644 index 0000000..24e84fb --- /dev/null +++ b/src/entry/webtriggers/gitlab-event-handlers/handle-pipeline-event.ts @@ -0,0 +1,29 @@ +import { PipelineEvent } from '../../../types'; +import { getTrackingBranchName } from '../../../services/get-tracking-branch'; +import { sendEventToCompass } from '../../../services/send-compass-events'; +import { webhookPipelineEventToCompassBuildEvent } from '../../../services/builds'; + +export const isEventForTrackingBranch = (event: PipelineEvent, trackingBranch: string): boolean => { + return event.object_attributes.ref === trackingBranch; +}; + +export const 
handlePipelineEvent = async (event: PipelineEvent, groupToken: string, cloudId: string): Promise => { + const { + project: { id: projectId, default_branch: defaultBranch }, + object_attributes: { ref }, + } = event; + + const trackingBranch = await getTrackingBranchName(groupToken, projectId, defaultBranch); + + if (!isEventForTrackingBranch(event, trackingBranch)) { + console.log({ + message: 'Received push event for non-tracking branch', + ref, + trackingBranch, + }); + return; + } + + await sendEventToCompass(webhookPipelineEventToCompassBuildEvent(event, cloudId)); + console.log('Build event sent for pipeline.'); +}; diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-push-event.test.ts b/src/entry/webtriggers/gitlab-event-handlers/handle-push-event.test.ts new file mode 100644 index 0000000..403032f --- /dev/null +++ b/src/entry/webtriggers/gitlab-event-handlers/handle-push-event.test.ts @@ -0,0 +1,135 @@ +/* eslint-disable import/first */ +import { mocked } from 'jest-mock'; +import { mockForgeApi } from '../../../__tests__/helpers/forge-helper'; + +mockForgeApi(); +import { generatePushEvent } from '../../../__tests__/helpers/gitlab-helper'; +import { handlePushEvent } from './handle-push-event'; +import { + findConfigAsCodeFileChanges, + syncComponent, + unlinkComponent, +} from '../../../services/sync-component-with-file'; +import { getTrackingBranchName } from '../../../services/get-tracking-branch'; +import { TEST_TOKEN } from '../../../__tests__/fixtures/gitlab-data'; + +jest.mock('../../../services/sync-component-with-file', () => { + return { + syncComponent: jest.fn(), + unlinkComponent: jest.fn(), + findConfigAsCodeFileChanges: jest.fn(), + }; +}); + +jest.mock('../../../services/get-tracking-branch'); + +describe('Gitlab push events', () => { + const event = generatePushEvent(); + const eventWithIncorrectRef = generatePushEvent({ + ref: 'wrong', + }); + const updates = mocked(syncComponent); + const removals = mocked(unlinkComponent); + 
+  const findConfigChanges = mocked(findConfigAsCodeFileChanges);
+  const getNonDefaultBranchNameMock = mocked(getTrackingBranchName);
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('ignores event if the branch is not tracking', async () => {
+    getNonDefaultBranchNameMock.mockResolvedValue(eventWithIncorrectRef.project.default_branch);
+
+    await handlePushEvent(eventWithIncorrectRef, TEST_TOKEN);
+
+    expect(updates).not.toBeCalled();
+    expect(removals).not.toBeCalled();
+  });
+
+  it('ignores event if no config as code file updates present', async () => {
+    getNonDefaultBranchNameMock.mockResolvedValue(event.project.default_branch);
+    findConfigChanges.mockResolvedValue({ componentsToSync: [], componentsToUnlink: [] });
+    await handlePushEvent(event, TEST_TOKEN);
+
+    expect(updates).not.toBeCalled();
+    expect(removals).not.toBeCalled();
+  });
+
+  it('performs config as code file updates for default branch', async () => {
+    const mockComponentsToSync = [
+      {
+        componentYaml: { id: 'test1' },
+        absoluteFilePath: 'path/fileName1.yaml',
+      },
+      {
+        componentYaml: { id: 'test2' },
+        absoluteFilePath: 'path/fileName2.yaml',
+      },
+    ];
+    const mockComponentsToUnlink = [{ id: 'test1' }];
+
+    getNonDefaultBranchNameMock.mockResolvedValue(event.project.default_branch);
+    findConfigChanges.mockResolvedValue({
+      componentsToSync: mockComponentsToSync,
+      componentsToUnlink: mockComponentsToUnlink,
+    });
+
+    await handlePushEvent(event, TEST_TOKEN);
+
+    expect(updates).toBeCalledWith(
+      TEST_TOKEN,
+      mockComponentsToSync[0].componentYaml,
+      mockComponentsToSync[0].absoluteFilePath,
+      expect.anything(),
+      event.project.default_branch,
+    );
+    expect(updates).toBeCalledWith(
+      TEST_TOKEN,
+      mockComponentsToSync[1].componentYaml,
+      mockComponentsToSync[1].absoluteFilePath,
+      expect.anything(),
+      event.project.default_branch,
+    );
+    expect(removals).toBeCalledWith(mockComponentsToUnlink[0].id, expect.anything());
+  });
+
+  it('performs config as code file updates for non-default branch which was set via project variable', async () => {
+    const mockComponentsToSync = [
+      {
+        componentYaml: { id: 'test1' },
+        absoluteFilePath: 'path/fileName1.yaml',
+      },
+      {
+        componentYaml: { id: 'test2' },
+        absoluteFilePath: 'path/fileName2.yaml',
+      },
+    ];
+    const BRANCH_NAME = 'koko';
+    const mockComponentsToUnlink = [{ id: 'test1' }];
+    const pushEvent = generatePushEvent({ ref: `refs/heads/${BRANCH_NAME}` });
+
+    getNonDefaultBranchNameMock.mockResolvedValue(BRANCH_NAME);
+    findConfigChanges.mockResolvedValue({
+      componentsToSync: mockComponentsToSync,
+      componentsToUnlink: mockComponentsToUnlink,
+    });
+
+    await handlePushEvent(pushEvent, TEST_TOKEN);
+
+    expect(updates).toBeCalledWith(
+      TEST_TOKEN,
+      mockComponentsToSync[0].componentYaml,
+      mockComponentsToSync[0].absoluteFilePath,
+      expect.anything(),
+      BRANCH_NAME,
+    );
+    expect(updates).toBeCalledWith(
+      TEST_TOKEN,
+      mockComponentsToSync[1].componentYaml,
+      mockComponentsToSync[1].absoluteFilePath,
+      expect.anything(),
+      BRANCH_NAME,
+    );
+    expect(removals).toBeCalledWith(mockComponentsToUnlink[0].id, expect.anything());
+  });
+});
diff --git a/src/entry/webtriggers/gitlab-event-handlers/handle-push-event.ts b/src/entry/webtriggers/gitlab-event-handlers/handle-push-event.ts
new file mode 100644
index 0000000..fc95e06
--- /dev/null
+++ b/src/entry/webtriggers/gitlab-event-handlers/handle-push-event.ts
@@ -0,0 +1,44 @@
+import {
+  findConfigAsCodeFileChanges,
+  syncComponent,
+  unlinkComponent,
+} from '../../../services/sync-component-with-file';
+import { isEventForTrackingBranch } from '../../../utils/push-event-utils';
+import { PushEvent } from '../../../types';
+import { getTrackingBranchName } from '../../../services/get-tracking-branch';
+
+export const handlePushEvent = async (event: PushEvent, groupToken: string): Promise<void> => {
+  const trackingBranch = await getTrackingBranchName(groupToken, event.project.id, event.project.default_branch);
+
+  if (!isEventForTrackingBranch(event, trackingBranch)) {
+    console.log({
+      message: 'Received push event for non-tracking branch',
+      ref: event.ref,
+      trackingBranch,
+    });
+    return;
+  }
+
+  console.log('Received push event for tracking branch -', trackingBranch);
+
+  const { componentsToSync, componentsToUnlink } = await findConfigAsCodeFileChanges(event, groupToken);
+
+  if (componentsToSync.length === 0 && componentsToUnlink.length === 0) {
+    console.log('No config as code file updates in push event');
+    return;
+  }
+
+  console.log({
+    message: 'Performing config as code file updates',
+    updatedFiles: componentsToSync.length,
+    removedFiles: componentsToUnlink.length,
+  });
+
+  const updates = componentsToSync.map((c) =>
+    syncComponent(groupToken, c.componentYaml, c.absoluteFilePath, event, trackingBranch),
+  );
+  const removals = componentsToUnlink.map((componentYaml) =>
+    unlinkComponent(componentYaml.id, event.project.id.toString()),
+  );
+  await Promise.all([...updates, ...removals]);
+};
diff --git a/src/entry/webtriggers/gitlab-event-handlers/index.ts b/src/entry/webtriggers/gitlab-event-handlers/index.ts
new file mode 100644
index 0000000..75effc5
--- /dev/null
+++ b/src/entry/webtriggers/gitlab-event-handlers/index.ts
@@ -0,0 +1,4 @@
+export { handlePushEvent } from './handle-push-event';
+export { handleMergeRequestEvent } from './handle-merge-request-event';
+export { handlePipelineEvent } from './handle-pipeline-event';
+export { handleDeploymentEvent } from './handle-deployment-event';
diff --git a/src/entry/webtriggers/index.ts b/src/entry/webtriggers/index.ts
new file mode 100644
index 0000000..f77b0f4
--- /dev/null
+++ b/src/entry/webtriggers/index.ts
@@ -0,0 +1 @@
+export { processGitlabEvent } from './process-gitlab-event';
diff --git a/src/entry/webtriggers/process-gitlab-event.test.ts b/src/entry/webtriggers/process-gitlab-event.test.ts
new file mode 100644
index 0000000..83ea756
--- /dev/null
+++ b/src/entry/webtriggers/process-gitlab-event.test.ts
@@ -0,0 +1,116 @@
+/* eslint-disable import/first */
+import { mocked } from 'jest-mock';
+import { storage, mockForgeApi } from '../../__tests__/helpers/forge-helper';
+
+mockForgeApi();
+import { WebtriggerRequest } from '../../types';
+import {
+  handleDeploymentEvent,
+  handleMergeRequestEvent,
+  handlePipelineEvent,
+  handlePushEvent,
+} from './gitlab-event-handlers';
+import { processGitlabEvent } from './process-gitlab-event';
+import { serverResponse } from '../../utils/webtrigger-utils';
+import {
+  generateDeploymentEvent,
+  generateMergeRequestEvent,
+  generatePipelineEvent,
+  generatePushEvent,
+} from '../../__tests__/helpers/gitlab-helper';
+import { MOCK_CLOUD_ID, TEST_TOKEN } from '../../__tests__/fixtures/gitlab-data';
+
+jest.mock('./gitlab-event-handlers');
+jest.mock('../../utils/webtrigger-utils');
+jest.mock('../../services/feature-flags');
+
+const MOCK_CONTEXT = {
+  principal: undefined as undefined,
+  installContext: `ari:cloud:compass::site/${MOCK_CLOUD_ID}`,
+};
+
+const MOCK_GROUP_ID = 1;
+
+const generateWebtriggerRequest = (body: string, token = TEST_TOKEN): WebtriggerRequest => {
+  return {
+    body,
+    queryParameters: {
+      groupId: [MOCK_GROUP_ID],
+    },
+    headers: {
+      'x-gitlab-token': [token],
+    },
+  };
+};
+
+const mockHandlePushEvent = mocked(handlePushEvent);
+const mockHandlePipelineEvent = mocked(handlePipelineEvent);
+const mockHandleMergeRequestEvent = mocked(handleMergeRequestEvent);
+const mockDeploymentEvent = mocked(handleDeploymentEvent);
+
+describe('processGitlabEvent', () => {
+  const MOCK_PUSH_EVENT = generatePushEvent();
+  const MOCK_PIPELINE_EVENT = generatePipelineEvent();
+  const MOCK_MERGE_REQUEST_EVENT = generateMergeRequestEvent();
+  const MOCK_DEPLOYMENT_EVENT = generateDeploymentEvent();
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+    storage.getSecret.mockResolvedValue(TEST_TOKEN);
+    storage.get.mockResolvedValue(TEST_TOKEN);
+  });
+
+  it('handles push event', async () => {
+    const webtriggerRequest = generateWebtriggerRequest(JSON.stringify(MOCK_PUSH_EVENT));
+
+    await processGitlabEvent(webtriggerRequest, MOCK_CONTEXT);
+
+    expect(mockHandlePushEvent).toHaveBeenCalledWith(MOCK_PUSH_EVENT, TEST_TOKEN);
+    expect(serverResponse).toHaveBeenCalledWith('Processed webhook event');
+  });
+
+  it('returns server response error in case of invalid webhook event secret', async () => {
+    const webtriggerRequest = generateWebtriggerRequest(JSON.stringify(MOCK_PUSH_EVENT), 'invalid-token');
+
+    await processGitlabEvent(webtriggerRequest, MOCK_CONTEXT);
+
+    expect(mockHandlePushEvent).not.toHaveBeenCalled();
+    expect(serverResponse).toHaveBeenCalledWith('Invalid webhook secret', 403);
+  });
+
+  it('returns server response error in case of failed parsing webhook event', async () => {
+    const webtriggerRequest = generateWebtriggerRequest('Invalid body');
+
+    await processGitlabEvent(webtriggerRequest, MOCK_CONTEXT);
+
+    expect(mockHandlePushEvent).not.toHaveBeenCalled();
+    expect(serverResponse).toHaveBeenCalledWith('Invalid event format', 400);
+  });
+
+  it('handles pipeline event when FF is enabled', async () => {
+    const webtriggerRequest = generateWebtriggerRequest(JSON.stringify(MOCK_PIPELINE_EVENT));
+
+    await processGitlabEvent(webtriggerRequest, MOCK_CONTEXT);
+
+    expect(mockHandlePipelineEvent).toHaveBeenCalledWith(MOCK_PIPELINE_EVENT, TEST_TOKEN, MOCK_CLOUD_ID);
+    expect(serverResponse).toHaveBeenCalledWith('Processed webhook event');
+  });
+
+  it('handles merge request event', async () => {
+    const webtriggerRequest = generateWebtriggerRequest(JSON.stringify(MOCK_MERGE_REQUEST_EVENT));
+
+    await processGitlabEvent(webtriggerRequest, MOCK_CONTEXT);
+
+    expect(mockHandleMergeRequestEvent).toHaveBeenCalledWith(MOCK_MERGE_REQUEST_EVENT, TEST_TOKEN, MOCK_CLOUD_ID);
+    expect(serverResponse).toHaveBeenCalledWith('Processed webhook event');
+  });
+
+  it('handles deployment event when FF is enabled', async () => {
+    const webtriggerRequest = generateWebtriggerRequest(JSON.stringify(MOCK_DEPLOYMENT_EVENT));
+
+    await processGitlabEvent(webtriggerRequest, MOCK_CONTEXT);
+
+    expect(mockDeploymentEvent).toHaveBeenCalledWith(MOCK_DEPLOYMENT_EVENT, TEST_TOKEN, MOCK_CLOUD_ID);
+    expect(serverResponse).toHaveBeenCalledWith('Processed webhook event');
+  });
+});
diff --git a/src/entry/webtriggers/process-gitlab-event.ts b/src/entry/webtriggers/process-gitlab-event.ts
new file mode 100644
index 0000000..a6b3985
--- /dev/null
+++ b/src/entry/webtriggers/process-gitlab-event.ts
@@ -0,0 +1,77 @@
+import { storage } from '@forge/api';
+
+import {
+  DeploymentEvent,
+  GitlabEvent,
+  MergeRequestEvent,
+  PipelineEvent,
+  PushEvent,
+  WebtriggerRequest,
+  WebtriggerResponse,
+} from '../../types';
+import { serverResponse } from '../../utils/webtrigger-utils';
+import { parse } from '../../utils/parse-ari';
+import { STORAGE_KEYS, STORAGE_SECRETS } from '../../constants';
+import {
+  handlePushEvent,
+  handleMergeRequestEvent,
+  handleDeploymentEvent,
+  handlePipelineEvent,
+} from './gitlab-event-handlers';
+import { listFeatures } from '../../services/feature-flags';
+
+type Context = {
+  principal: undefined;
+  installContext: string;
+};
+
+class ValidateWebhookSignatureError extends Error {}
+
+const validateWebhookSignature = (eventSignature: string, controlSignature: string): void | never => {
+  if (eventSignature !== controlSignature) {
+    throw new ValidateWebhookSignatureError();
+  }
+};
+
+export const processGitlabEvent = async (event: WebtriggerRequest, context: Context): Promise<WebtriggerResponse> => {
+  const { installContext } = context;
+  const cloudId = parse(installContext).resourceId;
+  const groupId = event.queryParameters.groupId[0];
+  const groupToken = await storage.getSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupId}`);
+  const eventPayload = event.body;
+  let parsedEvent: GitlabEvent;
+
+  try {
+    validateWebhookSignature(
+      event.headers['x-gitlab-token'][0],
+      await storage.get(`${STORAGE_KEYS.WEBHOOK_SIGNATURE_PREFIX}${groupId}`),
+    );
+    parsedEvent = JSON.parse(eventPayload);
+  } catch (error) {
+    if (error instanceof ValidateWebhookSignatureError) {
+      console.error({ message: 'Webhook event secret is invalid', error });
+      return serverResponse('Invalid webhook secret', 403);
+    }
+
+    console.error({ message: 'Failed parsing webhook event', error });
+    return serverResponse('Invalid event format', 400);
+  }
+
+  if (parsedEvent.object_kind === 'push') {
+    await handlePushEvent(parsedEvent as PushEvent, groupToken);
+  }
+
+  if (parsedEvent.object_kind === 'merge_request') {
+    await handleMergeRequestEvent(parsedEvent as MergeRequestEvent, groupToken, cloudId);
+  }
+
+  if (parsedEvent.object_kind === 'pipeline') {
+    await handlePipelineEvent(parsedEvent as PipelineEvent, groupToken, cloudId);
+  }
+
+  if (parsedEvent.object_kind === 'deployment') {
+    await handleDeploymentEvent(parsedEvent as DeploymentEvent, groupToken, cloudId);
+  }
+
+  return serverResponse('Processed webhook event');
+};
diff --git a/src/features.ts b/src/features.ts
new file mode 100644
index 0000000..a93f520
--- /dev/null
+++ b/src/features.ts
@@ -0,0 +1,3 @@
+export enum GitlabFeaturesEnum {}
+
+export type FeaturesList = { [key in GitlabFeaturesEnum]: boolean };
diff --git a/src/import-queue-resolver.ts b/src/import-queue-resolver.ts
new file mode 100644
index 0000000..e243c3e
--- /dev/null
+++ b/src/import-queue-resolver.ts
@@ -0,0 +1,95 @@
+import { CreateLinkInput } from '@atlassian/forge-graphql';
+import Resolver from '@forge/resolver';
+import { storage } from '@forge/api';
+import { backOff, IBackOffOptions } from 'exponential-backoff';
+
+import { createComponent, updateComponent } from './client/compass';
+import { STORAGE_KEYS, BACK_OFF, IMPORT_LABEL } from './constants';
+import { appendLink } from './utils/append-link';
+import { ImportableProject } from './resolverTypes';
+import { sleep } from './utils/time-utils';
+import { createMRWithCompassYML } from './services/create-mr-with-compass-yml';
+
+const backOffConfig: Partial<IBackOffOptions> = {
+  startingDelay: BACK_OFF.startingDelay,
+  timeMultiple: BACK_OFF.timeMultiple,
+  numOfAttempts: BACK_OFF.numOfAttempts,
+  jitter: BACK_OFF.jitter,
+};
+
+const resolver = new Resolver();
+
+type ReqPayload = {
+  createProjectData: string;
+};
+
+const setFailedRepositoriesToStore = async (project: ImportableProject) => {
+  try {
+    await backOff(
+      () => storage.set(`${STORAGE_KEYS.CURRENT_IMPORT_FAILED_PROJECT_PREFIX}:${project.id}`, project),
+      backOffConfig,
+    );
+  } catch (err) {
+    console.error('Failed to stored failed project after all retries', err);
+  }
+};
+
+resolver.define('import', async (req) => {
+  const { createProjectData } = req.payload as ReqPayload;
+
+  // Added this sleep to add some "jitter", and make progress more user-friendly
+  await sleep(Math.ceil(Math.random() * 5000));
+
+  const { cloudId, project, groupId } = JSON.parse(createProjectData);
+  const {
+    name,
+    hasComponent,
+    id,
+    isCompassFilePrOpened,
+    isManaged,
+    description,
+    type,
+    labels,
+    url,
+    componentLinks,
+    componentId,
+    shouldOpenMR,
+  } = project;
+
+  try {
+    if (!hasComponent) {
+      const component = await backOff(() => createComponent(cloudId, project), backOffConfig);
+      console.log(`GitLab project ${name}:${id} was imported. Compass component was created - ${component.id}.`);
+
+      if (shouldOpenMR) {
+        await createMRWithCompassYML(project, component, groupId);
+      }
+    } else if (hasComponent && !(isCompassFilePrOpened && isManaged)) {
+      const formattedLabels = labels.map((label: string) => label.split(' ').join('-').toLowerCase());
+      const component = {
+        name,
+        description,
+        type,
+        labels: [IMPORT_LABEL, ...formattedLabels],
+        links: appendLink(url, componentLinks) as CreateLinkInput[],
+      };
+
+      const updatedComponent = await backOff(() => updateComponent({ id: componentId, ...component }), backOffConfig);
+
+      if (shouldOpenMR) {
+        await createMRWithCompassYML(project, updatedComponent, groupId);
+      }
+
+      console.log(
+        `GitLab project ${name}:${id} was imported.
+        Compass component - ${updatedComponent.id} was updated.`,
+      );
+    }
+  } catch (err) {
+    console.error(`Failed to create or update compass component for "${name}" project after all retries`, err);
+
+    await setFailedRepositoriesToStore(project);
+  }
+});
+
+export const run = resolver.getDefinitions();
diff --git a/src/index.ts b/src/index.ts
new file mode 100644
index 0000000..20c23c4
--- /dev/null
+++ b/src/index.ts
@@ -0,0 +1,17 @@
+import resolver from './resolvers';
+import { processGitlabEvent } from './entry/webtriggers';
+import { dataProvider } from './entry/data-provider';
+import { callback } from './entry/data-provider/callback';
+import preUninstall from './entry/extension-points/pre-uninstall';
+import { configValidator } from './entry/config-validator';
+
+// extension points
+export { preUninstall };
+// webtriggers
+export { processGitlabEvent };
+// resolvers
+export { resolver };
+// dataProvider
+export { dataProvider, callback };
+// configValidator
+export { configValidator };
diff --git a/src/models/error-messages.ts b/src/models/error-messages.ts
new file mode 100644
index 0000000..9f9dbd0
--- /dev/null
+++ b/src/models/error-messages.ts
@@ -0,0 +1,52 @@
+function truncate(s: string): string {
+  return s.toString().substring(0, 25);
+}
+
+function arrayToSentence(arr: Array<string>) {
+  const joinedArray =
+    arr.slice(0, -2).join('", "') + (arr.slice(0, -2).length ? '", "' : '') + arr.slice(-2).join('" and "');
+  return `"${joinedArray}"`;
+}
+
+export const missingKeyErrorMessage = (missingKeys: Array<string>): string =>
+  `${arrayToSentence(missingKeys)} must be included in the configuration file`;
+
+export const missingNestedKeyErrorMessage = (missingKeys: Array<string>, topLevelProperty: string): string =>
+  `the "${topLevelProperty}" property in the configuration file must include ${arrayToSentence(missingKeys)}`;
+
+export function invalidKeyErrorMessage(propertyName: string, validProperties?: string[]): string {
+  const truncatedPropertyName = truncate(propertyName);
+  return validProperties && validProperties.length > 0
+    ? `"${truncatedPropertyName}" must be one of the following keys: ${validProperties.join(', ')}`
+    : `"${truncatedPropertyName}" is not a valid property`;
+}
+
+export const emptyStringErrorMessage = (key: string): string => `"${key}" cannot be empty string.`;
+
+export const invalidCharactersErrorMessage = (key: string): string =>
+  `"${key}" contains invalid characters. Remove those characters and try again.`;
+
+export const maxValueLengthErrorMessage = (key: string, length: number): string =>
+  `"${key}" field is too long. Try again with a value no longer than ${length} characters.`;
+
+export const invalidValueTypeErrorMessage = (key: string, expectedType: string): string =>
+  `"${key}" must be of type "${expectedType}"`;
+
+export const invalidFieldTypeErrorMessage = (propertyName: string, validTypes: string[]): string =>
+  `"${propertyName}" must have a value of: ${validTypes.join(', ')}`;
+
+export const invalidLinkTypeErrorMessage = (type: string, validTypes: string[]): string =>
+  `"${truncate(type)}" is not a valid link type. The accepted values are: ${validTypes.join(', ')}`;
+
+export const DETACH_ERROR_MESSAGE =
+  'Unexpected internal server error. You may need to relink the component and try deleting it again';
+
+export const INVALID_RELATIONSHIP_ERROR_MESSAGE =
+  'The Atlassian resource identifier (ARI) of the component at the start node of this relationship is invalid. Try again with a valid ARI';
+
+export const DEFAULT_SERVER_ERROR_MESSAGE = 'Unexpected internal server error. Try again';
+
+export const INVALID_YAML_ERROR = 'Invalid YAML format. Try again with a valid YAML file';
+
+export const UNKNOWN_EXTERNAL_ALIAS_ERROR_MESSAGE =
+  'We couldn’t find the external alias. Check for typos and try again';
diff --git a/src/models/errors.ts b/src/models/errors.ts
new file mode 100644
index 0000000..5b415dc
--- /dev/null
+++ b/src/models/errors.ts
@@ -0,0 +1,57 @@
+/* eslint-disable max-classes-per-file */
+
+import { SdkError } from '@atlassian/forge-graphql';
+
+export class AggClientError extends Error {
+  errors: string[];
+
+  constructor(returnedErrors: string[], ...params: any) {
+    super(...params);
+    this.errors = returnedErrors;
+    this.message = returnedErrors.join(', ');
+  }
+}
+
+export class InvalidConfigFileError extends Error {
+  errors: string[];
+
+  constructor(validationErrors: string[], ...params: any) {
+    super(...params);
+    this.errors = validationErrors;
+    this.message = validationErrors.join(', ');
+  }
+}
+
+export class InvalidAriError extends Error {}
+
+export class GraphqlGatewayError extends Error {
+  method: string;
+
+  errors: SdkError[];
+
+  constructor(method: string, sdkErrors: SdkError[], ...params: any) {
+    super(...params);
+    this.method = method;
+    this.errors = sdkErrors;
+    const concatenatedErrorMessages = sdkErrors.map((e) => e.message).join(', ');
+    this.message = `Error calling ${method} in graphql gateway. Error(s): ${concatenatedErrorMessages}`;
+  }
+}
+
+export class MissingAppIdError extends Error {
+  constructor() {
+    super('No FORGE_APP_ID environment variable is set for this app');
+  }
+}
+
+export class GitlabHttpMethodError extends Error {
+  statusText: string;
+
+  status: number;
+
+  constructor(status: number, statusText: string, ...params: any) {
+    super(...params);
+    this.status = status;
+    this.statusText = statusText;
+  }
+}
diff --git a/src/models/expected-compass-types.ts b/src/models/expected-compass-types.ts
new file mode 100644
index 0000000..f7cac32
--- /dev/null
+++ b/src/models/expected-compass-types.ts
@@ -0,0 +1,58 @@
+// TODO: see how much of this file is replaceable by autogen types from the sdk
+
+export const types = {
+  OPTIONAL_STRING: 'optional:string',
+  REQUIRED_STRING: 'required:string',
+  REQUIRED_ARI: 'required:ari',
+  OPTIONAL_OBJECT: 'optional:object',
+  REQUIRED_OBJECT: 'required:object',
+  REQUIRED_STRING_OR_NUMBER: 'required:string|number',
+  OPTIONAL_STRING_OR_NUMBER: 'optional:string|number',
+};
+
+export function isRequired(type: string) {
+  return type.split(':')[0] === 'required';
+}
+
+export function parseType(type: string) {
+  return type.split(':')[1];
+}
+
+export const configKeyTypes = {
+  id: types.REQUIRED_ARI,
+  name: types.REQUIRED_STRING,
+  description: types.OPTIONAL_STRING,
+  ownerId: types.OPTIONAL_STRING,
+  fields: types.OPTIONAL_OBJECT,
+  links: types.OPTIONAL_OBJECT,
+  relationships: types.OPTIONAL_OBJECT,
+};
+
+export const serviceConfigKeyTypes = {
+  ...configKeyTypes,
+  fields: types.REQUIRED_OBJECT,
+};
+
+export const fieldKeyTypes = {};
+
+export const serviceFieldKeyTypes = {
+  tier: types.REQUIRED_STRING_OR_NUMBER,
+};
+
+export const linkKeyTypes = {
+  type: types.REQUIRED_STRING,
+  url: types.REQUIRED_STRING,
+  name: types.OPTIONAL_STRING,
+};
+
+export const relationshipKeyTypes = {
+  DEPENDS_ON: types.OPTIONAL_OBJECT,
+};
+
+export const validFieldKeys = ['tier'];
+
+export const validTierValues = ['1', '2', '3', '4'];
+
+export const validLinkTypes = ['DOCUMENT', 'CHAT_CHANNEL', 'REPOSITORY', 'PROJECT', 'DASHBOARD', 'OTHER_LINK'];
+
+export const validServiceLinkTypes = validLinkTypes.concat('ON_CALL');
diff --git a/src/resolverTypes.ts b/src/resolverTypes.ts
new file mode 100644
index 0000000..a9be426
--- /dev/null
+++ b/src/resolverTypes.ts
@@ -0,0 +1,50 @@
+import {
+  GitlabAPIGroup,
+  ImportableProject,
+  ImportStatus,
+  ProjectReadyForImport,
+  ProjectImportResult,
+  GroupProjectsResponse,
+} from './types';
+import { FeaturesList } from './features';
+
+export enum DefaultErrorTypes {
+  UNEXPECTED_ERROR = 'UNEXPECTED_ERROR',
+}
+
+export enum AuthErrorTypes {
+  INVALID_GROUP_TOKEN = 'INVALID_GROUP_TOKEN',
+  INVALID_GROUP_TOKEN_NAME = 'INVALID_GROUP_TOKEN_NAME',
+  INCORRECT_GROUP_TOKEN_SCOPES = 'INCORRECT_GROUP_TOKEN_SCOPES',
+  UNEXPECTED_ERROR = 'UNEXPECTED_ERROR',
+}
+export enum ImportErrorTypes {
+  ONE_TIME_IMPORT_LIMIT = 'IMPORT_PROJECTS_ONE_TIME_LIMIT',
+  UNEXPECTED_ERROR = 'IMPORT_PROJECTS_UNEXPECTED',
+  CANNOT_GET_PROGRESS_STATUS = 'CANNOT_GET_PROGRESS_STATUS',
+  CANNOT_GET_IMPORT_RESULT = 'CANNOT_GET_IMPORT_RESULT',
+  FAILED_CLEAR_IMPORT_RESULT = 'FAILED_CLEAR_IMPORT_RESULT',
+}
+
+type ErrorTypes = AuthErrorTypes | ImportErrorTypes | DefaultErrorTypes;
+
+type ResponseError = { message: string; errorType?: ErrorTypes };
+
+type ResolverResponse<T = void> = {
+  success: boolean;
+  errors?: ResponseError[];
+  data?: T;
+};
+
+export type {
+  GitlabAPIGroup,
+  ResponseError,
+  ResolverResponse,
+  ErrorTypes,
+  ImportableProject,
+  ProjectReadyForImport,
+  ProjectImportResult,
+  ImportStatus,
+  FeaturesList,
+  GroupProjectsResponse,
+};
diff --git a/src/resolvers.ts b/src/resolvers.ts
new file mode 100644
index 0000000..a004fe6
--- /dev/null
+++ b/src/resolvers.ts
@@ -0,0 +1,235 @@
+import Resolver from '@forge/resolver';
+
+import graphqlGateway from '@atlassian/forge-graphql';
+import { getGroupProjects } from './services/fetch-projects';
+import {
+  AuthErrorTypes,
+  ImportErrorTypes,
+  GitlabAPIGroup,
+  ProjectReadyForImport,
+  ResolverResponse,
+  ProjectImportResult,
+  ImportStatus,
+  DefaultErrorTypes,
+  FeaturesList,
+} from './resolverTypes';
+import { connectGroup, getAllExistingGroups, getConnectedGroups, InvalidGroupTokenError } from './services/group';
+import {
+  clearImportResult,
+  getImportResult,
+  getImportStatus,
+  ImportFailedError,
+  importProjects,
+} from './services/import-projects';
+import { setupWebhook } from './services/webhooks';
+import { disconnectGroup } from './services/disconnect-group';
+import { getForgeAppId } from './utils/get-forge-app-id';
+import { getLastSyncTime } from './services/last-sync-time';
+import { listFeatures } from './services/feature-flags';
+import { GroupProjectsResponse } from './types';
+
+const resolver = new Resolver();
+
+resolver.define('groups/disconnect', async (req): Promise<ResolverResponse> => {
+  try {
+    const {
+      payload: { id: groupId },
+      context: { cloudId },
+    } = req;
+    const forgeAppId = getForgeAppId();
+
+    await disconnectGroup(groupId, cloudId, forgeAppId);
+    return { success: true };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: 'Disconnect group failed.', errorType: AuthErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+resolver.define('groups', async (): Promise<ResolverResponse<GitlabAPIGroup[]>> => {
+  try {
+    const connectedGroups = await getConnectedGroups();
+
+    return { success: true, data: connectedGroups };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: 'Get connected groups failed.', errorType: AuthErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+resolver.define('groups/connect', async (req): Promise<ResolverResponse> => {
+  const {
+    payload: { groupToken, groupTokenName },
+    context: { cloudId },
+  } = req;
+  try {
+    const groupId = await connectGroup(groupToken, groupTokenName);
+
+    await setupWebhook(groupId);
+
+    await graphqlGateway.compass.asApp().synchronizeLinkAssociations({
+      cloudId,
+      forgeAppId: getForgeAppId(),
+    });
+
+    return { success: true };
+  } catch (e) {
+    if (e instanceof InvalidGroupTokenError) {
+      return {
+        success: false,
+        errors: [{ message: e.message, errorType: e.errorType }],
+      };
+    }
+
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: AuthErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+resolver.define('groups/allExisting', async (): Promise<ResolverResponse<GitlabAPIGroup[]>> => {
+  try {
+    const allExistingGroups = await getAllExistingGroups();
+
+    return { success: true, data: allExistingGroups };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: 'Get all existing groups failed.', errorType: AuthErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+resolver.define('groups/projects', async (req): Promise<ResolverResponse<GroupProjectsResponse>> => {
+  const {
+    payload: { groupId, page, groupTokenId, search },
+    context: { cloudId },
+  } = req;
+
+  try {
+    const { projects, total } = await getGroupProjects(cloudId, groupId, page, groupTokenId, search);
+
+    return { success: true, data: { projects, total } };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: e.errorType }],
+    };
+  }
+});
+
+resolver.define('project/import', async (req): Promise<ResolverResponse> => {
+  const {
+    payload: { projectsReadyToImport, groupId },
+    context: { cloudId },
+  } = req;
+
+  try {
+    await importProjects(cloudId, projectsReadyToImport, groupId);
+    return {
+      success: true,
+    };
+  } catch (e) {
+    if (e instanceof ImportFailedError) {
+      return {
+        success: false,
+        errors: [{ message: e.message, errorType: e.errorType }],
+      };
+    }
+
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: ImportErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+resolver.define('project/import/status', async (): Promise<ResolverResponse<ImportStatus>> => {
+  try {
+    const importStatus = await getImportStatus();
+    return { success: true, data: importStatus };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: e.errorType }],
+    };
+  }
+});
+
+resolver.define('project/import/result', async (): Promise<ResolverResponse<ProjectImportResult>> => {
+  try {
+    const importResult = await getImportResult();
+    return { success: true, data: importResult };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: e.errorType }],
+    };
+  }
+});
+
+resolver.define('project/import/clear', async (): Promise<ResolverResponse> => {
+  try {
+    await clearImportResult();
+    return {
+      success: true,
+    };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: e.errorType }],
+    };
+  }
+});
+
+resolver.define('project/lastSyncTime', async (): Promise<ResolverResponse<string>> => {
+  try {
+    const lastSyncTime = await getLastSyncTime();
+    return {
+      success: true,
+      data: lastSyncTime,
+    };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: DefaultErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+resolver.define('features', (): ResolverResponse<FeaturesList> => {
+  try {
+    const features = listFeatures();
+    return {
+      success: true,
+      data: features,
+    };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: DefaultErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+resolver.define('appId', (): ResolverResponse<string> => {
+  try {
+    const forgeAppId = getForgeAppId();
+    return {
+      success: true,
+      data: forgeAppId,
+    };
+  } catch (e) {
+    return {
+      success: false,
+      errors: [{ message: e.message, errorType: DefaultErrorTypes.UNEXPECTED_ERROR }],
+    };
+  }
+});
+
+export default resolver.getDefinitions();
diff --git a/src/services/__snapshots__/send-compass-event.test.ts.snap b/src/services/__snapshots__/send-compass-event.test.ts.snap
new file mode 100644
index 0000000..b6d1068
--- /dev/null
+++ b/src/services/__snapshots__/send-compass-event.test.ts.snap
@@ -0,0 +1,37 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`send build event to compass method successfully maps gitlab pipeline event to compass build event and send it 1`] = `
+[MockFunction] {
+  "calls": Array [
+    Array [
+      Object {
+        "cloudId": "0a44684d-52c3-4c0c-99f8-9d89ec294759",
+        "event": Object {
+          "build": Object {
+            "buildProperties": Object {
+              "completedAt": "2022-06-14T22:46:49.000Z",
+              "pipeline": Object {
+                "pipelineId": "563913989",
+              },
+              "startedAt": "2022-06-14T22:45:56.000Z",
+              "state": "SUCCESSFUL",
+            },
+            "description": "Pipeline run 563913989 for project stringray",
+            "displayName": "stringray pipeline 563913989",
+            "externalEventSourceId": "36867443",
+            "lastUpdated": "2022-06-14T22:46:49.000Z",
+            "updateSequenceNumber": 1655246809000,
+            "url": "https://gitlab.com/gitlab-com/alliances/atlassian/shared-projects/compass-electromagnets-testing/patrick-subgroup/stringray/-/pipelines/563913989",
+          },
+        },
+      },
+    ],
+  ],
+  "results": Array [
+    Object {
+      "type": "return",
+      "value": undefined,
+    },
+  ],
+}
+`;
diff --git a/src/services/builds.test.ts b/src/services/builds.test.ts
new file mode 100644
index 0000000..bcda74b
--- /dev/null
+++ b/src/services/builds.test.ts
@@ -0,0 +1,54 @@
+/* eslint-disable import/first */
+/* eslint-disable import/order */
+import { mockForgeApi } from '../__tests__/helpers/forge-helper';
+
+mockForgeApi();
+
+import { CompassBuildEventState } from '@atlassian/forge-graphql';
+import { pipelineLastUpdated, toCompassBuildState } from './builds';
+import { latestDate, pipelineWebhookFixture } from '../__tests__/fixtures/build-webhook-payload';
+
+describe('toCompassBuildState method', () => {
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('correctly maps gitlab build state to compass build state', async () => {
+    // inprogress
+    expect(toCompassBuildState('created')).toEqual(CompassBuildEventState.InProgress);
+    expect(toCompassBuildState('waiting_for_resource')).toEqual(CompassBuildEventState.InProgress);
+    expect(toCompassBuildState('preparing')).toEqual(CompassBuildEventState.InProgress);
+    expect(toCompassBuildState('pending')).toEqual(CompassBuildEventState.InProgress);
+    expect(toCompassBuildState('running')).toEqual(CompassBuildEventState.InProgress);
+    expect(toCompassBuildState('manual')).toEqual(CompassBuildEventState.InProgress);
+    // successful
+    expect(toCompassBuildState('success')).toEqual(CompassBuildEventState.Successful);
+    // cancelled
+    expect(toCompassBuildState('canceled')).toEqual(CompassBuildEventState.Cancelled);
+    // unknown
+    expect(toCompassBuildState('scheduled')).toEqual(CompassBuildEventState.Unknown);
+    // unknown as default
+    expect(toCompassBuildState('some_unknown_status_koko')).toEqual(CompassBuildEventState.Unknown);
+  });
+});
+
+describe('pipelineLastUpdated method', () => {
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('returns correct latest time based on pipeline event top-level finished_time', async () => {
+    expect(pipelineWebhookFixture.object_attributes.finished_at).not.toEqual(null);
+    expect(pipelineLastUpdated(pipelineWebhookFixture)).toEqual(
+      new Date(pipelineWebhookFixture.object_attributes.finished_at),
+    );
+  });
+
+  it('returns correct latest time based on pipeline event builds time', async () => {
+    const pipelineEventCopy = JSON.parse(JSON.stringify(pipelineWebhookFixture));
+    pipelineEventCopy.object_attributes.finished_at = null;
+
+    expect(pipelineEventCopy.object_attributes.finished_at).toEqual(null);
+    expect(pipelineLastUpdated(pipelineEventCopy)).toEqual(new Date(latestDate));
+  });
+});
diff --git a/src/services/builds.ts b/src/services/builds.ts
new file mode 100644
index 0000000..1a2ea6c
--- /dev/null
+++ b/src/services/builds.ts
@@ -0,0 +1,86 @@
+import { CompassBuildEventState, CompassCreateEventInput, DataProviderBuildEvent } from '@atlassian/forge-graphql';
+import { max } from 'lodash';
+
+import { GitlabPipelineStates, GitlabApiPipeline, PipelineEvent } from '../types';
+
+export const pipelineLastUpdated = (pipeline: PipelineEvent): Date => {
+  if (pipeline.object_attributes.finished_at !== null) {
+    return new 
Date(pipeline.object_attributes.finished_at); + } + let latestTime = new Date(pipeline.object_attributes.created_at).getTime(); + for (const build of pipeline.builds) { + latestTime = max([new Date(build.started_at).getTime(), new Date(build.finished_at).getTime(), latestTime]); + } + return new Date(latestTime); +}; + +export const toCompassBuildState = (state: string): CompassBuildEventState => { + switch (state) { + case GitlabPipelineStates.CREATED: + case GitlabPipelineStates.WAITING_FOR_RESOURCE: + case GitlabPipelineStates.PREPARING: + case GitlabPipelineStates.PENDING: + case GitlabPipelineStates.RUNNING: + case GitlabPipelineStates.MANUAL: + return CompassBuildEventState.InProgress; + case GitlabPipelineStates.SUCCESS: + return CompassBuildEventState.Successful; + case GitlabPipelineStates.FAILED: + return CompassBuildEventState.Failed; + case GitlabPipelineStates.CANCELED: + return CompassBuildEventState.Cancelled; + case GitlabPipelineStates.SCHEDULED: + return CompassBuildEventState.Unknown; + default: + return CompassBuildEventState.Unknown; + } +}; + +export const webhookPipelineEventToCompassBuildEvent = ( + pipeline: PipelineEvent, + cloudId: string, +): CompassCreateEventInput => { + const lastUpdated = pipelineLastUpdated(pipeline); + return { + cloudId, + event: { + build: { + externalEventSourceId: pipeline.project.id.toString(), + updateSequenceNumber: lastUpdated.getTime(), + displayName: `${pipeline.project.name} pipeline ${pipeline.object_attributes.id}`, + description: `Pipeline run ${pipeline.object_attributes.id} for project ${pipeline.project.name}`, + url: `${pipeline.project.web_url}/-/pipelines/${pipeline.object_attributes.id}`, + lastUpdated: lastUpdated.toISOString(), + buildProperties: { + pipeline: { + pipelineId: pipeline.object_attributes.id.toString(), + }, + state: toCompassBuildState(pipeline.object_attributes.status), + startedAt: new Date(pipeline.object_attributes.created_at).toISOString(), + completedAt: new 
Date(pipeline.object_attributes.finished_at).toISOString(), + }, + }, + }, + }; +}; + +export const gitlabApiPipelineToCompassDataProviderBuildEvent = ( + pipeline: GitlabApiPipeline, + projectName: string, +): DataProviderBuildEvent => { + const isCompleted = !(toCompassBuildState(pipeline.status) === CompassBuildEventState.InProgress); + + return { + description: `Pipeline run ${pipeline.id} for project ${projectName}`, + displayName: `${projectName} pipeline ${pipeline.id}`, + state: toCompassBuildState(pipeline.status), + startedAt: new Date(pipeline.created_at).toISOString(), + completedAt: isCompleted ? new Date(pipeline.updated_at).toISOString() : null, + lastUpdated: new Date(pipeline.updated_at).toISOString(), + updateSequenceNumber: new Date(pipeline.updated_at).getTime(), + url: pipeline.web_url, + pipeline: { + pipelineId: pipeline.id.toString(), + }, + }; +}; diff --git a/src/services/clear-storage.ts b/src/services/clear-storage.ts new file mode 100644 index 0000000..ef3ad87 --- /dev/null +++ b/src/services/clear-storage.ts @@ -0,0 +1,49 @@ +import { storage, ListResult, startsWith } from '@forge/api'; +import { CLEAR_STORAGE_CHUNK_SIZE, CLEAR_STORAGE_DELAY, STORAGE_KEYS, STORAGE_SECRETS } from '../constants'; +import { deleteKeysFromStorageByChunks } from '../utils/storage-utils'; + +const getLastFailedProjectsKeys = async (): Promise => { + const lastFailedProjects: ListResult = await storage + .query() + .where('key', startsWith(STORAGE_KEYS.CURRENT_IMPORT_FAILED_PROJECT_PREFIX)) + .getMany(); + + return lastFailedProjects.results.map(({ key }) => key); +}; + +const clearStorageSecretsForGroup = async (groupId: string): Promise => { + console.log('Clearing storage secrets start'); + await storage.deleteSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupId}`); + console.log('Clearing storage secrets end'); +}; + +const clearStorageEntriesForGroup = async (groupId: string): Promise => { + console.log('Clearing storage entries start'); + + const 
groupKeys = [ + `${STORAGE_KEYS.GROUP_KEY_PREFIX}${groupId}`, + `${STORAGE_KEYS.WEBHOOK_KEY_PREFIX}${groupId}`, + `${STORAGE_KEYS.WEBHOOK_SIGNATURE_PREFIX}${groupId}`, + ]; + + await deleteKeysFromStorageByChunks(groupKeys, CLEAR_STORAGE_CHUNK_SIZE, CLEAR_STORAGE_DELAY); + console.log('Clearing storage entries end'); +}; + +export const clearImportKeys = async (): Promise => { + console.log('Clearing storage import keys start'); + + const importKeys = [ + STORAGE_KEYS.LAST_SYNC_TIME, + STORAGE_KEYS.CURRENT_IMPORT_QUEUE_JOB_IDS, + STORAGE_KEYS.CURRENT_IMPORT_TOTAL_PROJECTS, + ...(await getLastFailedProjectsKeys()), + ]; + + await deleteKeysFromStorageByChunks(importKeys, CLEAR_STORAGE_CHUNK_SIZE, CLEAR_STORAGE_DELAY); + console.log('Clearing storage import keys end'); +}; + +export const deleteGroupDataFromStorage = async (groupId: string): Promise => { + await Promise.all([clearStorageSecretsForGroup(groupId), clearStorageEntriesForGroup(groupId), clearImportKeys()]); +}; diff --git a/src/services/compute-event-and-metrics/get-mr-cycle-time.ts b/src/services/compute-event-and-metrics/get-mr-cycle-time.ts new file mode 100644 index 0000000..d8c741b --- /dev/null +++ b/src/services/compute-event-and-metrics/get-mr-cycle-time.ts @@ -0,0 +1,18 @@ +import { mergeRequestCycleTime } from '../metric-calculations/merge-request-cycle-time'; +import { getLastMergedMergeRequests } from '../mergeRequest'; + +export const getMRCycleTime = async ( + groupToken: string, + projectId: number, + trackingBranch: string, +): Promise => { + try { + const mergeRequests = await getLastMergedMergeRequests(groupToken, projectId, trackingBranch); + + return mergeRequestCycleTime(mergeRequests); + } catch (e) { + console.error('Cannot calculate merge requests cycle time metric: ', e.message); + + return 0; + } +}; diff --git a/src/services/compute-event-and-metrics/get-open-merge-requests.ts b/src/services/compute-event-and-metrics/get-open-merge-requests.ts new file mode 100644 index 
0000000..1815278 --- /dev/null +++ b/src/services/compute-event-and-metrics/get-open-merge-requests.ts @@ -0,0 +1,17 @@ +import { getOpenMergeRequests } from '../mergeRequest'; + +export const getOpenMergeRequestsCount = async ( + groupToken: string, + projectId: number, + trackingBranch: string, +): Promise => { + try { + const openMergeRequests = await getOpenMergeRequests(groupToken, projectId, trackingBranch); + + return openMergeRequests.length; + } catch (e) { + console.error('Cannot calculate open merge requests count metric: ', e.message); + + return null; + } +}; diff --git a/src/services/compute-event-and-metrics/get-recent-builds.ts b/src/services/compute-event-and-metrics/get-recent-builds.ts new file mode 100644 index 0000000..5dca889 --- /dev/null +++ b/src/services/compute-event-and-metrics/get-recent-builds.ts @@ -0,0 +1,28 @@ +import { DataProviderBuildEvent } from '@atlassian/forge-graphql'; +import { getProjectRecentPipelines } from '../../client/gitlab'; +import { gitlabApiPipelineToCompassDataProviderBuildEvent } from '../builds'; +import { getDateInThePast } from '../../utils/time-utils'; +import { fetchPaginatedData } from '../../utils/fetchPaginatedData'; + +export const getProjectBuildsFor28Days = async ( + groupToken: string, + projectId: number, + projectName: string, + branchName: string, +): Promise => { + try { + const allPipelines = await fetchPaginatedData(getProjectRecentPipelines, { + groupToken, + projectId, + dateAfter: getDateInThePast(), + branchName, + }); + + return allPipelines.map((pipeline) => gitlabApiPipelineToCompassDataProviderBuildEvent(pipeline, projectName)); + } catch (err) { + const DESCRIPTIVE_ERROR_MESSAGE = 'Error while fetching project pipelines from Gitlab.'; + + console.error(DESCRIPTIVE_ERROR_MESSAGE, err); + return []; + } +}; diff --git a/src/services/compute-event-and-metrics/get-recent-deployments.ts b/src/services/compute-event-and-metrics/get-recent-deployments.ts new file mode 100644 index 
0000000..45986cd --- /dev/null +++ b/src/services/compute-event-and-metrics/get-recent-deployments.ts @@ -0,0 +1,33 @@ +import { DataProviderDeploymentEvent } from '@atlassian/forge-graphql'; + +import { Deployment, Environment, EnvironmentTier } from '../../types'; +import { getRecentDeployments, gitlabAPiDeploymentToCompassDataProviderDeploymentEvent } from '../deployment'; +import { getProjectEnvironments } from '../environment'; +import { getDateInThePast } from '../../utils/time-utils'; + +export const getDeploymentsForProductionEnvironments = async ( + groupToken: string, + projectId: number, + projectName: string, + projectEnvironments?: Environment[], +): Promise => { + const environments = projectEnvironments || (await getProjectEnvironments(projectId, groupToken)); + const getDeploymentsPromises = environments.reduce[]>( + (deploymentsPromises, currentEnvironment) => { + if (currentEnvironment.tier === EnvironmentTier.PRODUCTION) { + deploymentsPromises.push( + getRecentDeployments(groupToken, projectId, getDateInThePast(), currentEnvironment.name), + ); + } + + return deploymentsPromises; + }, + [], + ); + + const deployments = (await Promise.all(getDeploymentsPromises)).flat(); + + return deployments.map((deployment) => + gitlabAPiDeploymentToCompassDataProviderDeploymentEvent(deployment, projectName), + ); +}; diff --git a/src/services/compute-event-and-metrics/index.ts b/src/services/compute-event-and-metrics/index.ts new file mode 100644 index 0000000..a9d19dc --- /dev/null +++ b/src/services/compute-event-and-metrics/index.ts @@ -0,0 +1,11 @@ +import { getMRCycleTime } from './get-mr-cycle-time'; +import { getProjectBuildsFor28Days } from './get-recent-builds'; +import { getDeploymentsForProductionEnvironments } from './get-recent-deployments'; +import { getOpenMergeRequestsCount } from './get-open-merge-requests'; + +export { + getMRCycleTime, + getProjectBuildsFor28Days, + getDeploymentsForProductionEnvironments, + getOpenMergeRequestsCount, +}; 
diff --git a/src/services/create-mr-with-compass-yml.ts b/src/services/create-mr-with-compass-yml.ts new file mode 100644 index 0000000..1857974 --- /dev/null +++ b/src/services/create-mr-with-compass-yml.ts @@ -0,0 +1,37 @@ +import { storage } from '@forge/api'; +import { Component } from '@atlassian/forge-graphql'; + +import { ImportableProject } from '../types'; +import { COMMIT_MESSAGE, COMPASS_YML_BRANCH, MR_DESCRIPTION, MR_TITLE, STORAGE_SECRETS } from '../constants'; +import { getTrackingBranchName } from './get-tracking-branch'; +import { createCompassYml, generateCompassYamlData } from '../utils/create-compass-yaml'; +import { createFileInProject, createMergeRequest } from '../client/gitlab'; +import validateConfigFile from './sync-component-with-file/validate-config-file'; + +const FILE_PATH = 'compass.yml'; +const ENCODING = 'base64'; + +export const createMRWithCompassYML = async (project: ImportableProject, component: Component, groupId: number) => { + const { id, defaultBranch, url } = project; + + const groupToken = await storage.getSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupId}`); + const trackingBranch = await getTrackingBranchName(groupToken, id, defaultBranch); + const compassYamlData = generateCompassYamlData(url, component); + + validateConfigFile(compassYamlData, component); + + const content = createCompassYml(compassYamlData); + + await createFileInProject( + groupToken, + id, + FILE_PATH, + COMPASS_YML_BRANCH, + trackingBranch, + ENCODING, + content, + COMMIT_MESSAGE, + ); + + await createMergeRequest(groupToken, id, COMPASS_YML_BRANCH, trackingBranch, MR_TITLE, MR_DESCRIPTION, true); +}; diff --git a/src/services/data-provider-link-parser.test.ts b/src/services/data-provider-link-parser.test.ts new file mode 100644 index 0000000..491b9f8 --- /dev/null +++ b/src/services/data-provider-link-parser.test.ts @@ -0,0 +1,102 @@ +/* eslint-disable import/order, import/first */ +import { storage, mockForgeApi } from 
'../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { mocked } from 'jest-mock'; +import { generateGitlabProject } from '../__tests__/helpers/gitlab-helper'; +import { getOwnedProjectsBySearchCriteria } from '../client/gitlab'; +import { extractProjectInformation, getProjectDataFromUrl } from './data-provider-link-parser'; +import { getGroupIds } from '../utils/storage-utils'; + +jest.mock('../client/gitlab'); +jest.mock('../utils/storage-utils'); + +const mockedGetOwnedProjectsBySearchCriteria = mocked(getOwnedProjectsBySearchCriteria); +const mockedGetGroupIds = mocked(getGroupIds); + +const mockProjectUrl = 'https://gitlab.com/test/repo-name?testParam=test'; +const testToken1 = 'token1'; +const testToken2 = 'token2'; +const projectName1 = 'project1'; +const projectName2 = 'project2'; +const projectName3 = 'project3'; +const projectName4 = 'project4'; + +describe('data-provider-link-parser', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should extract project information from the url', () => { + const result = extractProjectInformation(mockProjectUrl); + + const expectedResult = { + projectName: 'repo-name', + pathName: '/test/repo-name', + }; + + expect(result).toEqual(expectedResult); + }); + + it('should throw error if link is not a Gitlab url', () => { + const mockWrongProjectUrl = 'https://bitbucket.org/test/repo-name?testParam=test'; + + const funcWrapper = () => extractProjectInformation(mockWrongProjectUrl); + + const expectedError = new Error('Provided link is not Gitlab url'); + + expect(funcWrapper).toThrow(expectedError); + }); + + it('should get project data from URL', async () => { + const expectedProjectData = generateGitlabProject({ id: 4, name: projectName4, web_url: mockProjectUrl }); + + mockedGetGroupIds.mockResolvedValue([1, 2]); + + storage.getSecret.mockResolvedValueOnce(testToken1); + storage.getSecret.mockResolvedValueOnce(testToken2); + + mockedGetOwnedProjectsBySearchCriteria.mockResolvedValueOnce([ + 
generateGitlabProject({ id: 1, name: projectName1 }), + generateGitlabProject({ id: 2, name: projectName2 }), + ]); + mockedGetOwnedProjectsBySearchCriteria.mockResolvedValueOnce([ + generateGitlabProject({ id: 3, name: projectName3 }), + expectedProjectData, + ]); + + const result = await getProjectDataFromUrl(mockProjectUrl); + + const expectedResult = { project: expectedProjectData, groupToken: testToken2 }; + + expect(result).toEqual(expectedResult); + }); + + it('should return null if project not found', async () => { + mockedGetGroupIds.mockResolvedValue([1, 2]); + + storage.getSecret.mockResolvedValueOnce(testToken1); + storage.getSecret.mockResolvedValueOnce(testToken2); + + mockedGetOwnedProjectsBySearchCriteria.mockResolvedValueOnce([ + generateGitlabProject({ id: 1, name: projectName1 }), + generateGitlabProject({ id: 2, name: projectName2 }), + ]); + mockedGetOwnedProjectsBySearchCriteria.mockResolvedValueOnce([ + generateGitlabProject({ id: 3, name: projectName3 }), + generateGitlabProject({ id: 4, name: projectName4 }), + ]); + + const result = await getProjectDataFromUrl(mockProjectUrl); + + expect(result).toBeNull(); + }); + + it('should return null if groupToken not found', async () => { + mockedGetGroupIds.mockResolvedValue([]); + const result = await getProjectDataFromUrl(mockProjectUrl); + + expect(result).toBeNull(); + }); +}); diff --git a/src/services/data-provider-link-parser.ts b/src/services/data-provider-link-parser.ts new file mode 100644 index 0000000..c33e15d --- /dev/null +++ b/src/services/data-provider-link-parser.ts @@ -0,0 +1,67 @@ +import { storage } from '@forge/api'; +import parse from 'url-parse'; + +import { getOwnedProjectsBySearchCriteria } from '../client/gitlab'; +import { STORAGE_SECRETS } from '../constants'; +import { getGroupIds } from '../utils/storage-utils'; +import { GitlabAPIProject } from '../types'; + +export const extractProjectInformation = (projectUrl: string): { projectName: string; pathName: string } | 
null => { + const parsedUrl = parse(projectUrl); + const splitPath = parsedUrl.pathname.split('/'); + + if (!parsedUrl.hostname.match(/gitlab\.com/)) { + throw new Error('Provided link is not Gitlab url'); + } + + return { projectName: splitPath[splitPath.length - 1], pathName: parsedUrl.pathname }; +}; + +export const getAllGroupTokens = async (): Promise => { + const groupIds = await getGroupIds(); + const groupTokens = await Promise.all( + groupIds.map((groupId) => storage.getSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupId}`)), + ); + + return groupTokens; +}; + +export const getProjectDataFromUrl = async ( + url: string, +): Promise<{ project: GitlabAPIProject; groupToken: string }> => { + try { + const { projectName, pathName } = extractProjectInformation(url); + const groupTokens = await getAllGroupTokens(); + + const projectsPromiseResults = await Promise.allSettled( + groupTokens.map((token) => getOwnedProjectsBySearchCriteria(projectName, token)), + ); + const projectsResult = projectsPromiseResults.reduce<{ projects: GitlabAPIProject[]; projectIndex: number | null }>( + (result, currentProjectResult, index) => { + if (currentProjectResult.status === 'fulfilled') { + result.projects.push(...currentProjectResult.value); + + return { + ...result, + projectIndex: index, + }; + } + + return result; + }, + { projects: [], projectIndex: null }, + ); + + const groupToken = groupTokens[projectsResult.projectIndex]; + const project = projectsResult.projects.find(({ web_url: webUrl }) => webUrl.includes(pathName)); + + if (!groupToken || !project) { + throw new Error('Project not found'); + } + + return { project, groupToken }; + } catch (e) { + console.log('Data provider link parser failed', e.message); + return null; + } +}; diff --git a/src/services/deployment.test.ts b/src/services/deployment.test.ts new file mode 100644 index 0000000..8ebeabd --- /dev/null +++ b/src/services/deployment.test.ts @@ -0,0 +1,118 @@ +/* eslint-disable import/first, 
import/order */ +import { mockAgg } from '../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { CompassDeploymentEventEnvironmentCategory, CompassDeploymentEventState } from '@atlassian/forge-graphql'; +import { + gitLabStateToCompassFormat, + gitlabApiDeploymentToCompassDeploymentEvent, + gitlabAPiDeploymentToCompassDataProviderDeploymentEvent, +} from './deployment'; +import { Deployment, EnvironmentTier } from '../types'; +import { MOCK_CLOUD_ID } from '../__tests__/fixtures/gitlab-data'; + +const mockDeployment: Deployment = { + id: 12345, + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + deployable: { + status: 'CREATED', + finished_at: 'finished_at', + pipeline: { + id: 123, + web_url: 'https://www.google.com/', + }, + }, + environment: { + name: 'name', + id: 123, + }, + status: 'string', +}; + +const mockProjectName = 'projectName'; + +describe('deployment', () => { + it('should map gitlab deployment state to compass format', async () => { + expect(gitLabStateToCompassFormat('CREATED')).toBe(CompassDeploymentEventState.Pending); + expect(gitLabStateToCompassFormat('BLOCKED')).toBe(CompassDeploymentEventState.Pending); + expect(gitLabStateToCompassFormat('SUCCESS')).toBe(CompassDeploymentEventState.Successful); + expect(gitLabStateToCompassFormat('RUNNING')).toBe(CompassDeploymentEventState.InProgress); + expect(gitLabStateToCompassFormat('FAILED')).toBe(CompassDeploymentEventState.Failed); + expect(gitLabStateToCompassFormat('CANCELED')).toBe(CompassDeploymentEventState.Cancelled); + expect(gitLabStateToCompassFormat('Not valid state')).toBe(CompassDeploymentEventState.Unknown); + }); + + it('should create valid compass deployment input', async () => { + const mockProjectId = 12345; + + const deploymentInput = gitlabApiDeploymentToCompassDeploymentEvent( + mockDeployment, + mockProjectId, + mockProjectName, + EnvironmentTier.PRODUCTION, + MOCK_CLOUD_ID, + ); + + const expectedResult = { + cloudId: MOCK_CLOUD_ID, + event: 
{ + deployment: { + externalEventSourceId: mockProjectId.toString(), + lastUpdated: expect.anything(), + updateSequenceNumber: expect.anything(), + displayName: `${mockProjectName} deployment ${mockDeployment.id}`, + url: mockDeployment.deployable.pipeline.web_url, + description: `${mockProjectName} deployment`, + deploymentProperties: { + startedAt: mockDeployment.created_at, + completedAt: null as any, + pipeline: { + pipelineId: mockDeployment.deployable.pipeline.id.toString(), + url: mockDeployment.deployable.pipeline.web_url, + displayName: `${mockProjectName} pipeline`, + }, + environment: { + category: EnvironmentTier.PRODUCTION.toUpperCase() as CompassDeploymentEventEnvironmentCategory, + displayName: mockDeployment.environment.name, + environmentId: mockDeployment.environment.id.toString(), + }, + state: gitLabStateToCompassFormat(mockDeployment.deployable.status), + sequenceNumber: mockDeployment.id, + }, + }, + }, + }; + + expect(deploymentInput).toEqual(expectedResult); + }); + + it('should create valid data provider deployment input', async () => { + const deploymentInput = gitlabAPiDeploymentToCompassDataProviderDeploymentEvent(mockDeployment, mockProjectName); + + const { deployable, environment, id, updated_at: updatedAt } = mockDeployment; + + const expectedResult = { + environment: { + category: EnvironmentTier.PRODUCTION.toUpperCase() as CompassDeploymentEventEnvironmentCategory, + displayName: environment.name, + environmentId: environment.id.toString(), + }, + pipeline: { + displayName: `${mockProjectName} pipeline`, + pipelineId: deployable.pipeline.id.toString(), + url: deployable.pipeline.web_url, + }, + sequenceNumber: id, + state: CompassDeploymentEventState.Pending, + description: `${mockProjectName} deployment`, + displayName: `${mockProjectName} deployment ${id}`, + lastUpdated: updatedAt, + updateSequenceNumber: expect.anything(), + url: deployable.pipeline.web_url, + }; + + expect(deploymentInput).toEqual(expectedResult); + }); +}); 
diff --git a/src/services/deployment.ts b/src/services/deployment.ts new file mode 100644 index 0000000..b16a3f7 --- /dev/null +++ b/src/services/deployment.ts @@ -0,0 +1,165 @@ +import { + CompassCreateEventInput, + CompassDeploymentEventEnvironmentCategory, + CompassDeploymentEventState, + DataProviderDeploymentEvent, +} from '@atlassian/forge-graphql'; + +import { getProjectDeploymentById, getProjectRecentDeployments } from '../client/gitlab'; +import { Deployment, DeploymentEvent, EnvironmentTier } from '../types'; +import { fetchPaginatedData } from '../utils/fetchPaginatedData'; +import { getProjectEnvironments } from './environment'; + +export const gitLabStateToCompassFormat = (state: string): CompassDeploymentEventState => { + switch (state) { + case 'CREATED': + case 'BLOCKED': + return CompassDeploymentEventState.Pending; + case 'SUCCESS': + return CompassDeploymentEventState.Successful; + case 'RUNNING': + return CompassDeploymentEventState.InProgress; + case 'FAILED': + return CompassDeploymentEventState.Failed; + case 'CANCELED': + return CompassDeploymentEventState.Cancelled; + default: + return CompassDeploymentEventState.Unknown; + } +}; + +const isCompletedDeployment = (state: CompassDeploymentEventState) => { + return state === CompassDeploymentEventState.Failed || state === CompassDeploymentEventState.Successful; +}; + +export const gitlabApiDeploymentToCompassDeploymentEvent = ( + deployment: Deployment, + projectId: number, + projectName: string, + environmentTier: EnvironmentTier, + cloudId: string, +): CompassCreateEventInput => { + const deploymentState = gitLabStateToCompassFormat(deployment.deployable.status.toUpperCase()); + return { + cloudId, + event: { + deployment: { + description: `${projectName} deployment`, + externalEventSourceId: projectId.toString(), + updateSequenceNumber: new Date(deployment.updated_at).getTime(), + displayName: `${projectName} deployment ${deployment.id}`, + url: deployment.deployable.pipeline.web_url, + 
lastUpdated: new Date(deployment.updated_at).toISOString(), + deploymentProperties: { + startedAt: new Date(deployment.created_at).toISOString(), + completedAt: isCompletedDeployment(deploymentState) ? new Date(deployment.updated_at).toISOString() : null, + environment: { + category: environmentTier.toUpperCase() as CompassDeploymentEventEnvironmentCategory, + displayName: deployment.environment.name, + environmentId: deployment.environment.id.toString(), + }, + pipeline: { + pipelineId: deployment.deployable.pipeline.id.toString(), + url: deployment.deployable.pipeline.web_url, + displayName: `${projectName} pipeline`, + }, + state: deploymentState, + sequenceNumber: deployment.id, + }, + }, + }, + }; +}; + +export const getDeployment = async ( + event: DeploymentEvent, + groupToken: string, + environmentTier: EnvironmentTier, + cloudId: string, +): Promise => { + const deployment = await getProjectDeploymentById(event.project.id, event.deployment_id, groupToken); + + return gitlabApiDeploymentToCompassDeploymentEvent( + deployment as Deployment, + event.project.id, + event.project.name, + environmentTier, + cloudId, + ); +}; + +export const getRecentDeployments = async ( + groupToken: string, + projectId: number, + dateAfter: string, + environmentName: string, +) => { + try { + return fetchPaginatedData(getProjectRecentDeployments, { groupToken, projectId, dateAfter, environmentName }); + } catch (err) { + const ERROR_MESSAGE = 'Error while fetching recent deployments from Gitlab!'; + + console.error(ERROR_MESSAGE, err); + return []; + } +}; + +export const getDeploymentAfter28Days = async ( + groupToken: string, + projectId: number, + dateAfter: string, + dateBefore: string, +): Promise => { + const PAGE = 1; + const PER_PAGE = 1; + const environments = await getProjectEnvironments(projectId, groupToken); + const getDeploymentsPromises = environments.reduce[]>( + (deploymentsPromises, currentEnvironment) => { + if (currentEnvironment.tier === 
EnvironmentTier.PRODUCTION) { + deploymentsPromises.push( + getProjectRecentDeployments(PAGE, PER_PAGE, { + groupToken, + projectId, + environmentName: currentEnvironment.name, + dateAfter, + dateBefore, + }), + ); + } + + return deploymentsPromises; + }, + [], + ); + + const promisesResponse = await Promise.all(getDeploymentsPromises); + + return promisesResponse ? promisesResponse.map((deployment) => deployment.data).flat() : []; +}; + +export const gitlabAPiDeploymentToCompassDataProviderDeploymentEvent = ( + deployment: Deployment, + projectName: string, +): DataProviderDeploymentEvent => { + const { environment, deployable } = deployment; + + return { + environment: { + category: EnvironmentTier.PRODUCTION.toUpperCase() as CompassDeploymentEventEnvironmentCategory, + displayName: environment.name, + environmentId: environment.id.toString(), + }, + pipeline: { + displayName: `${projectName} pipeline`, + pipelineId: deployable.pipeline.id.toString(), + url: deployable.pipeline.web_url, + }, + sequenceNumber: deployment.id, + state: gitLabStateToCompassFormat(deployable.status.toUpperCase()), + description: `${projectName} deployment`, + displayName: `${projectName} deployment ${deployment.id}`, + lastUpdated: new Date(deployment.updated_at).toISOString(), + updateSequenceNumber: new Date(deployment.updated_at).getTime(), + url: deployable.pipeline.web_url, + }; +}; diff --git a/src/services/disconnect-group.ts b/src/services/disconnect-group.ts new file mode 100644 index 0000000..7e77744 --- /dev/null +++ b/src/services/disconnect-group.ts @@ -0,0 +1,9 @@ +import { unlinkCompassComponents } from '../client/compass'; +import { deleteWebhook } from './webhooks'; +import { deleteGroupDataFromStorage } from './clear-storage'; + +export const disconnectGroup = async (groupId: number, cloudId: string, forgeAppId: string): Promise => { + await unlinkCompassComponents(cloudId, `ari:cloud:ecosystem::app/${forgeAppId}`); + await deleteWebhook(groupId); + await 
deleteGroupDataFromStorage(groupId.toString()); +}; diff --git a/src/services/environment.test.ts b/src/services/environment.test.ts new file mode 100644 index 0000000..d3f736c --- /dev/null +++ b/src/services/environment.test.ts @@ -0,0 +1,26 @@ +/* eslint-disable import/first, import/order */ +import { mockAgg } from '../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { EnvironmentTier } from '../types'; +import { getEnvironmentTier } from './environment'; +import { generateEnvironmentEvent } from '../__tests__/helpers/gitlab-helper'; + +const environmentName = 'production'; +const ENVIRONMENTS_MOCK = [generateEnvironmentEvent()]; + +describe('Environment Service', () => { + it('returns environment tier', async () => { + const foundEnvironment = await getEnvironmentTier(ENVIRONMENTS_MOCK, environmentName); + + expect(foundEnvironment).toEqual(EnvironmentTier.PRODUCTION); + }); + + it('throws error in case when environment is not found', async () => { + const invalidEnvironmentName = 'testing'; + const errorMsg = `Environment with name "${invalidEnvironmentName}" not found`; + + await expect(getEnvironmentTier(ENVIRONMENTS_MOCK, invalidEnvironmentName)).rejects.toThrow(new Error(errorMsg)); + }); +}); diff --git a/src/services/environment.ts b/src/services/environment.ts new file mode 100644 index 0000000..9e666f8 --- /dev/null +++ b/src/services/environment.ts @@ -0,0 +1,19 @@ +import { getEnvironments } from '../client/gitlab'; +import { Environment, EnvironmentTier } from '../types'; + +export const getProjectEnvironments = (projectId: number, groupToken: string): Promise => { + return getEnvironments(projectId, groupToken); +}; + +export const getEnvironmentTier = async ( + environments: Environment[], + environmentName: string, +): Promise => { + const foundEnvironment = environments.find((environment) => environment.name === environmentName); + + if (!foundEnvironment) { + throw new Error(`Environment with name "${environmentName}" not found`); + } + + 
return foundEnvironment.tier; +}; diff --git a/src/services/feature-flags.ts b/src/services/feature-flags.ts new file mode 100644 index 0000000..114346a --- /dev/null +++ b/src/services/feature-flags.ts @@ -0,0 +1,5 @@ +import { FeaturesList } from '../features'; + +export const listFeatures = (): FeaturesList => { + return {}; +}; diff --git a/src/services/fetch-projects.test.ts b/src/services/fetch-projects.test.ts new file mode 100644 index 0000000..f863ff9 --- /dev/null +++ b/src/services/fetch-projects.test.ts @@ -0,0 +1,146 @@ +/* eslint-disable import/order, import/first */ +import { storage, mockForgeApi } from '../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { mocked } from 'jest-mock'; + +import { getMergeRequests, getProjects, GitLabHeaders } from '../client/gitlab'; +import { getGroupProjects, sortProjects } from './fetch-projects'; +import { getComponentByExternalAlias } from '../client/compass'; +import { getProjectLabels } from './get-labels'; +import { + generateComponent, + generateProjectsWithStatuses, + unsortedProjects, + sortedProjects, +} from '../__tests__/helpers/gitlab-helper'; +import { MergeRequest } from '../types'; + +jest.mock('../client/gitlab'); +jest.mock('../client/compass'); +jest.mock('./get-labels'); + +const mockGetProjects = mocked(getProjects); +const mockGetProjectLabels = mocked(getProjectLabels); +const mockGetComponentByExternalAlias = mocked(getComponentByExternalAlias); +const mockGetMergeRequests = mocked(getMergeRequests); + +const MOCK_CLOUD_ID = '0a44684d-52c3-4c0c-99f8-9d89ec294759'; +const MOCK_GROUP_ID = 12443; +const MOCK_PROJECT_TOPICS = ['topic-1', 'topic-2']; +const MOCK_GET_PROJECTS_RESPONSE = { + data: [ + { + id: 1, + name: 'koko', + description: 'description', + default_branch: 'default_branch', + topics: MOCK_PROJECT_TOPICS, + web_url: 'web_url', + namespace: { + id: 1, + full_path: 'path/group/koko', + path: 'group/koko', + name: 'group/koko', + }, + created_at: expect.anything(), + }, 
+ ], + headers: { + get: jest.fn().mockResolvedValue(GitLabHeaders.PAGINATION_TOTAL), + } as unknown as Headers, +}; + +const MOCK_GET_PROJECT_LABELS = [...MOCK_PROJECT_TOPICS, 'language:javascript']; +const mergeRequestMock: MergeRequest[] = [ + { + merged_at: null, + created_at: new Date().toString(), + }, +]; + +describe('Fetch Projects Service', () => { + beforeEach(() => { + jest.clearAllMocks(); + mockGetProjects.mockResolvedValue(MOCK_GET_PROJECTS_RESPONSE); + mockGetProjectLabels.mockResolvedValue(MOCK_GET_PROJECT_LABELS); + mockGetMergeRequests.mockResolvedValue({ data: [], headers: {} as Headers }); + storage.get.mockResolvedValue('token'); + }); + + it('returns group projects data with isManaged and hasComponent true', async () => { + mockGetComponentByExternalAlias.mockResolvedValue( + generateComponent({ dataManager: { externalSourceURL: 'https://gitlab.com/blob/koko/compass.yml' } }), + ); + + const result = await getGroupProjects(MOCK_CLOUD_ID, MOCK_GROUP_ID, 1, 1); + + expect(result).toStrictEqual({ + projects: generateProjectsWithStatuses(true, true, { labels: MOCK_GET_PROJECT_LABELS }), + total: expect.anything(), + }); + }); + + it('returns group projects data with isManaged and hasComponent false', async () => { + mockGetComponentByExternalAlias.mockResolvedValue({ component: null }); + + const result = await getGroupProjects(MOCK_CLOUD_ID, MOCK_GROUP_ID, 1, 1); + + expect(result).toStrictEqual({ + projects: generateProjectsWithStatuses(false, false, { + componentId: undefined, + labels: MOCK_GET_PROJECT_LABELS, + componentType: undefined, + }), + total: expect.anything(), + }); + }); + + it('returns group projects data with isManaged false and hasComponent true', async () => { + mockGetComponentByExternalAlias.mockResolvedValue(generateComponent()); + + const result = await getGroupProjects(MOCK_CLOUD_ID, MOCK_GROUP_ID, 1, 1); + + expect(result).toStrictEqual({ + projects: generateProjectsWithStatuses(true, false, { labels: 
MOCK_GET_PROJECT_LABELS }), + total: expect.anything(), + }); + }); + + it('returns group projects data with isCompassFilePrOpened true and hasComponent true', async () => { + mockGetComponentByExternalAlias.mockResolvedValue(generateComponent()); + mockGetMergeRequests.mockResolvedValue({ data: mergeRequestMock, headers: {} as Headers }); + + const result = await getGroupProjects(MOCK_CLOUD_ID, MOCK_GROUP_ID, 1, 1); + + expect(result).toStrictEqual({ + projects: generateProjectsWithStatuses(true, false, { + labels: MOCK_GET_PROJECT_LABELS, + isCompassFilePrOpened: true, + }), + + total: expect.anything(), + }); + }); + + it('returns error in case when fetchAllProjects fails', async () => { + mockGetProjects.mockRejectedValue(undefined); + + await expect(getGroupProjects(MOCK_CLOUD_ID, MOCK_GROUP_ID, 1, 1)).rejects.toThrow( + new Error('Error while fetching group projects from Gitlab!'), + ); + }); + + it('returns error in case when getComponentByExternalAlias fails', async () => { + mockGetComponentByExternalAlias.mockRejectedValue(undefined); + + await expect(getGroupProjects(MOCK_CLOUD_ID, MOCK_GROUP_ID, 1, 1)).rejects.toThrow( + new Error('Error: Error while getting repository additional fields.'), + ); + }); + + it('returns sorted projects', () => { + expect(sortProjects(unsortedProjects)).toEqual(sortedProjects); + }); +}); diff --git a/src/services/fetch-projects.ts b/src/services/fetch-projects.ts new file mode 100644 index 0000000..d7a6833 --- /dev/null +++ b/src/services/fetch-projects.ts @@ -0,0 +1,125 @@ +import { CreateLinkInput, Link } from '@atlassian/forge-graphql'; +import { storage } from '@forge/api'; + +import { getComponentByExternalAlias } from '../client/compass'; +import { COMPASS_YML_BRANCH, STORAGE_SECRETS } from '../constants'; +import { getMergeRequests, getProjects, GitLabHeaders } from '../client/gitlab'; +import { GroupProjectsResponse, MergeRequestState, Project, ProjectReadyForImport } from '../types'; +import { getProjectLabels } 
from './get-labels'; + +const mapComponentLinks = (links: Link[] = []): CreateLinkInput[] => + links.map((link) => { + return { url: link.url, type: link.type }; + }); + +const fetchProjects = async ( + groupToken: string, + groupId: number, + page: number, + search?: string, +): Promise<{ total: number; projects: Project[] }> => { + try { + const PER_PAGE = 10; + const { data: projects, headers } = await getProjects(groupToken, groupId, page, PER_PAGE, search); + + const generatedProjectsWithLanguages = await Promise.all( + projects.map(async (project) => { + const labels = await getProjectLabels(project.id, groupToken, project.topics); + + return { + id: project.id, + name: project.name, + description: project.description, + url: project.web_url, + defaultBranch: project.default_branch, + groupName: project.namespace.name, + groupPath: project.namespace.path, + groupFullPath: project.namespace.full_path, + labels, + }; + }), + ); + + return { total: Number(headers.get(GitLabHeaders.PAGINATION_TOTAL)), projects: generatedProjectsWithLanguages }; + } catch (err) { + const ERROR_MESSAGE = 'Error while fetching group projects from Gitlab!'; + + console.error(ERROR_MESSAGE, err); + throw new Error(ERROR_MESSAGE); + } +}; + +const compareProjectWithExistingComponent = async (cloudId: string, projectId: number, groupToken: string) => { + try { + const [{ component }, { data: mergeRequestWithCompassYML }] = await Promise.all([ + getComponentByExternalAlias({ + cloudId, + externalId: projectId.toString(), + options: { includeLinks: true }, + }), + getMergeRequests(1, 1, { + projectId, + groupToken, + scope: 'all', + sourceBranch: COMPASS_YML_BRANCH, + state: MergeRequestState.OPENED, + }), + ]); + + return { + isManaged: Boolean(component?.dataManager), + hasComponent: Boolean(component?.id) || Boolean(mergeRequestWithCompassYML.length), + isCompassFilePrOpened: Boolean(mergeRequestWithCompassYML.length), + componentId: component?.id, + componentLinks: 
mapComponentLinks(component?.links), + componentType: component?.type, + }; + } catch (err) { + const ERROR_MESSAGE = 'Error while getting repository additional fields.'; + + console.error(ERROR_MESSAGE, err); + throw new Error(ERROR_MESSAGE); + } +}; + +export const sortProjects = (projects: ProjectReadyForImport[]): ProjectReadyForImport[] => { + const groupedProjects: { [key: string]: ProjectReadyForImport[] } = {}; + + projects.forEach((project) => { + groupedProjects[project.groupFullPath] = groupedProjects[project.groupFullPath] + ? [...groupedProjects[project.groupFullPath], project] + : [project]; + }); + + return Object.values(groupedProjects) + .map((group) => { + return group.sort((a, b) => a.name.localeCompare(b.name)); + }) + .flat(); +}; + +export const getGroupProjects = async ( + cloudId: string, + groupId: number, + page: number, + groupTokenId: number, + search?: string, +): Promise => { + const groupToken = await storage.getSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupTokenId}`); + + const { projects, total } = await fetchProjects(groupToken, groupId, page, search); + + const checkedDataWithExistingComponents = await Promise.all( + projects.map(({ id: projectId }) => { + return compareProjectWithExistingComponent(cloudId, projectId, groupToken); + }), + ).catch((err) => { + throw new Error(err); + }); + + const resultProjects = projects.map((project, i) => { + return { ...project, ...checkedDataWithExistingComponents[i] }; + }); + + return { total, projects: resultProjects }; +}; diff --git a/src/services/get-backfill-data.ts b/src/services/get-backfill-data.ts new file mode 100644 index 0000000..e52b00a --- /dev/null +++ b/src/services/get-backfill-data.ts @@ -0,0 +1,39 @@ +import { DataProviderBuildEvent, DataProviderDeploymentEvent } from '@atlassian/forge-graphql'; +import { + getDeploymentsForProductionEnvironments, + getMRCycleTime, + getOpenMergeRequestsCount, + getProjectBuildsFor28Days, +} from './compute-event-and-metrics'; 
+import { hasDeploymentAfter28Days } from '../utils/has-deployment-after-28days'; + +export const getBackfillData = async ( + groupToken: string, + projectId: number, + projectName: string, + branchName: string, +): Promise<{ + builds: DataProviderBuildEvent[]; + deployments: DataProviderDeploymentEvent[]; + metrics: { + mrCycleTime: number; + openMergeRequestsCount: number; + }; +}> => { + const [allBuildsFor28Days, mrCycleTime, deployments, openMergeRequestsCount] = await Promise.all([ + getProjectBuildsFor28Days(groupToken, projectId, projectName, branchName), + getMRCycleTime(groupToken, projectId, branchName), + getDeploymentsForProductionEnvironments(groupToken, projectId, projectName), + getOpenMergeRequestsCount(groupToken, projectId, branchName), + hasDeploymentAfter28Days(projectId, groupToken), + ]); + + return { + builds: allBuildsFor28Days, + deployments, + metrics: { + mrCycleTime, + openMergeRequestsCount, + }, + }; +}; diff --git a/src/services/get-labels.test.ts b/src/services/get-labels.test.ts new file mode 100644 index 0000000..3de7fc9 --- /dev/null +++ b/src/services/get-labels.test.ts @@ -0,0 +1,40 @@ +/* eslint-disable import/first */ +import { mocked } from 'jest-mock'; +import { mockForgeApi } from '../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { getProjectLanguages } from '../client/gitlab'; +import { getProjectLabels } from './get-labels'; +import { TEST_TOKEN } from '../__tests__/fixtures/gitlab-data'; + +jest.mock('../client/gitlab'); +const mockGetProjectLanguages = mocked(getProjectLanguages); +const MOCK_TOPICS = ['topic-1', 'topic-2']; +const MOCK_PROJECT_ID = 12345; + +describe('get project labels', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('returns correct labels in case getProjectLanguages fails', async () => { + mockGetProjectLanguages.mockRejectedValue(new Error('Ooops!')); + + const result = await getProjectLabels(MOCK_PROJECT_ID, TEST_TOKEN, MOCK_TOPICS); + + 
expect(result).toEqual(MOCK_TOPICS); + }); + + it('returns correct labels and calculate main language', async () => { + mockGetProjectLanguages.mockResolvedValue({ + bash: 8.2, + javascript: 89.8, + html: 2, + }); + + const result = await getProjectLabels(MOCK_PROJECT_ID, TEST_TOKEN, MOCK_TOPICS); + + expect(result).toEqual([...MOCK_TOPICS, 'language:javascript']); + }); +}); diff --git a/src/services/get-labels.ts b/src/services/get-labels.ts new file mode 100644 index 0000000..3a0bbc1 --- /dev/null +++ b/src/services/get-labels.ts @@ -0,0 +1,18 @@ +import { getProjectLanguages } from '../client/gitlab'; + +const calculatePrimaryProjectLanguage = async (groupToken: string, projectId: number): Promise => { + try { + const languages = await getProjectLanguages(groupToken, projectId); + + return Object.keys(languages).sort((a, b) => languages[b] - languages[a])[0]; + } catch (err) { + console.error(err); + return undefined; + } +}; + +export const getProjectLabels = async (projectId: number, groupToken: string, topics: string[]): Promise => { + const language = await calculatePrimaryProjectLanguage(groupToken, projectId); + + return [...topics, ...(language ? 
[`language:${language.toLocaleLowerCase()}`] : [])]; +}; diff --git a/src/services/get-tracking-branch.test.ts b/src/services/get-tracking-branch.test.ts new file mode 100644 index 0000000..c32d004 --- /dev/null +++ b/src/services/get-tracking-branch.test.ts @@ -0,0 +1,34 @@ +/* eslint-disable import/first */ +import { mocked } from 'jest-mock'; +import { mockForgeApi } from '../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { getProjectVariable, getProjectBranch } from '../client/gitlab'; +import { getTrackingBranchName } from './get-tracking-branch'; + +jest.mock('../client/gitlab'); + +const mockGetProjectVariable = mocked(getProjectVariable); +const mockGetProjectBranch = mocked(getProjectBranch); + +const MOCK_NON_DEFAULT_BRANCH_NAME = 'non-default-branch'; +const MOCK_DEFAULT_BRANCH_NAME = 'main'; + +describe('getTrackingBranchName', () => { + it('returns non-default branch name if the variable and branch exist in Gitlab', async () => { + mockGetProjectVariable.mockResolvedValue(MOCK_NON_DEFAULT_BRANCH_NAME); + mockGetProjectBranch.mockResolvedValue({ name: MOCK_NON_DEFAULT_BRANCH_NAME }); + + expect(await getTrackingBranchName('groupToken', 1234, MOCK_DEFAULT_BRANCH_NAME)).toBe( + MOCK_NON_DEFAULT_BRANCH_NAME, + ); + }); + + it('returns default branch name if non-default branch does not exist', async () => { + mockGetProjectVariable.mockResolvedValue(MOCK_NON_DEFAULT_BRANCH_NAME); + mockGetProjectBranch.mockRejectedValue('404 Branch Not Found'); + + expect(await getTrackingBranchName('groupToken', 1234, MOCK_DEFAULT_BRANCH_NAME)).toEqual(MOCK_DEFAULT_BRANCH_NAME); + }); +}); diff --git a/src/services/get-tracking-branch.ts b/src/services/get-tracking-branch.ts new file mode 100644 index 0000000..5d338c4 --- /dev/null +++ b/src/services/get-tracking-branch.ts @@ -0,0 +1,18 @@ +import { getProjectBranch, getProjectVariable } from '../client/gitlab'; +import { NON_DEFAULT_BRANCH_VARIABLE_KEY } from '../constants'; + +export const 
getTrackingBranchName = async ( + groupToken: string, + projectId: number, + defaultBranch: string, +): Promise => { + try { + const branchName = await getProjectVariable(groupToken, projectId, NON_DEFAULT_BRANCH_VARIABLE_KEY); + await getProjectBranch(groupToken, projectId, branchName); + + return branchName; + } catch (e) { + console.log('Non-default branch not found.', e.message); + return defaultBranch; + } +}; diff --git a/src/services/group.test.ts b/src/services/group.test.ts new file mode 100644 index 0000000..d9c3077 --- /dev/null +++ b/src/services/group.test.ts @@ -0,0 +1,156 @@ +/* eslint-disable import/first */ + +import { mocked } from 'jest-mock'; +import { storage, mockForgeApi } from '../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { STORAGE_KEYS, STORAGE_SECRETS } from '../constants'; +import { getGroupAccessTokens, getGroupsData } from '../client/gitlab'; +import { connectGroup, getConnectedGroups, InvalidGroupTokenError } from './group'; +import { AuthErrorTypes, GitlabAPIGroup } from '../resolverTypes'; +import { GroupAccessToken } from '../types'; + +jest.mock('../client/gitlab'); + +const mockGetGroupsData = mocked(getGroupsData); +const mockGetGroupAccessTokens = mocked(getGroupAccessTokens); + +const MOCK_GROUP_DATA = { + name: 'koko', + id: 123, + full_name: 'GitLab/koko', + path: 'koko/momo', +}; + +const generateMockGroupAccessToken = (tokenPropertiesOverride: Partial = {}) => { + return { + user_id: 123, + scopes: ['api', 'write_repository'], + name: 'koko', + expires_at: '', + id: 567, + active: true, + created_at: '', + revoked: false, + access_level: 50, + ...tokenPropertiesOverride, + }; +}; + +const MOCK_TOKEN = 'glpat-geTHYDSDGHJJ'; + +const MOCK_CONNECTED_GROUPS = [ + { name: 'koko', id: 1234 }, + { name: 'momo', id: 2345 }, +]; +const storageQuerySuccess = jest.fn().mockImplementation(() => { + return { + where: () => { + return { + getMany: async () => { + return { + results: [ + { + key: 
`${STORAGE_KEYS.GROUP_KEY_PREFIX}${MOCK_CONNECTED_GROUPS[0].id}`, + value: MOCK_CONNECTED_GROUPS[0].name, + }, + { + key: `${STORAGE_KEYS.GROUP_KEY_PREFIX}${MOCK_CONNECTED_GROUPS[1].id}`, + value: MOCK_CONNECTED_GROUPS[1].name, + }, + ], + }; + }, + }; + }, + }; +}); + +describe('Group service', () => { + describe('connectGroup', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('saves token to storage and returns valid groupId', async () => { + const mockGroupAccessToken = generateMockGroupAccessToken(); + mockGetGroupsData.mockResolvedValue([MOCK_GROUP_DATA]); + mockGetGroupAccessTokens.mockResolvedValue([mockGroupAccessToken]); + + const result = await connectGroup(MOCK_TOKEN, mockGroupAccessToken.name); + + expect(storage.set).toHaveBeenCalledWith( + `${STORAGE_KEYS.GROUP_KEY_PREFIX}${MOCK_GROUP_DATA.id}`, + MOCK_GROUP_DATA.name, + ); + expect(storage.setSecret).toHaveBeenCalledWith( + `${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${MOCK_GROUP_DATA.id}`, + MOCK_TOKEN, + ); + + expect(result).toBe(MOCK_GROUP_DATA.id); + }); + + it('throws error in case of invalid group token', async () => { + const mockGroupAccessToken = generateMockGroupAccessToken(); + mockGetGroupsData.mockRejectedValue(undefined); + + await expect(connectGroup(MOCK_TOKEN, mockGroupAccessToken.name)).rejects.toThrow( + new InvalidGroupTokenError(AuthErrorTypes.INVALID_GROUP_TOKEN), + ); + expect(storage.set).not.toHaveBeenCalled(); + }); + + it('throws error in case of invalid group token name', async () => { + const mockGroupAccessToken = generateMockGroupAccessToken(); + mockGetGroupsData.mockResolvedValue([MOCK_GROUP_DATA]); + mockGetGroupAccessTokens.mockResolvedValue([mockGroupAccessToken]); + + await expect(connectGroup(MOCK_TOKEN, 'momo')).rejects.toThrow( + new InvalidGroupTokenError(AuthErrorTypes.INVALID_GROUP_TOKEN_NAME), + ); + expect(storage.set).not.toHaveBeenCalled(); + }); + + it('throws error in case of invalid group token scopes', async () => { + const 
mockGroupAccessToken = generateMockGroupAccessToken({ scopes: ['api'] }); + mockGetGroupsData.mockResolvedValue([MOCK_GROUP_DATA]); + mockGetGroupAccessTokens.mockResolvedValue([mockGroupAccessToken]); + + await expect(connectGroup(MOCK_TOKEN, mockGroupAccessToken.name)).rejects.toThrow( + new InvalidGroupTokenError(AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES), + ); + expect(storage.set).not.toHaveBeenCalled(); + }); + }); + + describe('getConnectedGroups', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('returns connected groups', async () => { + storage.query = storageQuerySuccess; + storage.getSecret = jest.fn().mockImplementation((tokenKey: string): Promise => { + return tokenKey === `${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${MOCK_CONNECTED_GROUPS[0].id}` + ? Promise.resolve('koko-token') + : Promise.resolve('momo-token'); + }); + mockGetGroupsData.mockImplementation((groupAccessToken: string): Promise => { + return groupAccessToken === 'koko-token' + ? Promise.resolve([MOCK_GROUP_DATA]) + : Promise.reject(new Error('Unauthorized')); + }); + + const result = await getConnectedGroups(); + + expect(storage.query).toHaveBeenCalled(); + expect(storage.delete).toHaveBeenCalledWith(`${STORAGE_KEYS.GROUP_KEY_PREFIX}${MOCK_CONNECTED_GROUPS[1].id}`); + expect(storage.deleteSecret).toHaveBeenCalledWith( + `${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${MOCK_CONNECTED_GROUPS[1].id}`, + ); + expect(result).toEqual([MOCK_GROUP_DATA]); + }); + }); +}); diff --git a/src/services/group.ts b/src/services/group.ts new file mode 100644 index 0000000..1fbb0dc --- /dev/null +++ b/src/services/group.ts @@ -0,0 +1,106 @@ +import { startsWith, storage } from '@forge/api'; + +import { GitLabAccessLevels, GitlabAPIGroup, GroupAccessToken } from '../types'; +import { getGroupAccessTokens, getGroupsData } from '../client/gitlab'; +import { REQUIRED_SCOPES, STORAGE_KEYS, STORAGE_SECRETS } from '../constants'; +import { AuthErrorTypes } from '../resolverTypes'; +import { 
deleteGroupDataFromStorage } from './clear-storage'; + +export class InvalidGroupTokenError extends Error { + constructor(public errorType: AuthErrorTypes) { + super(); + this.message = 'Token validation error.'; + } +} + +const findGroupToken = async ( + groupToken: string, + groupTokenName: string, + groupId: number, +): Promise => { + const groupAccessTokens = await getGroupAccessTokens(groupToken, groupId); + + return groupAccessTokens.find((groupAccessToken) => groupAccessToken.name === groupTokenName); +}; + +const validateGroupTokenScopes = (requiredScopes: string[], tokenScopes: string[]) => { + return requiredScopes.every((requiredScope) => tokenScopes.includes(requiredScope)); +}; + +export const connectGroup = async (token: string, tokenName: string): Promise => { + let groupId; + let groupName; + try { + const [group] = await getGroupsData(token, 'true'); + ({ id: groupId, name: groupName } = group); + } catch (e) { + throw new InvalidGroupTokenError(AuthErrorTypes.INVALID_GROUP_TOKEN); + } + + const groupToken = await findGroupToken(token, tokenName, groupId); + if (!groupToken) { + throw new InvalidGroupTokenError(AuthErrorTypes.INVALID_GROUP_TOKEN_NAME); + } + + const hasValidScopes = validateGroupTokenScopes(REQUIRED_SCOPES, groupToken.scopes); + if (!hasValidScopes) { + throw new InvalidGroupTokenError(AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES); + } + + await storage.set(`${STORAGE_KEYS.GROUP_KEY_PREFIX}${groupId}`, groupName); + await storage.setSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupId}`, token); + + return groupId; +}; + +const getGroups = async (owned?: string, minAccessLevel?: number): Promise => { + const response = storage.query().where('key', startsWith(STORAGE_KEYS.GROUP_KEY_PREFIX)); + + const { results: groups } = await response.getMany(); + + const tokens = await Promise.all( + groups.map((group) => + storage.getSecret( + `${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${group.key.replace(STORAGE_KEYS.GROUP_KEY_PREFIX, 
'')}`, + ), + ), + ); + + const groupPromises = tokens.map((token) => getGroupsData(token, owned, minAccessLevel)); + + // We need to remove revoked/invalid (on Gitlab side) tokens from storage + const groupsResult = await Promise.allSettled(groupPromises); + + const reducedGroupsResult = groupsResult.reduce<{ + accessedGroups: GitlabAPIGroup[]; + invalidGroupIds: string[]; + }>( + (result, currentGroupResult, i) => { + if (currentGroupResult.status === 'rejected' && currentGroupResult.reason.toString().includes('Unauthorized')) { + result.invalidGroupIds.push(groups[i].key.replace(STORAGE_KEYS.GROUP_KEY_PREFIX, '')); + } + if (currentGroupResult.status === 'fulfilled') { + if (minAccessLevel) { + result.accessedGroups.push(...currentGroupResult.value); + } else { + const [group] = currentGroupResult.value; + result.accessedGroups.push(group); + } + } + return result; + }, + { accessedGroups: [], invalidGroupIds: [] }, + ); + + await Promise.all(reducedGroupsResult.invalidGroupIds.map((id) => deleteGroupDataFromStorage(id))); + + return reducedGroupsResult.accessedGroups; +}; + +export const getConnectedGroups = async (): Promise => { + return getGroups('true'); +}; + +export const getAllExistingGroups = async (): Promise => { + return getGroups(null, GitLabAccessLevels.OWNER); +}; diff --git a/src/services/import-projects.test.ts b/src/services/import-projects.test.ts new file mode 100644 index 0000000..0140e10 --- /dev/null +++ b/src/services/import-projects.test.ts @@ -0,0 +1,233 @@ +/* eslint-disable import/order, import/first */ +import { mockForgeApi, mockForgeEvents } from '../__tests__/helpers/forge-helper'; + +mockForgeEvents(); +mockForgeApi(); + +import { Queue } from '@forge/events'; +import { storage } from '@forge/api'; +import { + ImportFailedError, + importProjects, + clearImportResult, + getImportResult, + getImportStatus, + QUEUE_ONE_TIME_LIMIT, +} from './import-projects'; +import { setLastSyncTime } from './last-sync-time'; +import { mocked } 
from 'jest-mock'; +import { ImportErrorTypes } from '../resolverTypes'; + +const storageGetSuccess = jest.fn().mockReturnValue(['jobId1', 'jobId2']); +const storageGetEmptyArray = jest.fn().mockReturnValue([]); +const storageSetSuccess = jest.fn().mockImplementation(() => Promise.resolve()); +const storageDeleteSuccess = jest.fn().mockImplementation(() => Promise.resolve()); +const storageQuerySuccess = jest.fn().mockImplementation(() => { + return { + where: () => { + return { + getMany: async () => { + return { + results: [{ key: 'key', value: 'value' }], + }; + }, + }; + }, + }; +}); + +const queueMockPushSuccess = jest.fn().mockImplementation(() => Promise.resolve()); +const queueMockGetJobSuccess = jest.fn().mockImplementation(() => { + return { + getStats: async () => { + return { + json: async () => { + return { inProgress: 0, failed: 0, success: 0 }; + }, + }; + }, + }; +}); + +const errorForClearImportProject = new Error('Failed to clear import projects result'); +const errorForImportProject = new Error('Import projects failed.'); +const errorForGetImportResult = new Error('Cannot get import result'); +const errorForGetProgressStatus = new Error('Cannot get progress status'); + +const storageDeleteFailed = jest.fn().mockRejectedValue(errorForClearImportProject); +const storageQueryFailed = jest.fn().mockImplementation(() => { + throw errorForClearImportProject; +}); +const storageGetFailed = jest.fn().mockRejectedValue(errorForGetImportResult); + +const setLastSyncTimeMockFailed = jest.fn().mockRejectedValue(errorForImportProject); +const queueMockGetJobFailed = jest.fn().mockRejectedValue(errorForGetProgressStatus); +const storageSetFailed = jest.fn().mockRejectedValue(errorForImportProject); +const queueMockPushFailed = jest.fn().mockRejectedValue(errorForImportProject); +const queueOneTimeLimitError = new ImportFailedError( + ImportErrorTypes.ONE_TIME_IMPORT_LIMIT, + `Sorry, unfortunately you can import maximum ${QUEUE_ONE_TIME_LIMIT} projects at one 
time.`, +); + +jest.mock('./last-sync-time', () => { + const module = jest.requireActual('./last-sync-time'); + return { ...module, setLastSyncTime: jest.fn() }; +}); + +const setLastSyncTimeMock = mocked(setLastSyncTime); + +const storageQueryFailedForGetImportResult = jest.fn().mockImplementation(() => { + throw errorForGetImportResult; +}); + +describe('importProjects test cases', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('importProjects test case: success for less than 50 projects per one import', async () => { + Queue.prototype.push = queueMockPushSuccess; + storage.set = storageSetSuccess; + + await importProjects('', new Array(5).fill(''), 1234); + + expect(Queue.prototype.push).toHaveBeenCalledTimes(1); + }); + + it('importProjects test case: success for more than 50 projects per one import', async () => { + Queue.prototype.push = queueMockPushSuccess; + storage.set = storageSetSuccess; + + await importProjects('', new Array(60).fill(''), 1234); + + expect(Queue.prototype.push).toHaveBeenCalledTimes(2); + }); + + it('importProjects test case: request failed by reason 500 projects limitation', async () => { + Queue.prototype.push = queueMockPushSuccess; + storage.set = storageSetSuccess; + + await expect(importProjects('', new Array(QUEUE_ONE_TIME_LIMIT + 1).fill(''), 1234)).rejects.toThrow( + queueOneTimeLimitError, + ); + }); + + it('importProjects test case: queue failed', async () => { + Queue.prototype.push = queueMockPushFailed; + + await expect(importProjects('', new Array(5).fill(''), 1234)).rejects.toThrow(errorForImportProject); + expect(storage.set).not.toHaveBeenCalled(); + }); + + it('importProjects test case: storage set failed', async () => { + Queue.prototype.push = queueMockPushSuccess; + storage.set = storageSetFailed; + + await expect(importProjects('', [], 1234)).rejects.toThrow(errorForImportProject); + }); + + it('importRepositories test case: set last sync time failed', async () => { + Queue.prototype.push = 
queueMockPushSuccess; + storage.set = storageSetSuccess; + setLastSyncTimeMock.mockImplementation(setLastSyncTimeMockFailed); + + await expect(importProjects('', [], 1234)).rejects.toThrow(errorForImportProject); + + expect(storage.set).toHaveBeenCalledTimes(2); + }); +}); + +describe('clearImportResult test cases', () => { + beforeEach(() => { + storage.delete = storageDeleteFailed; + }); + + it('clearImportResult test case: success', async () => { + storage.delete = storageDeleteSuccess; + storage.query = storageQuerySuccess; + + await expect(clearImportResult()).resolves.not.toThrow(); + }); + + it('clearImportResult test case: storage query failed', async () => { + storage.query = storageQueryFailed; + + await expect(clearImportResult()).rejects.toThrow(errorForClearImportProject); + + expect(storage.query).toHaveBeenCalled(); + expect(storage.delete).not.toHaveBeenCalled(); + }); + + it('clearImportResult test case: storage delete failed', async () => { + storage.query = storageQuerySuccess; + + await expect(clearImportResult()).rejects.toThrow(errorForClearImportProject); + + expect(storage.query).toHaveBeenCalled(); + expect(storage.delete).toHaveBeenCalled(); + }); +}); + +describe('getImportResult test cases', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('getImportResult test case: success', async () => { + storage.query = storageQuerySuccess; + storage.get = jest.fn().mockReturnValue(1); + + expect(await getImportResult()).toEqual({ failed: ['value'], total: 1 }); + }); + + it('getImportResult test case: storage query failed', async () => { + storage.query = storageQueryFailedForGetImportResult; + + await expect(getImportResult()).rejects.toThrow(errorForGetImportResult); + + expect(storage.query).toHaveBeenCalled(); + expect(storage.get).not.toHaveBeenCalled(); + }); + + it('getImportResult test case: storage get failed', async () => { + storage.query = storageQuerySuccess; + storage.get = storageGetFailed; + + await 
expect(getImportResult()).rejects.toThrow(errorForGetImportResult); + + expect(storage.query).toHaveBeenCalled(); + expect(storage.get).toHaveBeenCalled(); + }); +}); + +describe('getImportStatus test cases', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('getImportStatus test case: success', async () => { + storage.get = storageGetSuccess; + Queue.prototype.getJob = queueMockGetJobSuccess; + + const result = await getImportStatus(); + + expect(result).toEqual({ failed: 0, success: 0, inProgress: 0 }); + expect(queueMockGetJobSuccess).toHaveBeenCalledTimes(2); + }); + + it('getImportStatus test case: error empty jobIds array', async () => { + storage.get = storageGetEmptyArray; + Queue.prototype.getJob = queueMockGetJobSuccess; + + await expect(getImportStatus()).rejects.toThrow('No running job'); + expect(queueMockGetJobSuccess).not.toHaveBeenCalled(); + }); + + it('getImportStatus test case: failed', async () => { + storage.get = queueMockGetJobFailed; + Queue.prototype.getJob = queueMockGetJobSuccess; + + await expect(getImportStatus()).rejects.toThrow(errorForGetProgressStatus); + expect(queueMockGetJobSuccess).not.toHaveBeenCalled(); + }); +}); diff --git a/src/services/import-projects.ts b/src/services/import-projects.ts new file mode 100644 index 0000000..152ca6b --- /dev/null +++ b/src/services/import-projects.ts @@ -0,0 +1,133 @@ +// eslint-disable-next-line max-classes-per-file +import { storage, startsWith, ListResult } from '@forge/api'; +import { Queue } from '@forge/events'; +import { Payload } from '@forge/events/out/types'; +import { chunk } from 'lodash'; + +import { CLEAR_STORAGE_CHUNK_SIZE, CLEAR_STORAGE_DELAY, STORAGE_KEYS } from '../constants'; +import { Queues, ImportableProject, ProjectImportResult, ImportStatus } from '../types'; +import { ImportErrorTypes } from '../resolverTypes'; +import { setLastSyncTime } from './last-sync-time'; +import { deleteKeysFromStorageByChunks } from '../utils/storage-utils'; + +export const 
QUEUE_ONE_TIME_LIMIT = 500; +const QUEUE_PUSH_EVENTS_LIMIT = 50; + +class OneTimeLimitImportError extends Error {} + +export class ImportFailedError extends Error { + constructor(readonly errorType: ImportErrorTypes, readonly message: string) { + super(message); + } +} + +export const importProjects = async ( + cloudId: string, + projectsReadyToImport: ImportableProject[], + groupId: number, +): Promise => { + try { + const queueOneTimeLimit = process.env.QUEUE_ONE_TIME_LIMIT || QUEUE_ONE_TIME_LIMIT; + + const queue = new Queue({ key: Queues.IMPORT }); + + if (projectsReadyToImport.length > queueOneTimeLimit) { + throw new OneTimeLimitImportError( + `Sorry, unfortunately you can import maximum ${QUEUE_ONE_TIME_LIMIT} projects at one time.`, + ); + } + + const projectsData: Payload[] = projectsReadyToImport.map((project) => { + return { + createProjectData: JSON.stringify({ + cloudId, + project, + groupId, + }), + }; + }); + + const jobIds = []; + const projectChunks = chunk(projectsData, QUEUE_PUSH_EVENTS_LIMIT); + + for (const projectChunk of projectChunks) { + const jobId = await queue.push(projectChunk, { delayInSeconds: 2 }); + jobIds.push(jobId); + } + + await storage.set(STORAGE_KEYS.CURRENT_IMPORT_TOTAL_PROJECTS, projectsReadyToImport.length); + await storage.set(STORAGE_KEYS.CURRENT_IMPORT_QUEUE_JOB_IDS, jobIds); + + await setLastSyncTime(); + } catch (e) { + console.error(e.message); + if (e instanceof OneTimeLimitImportError) { + throw new ImportFailedError(ImportErrorTypes.ONE_TIME_IMPORT_LIMIT, e.message); + } + + throw new ImportFailedError(ImportErrorTypes.UNEXPECTED_ERROR, e.message); + } +}; + +export const getImportStatus = async (): Promise => { + try { + const jobIds = await storage.get(STORAGE_KEYS.CURRENT_IMPORT_QUEUE_JOB_IDS); + if (!jobIds.length) { + throw new Error('No running job'); + } + const queue = new Queue({ key: Queues.IMPORT }); + const jobStatuses = await Promise.all( + jobIds.map((id: string) => { + const job = queue.getJob(id); 
+ return job.getStats().then((s) => s.json()); + }), + ); + + return jobStatuses.reduce( + (acc: ImportStatus, importStatus: ImportStatus) => { + return { + inProgress: acc.inProgress + importStatus.inProgress, + success: acc.success + importStatus.success, + failed: acc.failed + importStatus.failed, + }; + }, + { inProgress: 0, success: 0, failed: 0 }, + ); + } catch (err) { + throw new ImportFailedError(ImportErrorTypes.CANNOT_GET_PROGRESS_STATUS, err.message); + } +}; + +const getFailedProjects = (): Promise => { + const response = storage.query().where('key', startsWith(STORAGE_KEYS.CURRENT_IMPORT_FAILED_PROJECT_PREFIX)); + + return response.getMany(); +}; + +export const getImportResult = async (): Promise => { + try { + const listFailedProjects = await getFailedProjects(); + const failed = listFailedProjects.results.map(({ value }) => value as ImportableProject); + const total = await storage.get(STORAGE_KEYS.CURRENT_IMPORT_TOTAL_PROJECTS); + return { + failed, + total, + }; + } catch (err) { + throw new ImportFailedError(ImportErrorTypes.CANNOT_GET_IMPORT_RESULT, err.message); + } +}; + +export const clearImportResult = async (): Promise => { + try { + const failedProjects = await getFailedProjects(); + const deleteFailedProjects = failedProjects.results.map(({ key }) => key); + await deleteKeysFromStorageByChunks( + [...deleteFailedProjects, STORAGE_KEYS.CURRENT_IMPORT_TOTAL_PROJECTS], + CLEAR_STORAGE_CHUNK_SIZE, + CLEAR_STORAGE_DELAY, + ); + } catch (err) { + throw new ImportFailedError(ImportErrorTypes.FAILED_CLEAR_IMPORT_RESULT, err.message); + } +}; diff --git a/src/services/insert-metric-values.test.ts b/src/services/insert-metric-values.test.ts new file mode 100644 index 0000000..d852218 --- /dev/null +++ b/src/services/insert-metric-values.test.ts @@ -0,0 +1,34 @@ +/* eslint-disable import/first, import/order */ +import { mockAgg, mockInsertMetricValueByExternalId } from '../__tests__/helpers/mock-agg'; + +mockAgg(); +import { insertMetricValues } 
from './insert-metric-values'; +import { MOCK_CLOUD_ID } from '../__tests__/fixtures/gitlab-data'; +import { generateMetric, generateMetricInput } from '../__tests__/helpers/gitlab-helper'; +import { BuiltinMetricDefinitions } from '@atlassian/forge-graphql'; + +const MOCK_METRIC_INPUT = generateMetricInput([ + generateMetric(BuiltinMetricDefinitions.WEEKLY_DEPLOYMENT_FREQUENCY_28D), +]); + +describe('insertMetricValues', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('inserts metric values', async () => { + await insertMetricValues(MOCK_METRIC_INPUT, MOCK_CLOUD_ID); + + expect(mockInsertMetricValueByExternalId).toHaveBeenCalledWith({ + cloudId: MOCK_CLOUD_ID, + externalMetricSourceId: MOCK_METRIC_INPUT.projectID, + metricDefinitionId: MOCK_METRIC_INPUT.metrics[0].metricAri, + value: { + value: MOCK_METRIC_INPUT.metrics[0].value, + timestamp: MOCK_METRIC_INPUT.metrics[0].timestamp, + }, + }); + + expect(mockInsertMetricValueByExternalId).toHaveBeenCalledTimes(1); + }); +}); diff --git a/src/services/insert-metric-values.ts b/src/services/insert-metric-values.ts new file mode 100644 index 0000000..fbc01e6 --- /dev/null +++ b/src/services/insert-metric-values.ts @@ -0,0 +1,25 @@ +import { insertMetricValueByExternalId } from '../client/compass'; +import { MetricsEventPayload } from '../types'; + +export const insertMetricValues = async (metricsPayload: MetricsEventPayload, cloudId: string): Promise => { + const startTime = Date.now(); + const { projectID, metrics } = metricsPayload; + + console.log({ + message: 'Sending metrics to compass.', + metricsCount: metrics.length, + projectID, + cloudId, + }); + + await Promise.all( + metrics.map(async (metric) => { + await insertMetricValueByExternalId(cloudId, projectID, metric); + }), + ); + + console.log({ + message: 'insertMetricValues finished.', + duration: Date.now() - startTime, + }); +}; diff --git a/src/services/last-sync-time.ts b/src/services/last-sync-time.ts new file mode 100644 index 
0000000..07fb3f7 --- /dev/null +++ b/src/services/last-sync-time.ts @@ -0,0 +1,12 @@ +import { storage } from '@forge/api'; + +import { STORAGE_KEYS } from '../constants'; + +export const setLastSyncTime = async (lastSyncTime: string = new Date().toISOString()): Promise => + storage.set(STORAGE_KEYS.LAST_SYNC_TIME, lastSyncTime); + +export const getLastSyncTime = async (): Promise => { + const lastSyncTime = await storage.get(STORAGE_KEYS.LAST_SYNC_TIME); + + return lastSyncTime || null; +}; diff --git a/src/services/mergeRequest.test.ts b/src/services/mergeRequest.test.ts new file mode 100644 index 0000000..aaa6528 --- /dev/null +++ b/src/services/mergeRequest.test.ts @@ -0,0 +1,39 @@ +/* eslint-disable import/first, import/order */ +import { mocked } from 'jest-mock'; + +import { mockAgg } from '../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { TEST_TOKEN } from '../__tests__/fixtures/gitlab-data'; +import { fetchPaginatedData } from '../utils/fetchPaginatedData'; +import { getLastMergedMergeRequests, getOpenMergeRequests } from './mergeRequest'; +import { getMergeRequests } from '../client/gitlab'; + +jest.mock('../utils/fetchPaginatedData'); +jest.mock('../client/gitlab'); + +const mockedFetchPaginatedData = mocked(fetchPaginatedData); +const mockedGetMergeRequests = mocked(getMergeRequests); + +const MOCK_PROJECT_ID = 12345; +const BRANCH_NAME = 'koko'; + +describe('MergeRequest Service', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('throws error in case of failed open MR fetching', async () => { + const errorMsg = 'Error while fetching open merge requests from Gitlab!'; + mockedFetchPaginatedData.mockRejectedValue(new Error(errorMsg)); + await expect(getOpenMergeRequests(TEST_TOKEN, MOCK_PROJECT_ID, BRANCH_NAME)).rejects.toThrowError(errorMsg); + }); + + it('throws error in case of failed merged merge requests fetching', async () => { + const errorMsg = 'Error while fetching merged merge requests from Gitlab!'; + + 
mockedGetMergeRequests.mockRejectedValue(new Error(errorMsg)); + await expect(getLastMergedMergeRequests(TEST_TOKEN, MOCK_PROJECT_ID, BRANCH_NAME)).rejects.toThrowError(errorMsg); + }); +}); diff --git a/src/services/mergeRequest.ts b/src/services/mergeRequest.ts new file mode 100644 index 0000000..1e3104b --- /dev/null +++ b/src/services/mergeRequest.ts @@ -0,0 +1,58 @@ +import { fetchPaginatedData } from '../utils/fetchPaginatedData'; +import { getMergeRequests, MergeRequestWorkInProgressFilterOptions } from '../client/gitlab'; +import { MergeRequest, MergeRequestOrderBy, MergeRequestState } from '../types'; + +export const getOpenMergeRequests = async ( + groupToken: string, + projectId: number, + targetBranch: string, +): Promise => { + const scope = 'all'; + + try { + return fetchPaginatedData(getMergeRequests, { + groupToken, + projectId, + state: MergeRequestState.OPENED, + scope, + targetBranch, + orderBy: MergeRequestOrderBy.UPDATED_AT, + wip: MergeRequestWorkInProgressFilterOptions.FILTER_OUT_WIP, + isSimpleView: true, + }); + } catch (err) { + const ERROR_MESSAGE = 'Error while fetching open merge requests from Gitlab!'; + console.error(ERROR_MESSAGE, err); + + throw new Error(ERROR_MESSAGE); + } +}; + +export const getLastMergedMergeRequests = async ( + groupToken: string, + projectId: number, + targetBranch: string, + numberOfMergeRequests = 10, +): Promise => { + const page = 1; + const scope = 'all'; + + try { + const { data } = await getMergeRequests(page, numberOfMergeRequests, { + groupToken, + projectId, + state: MergeRequestState.MERGED, + scope, + targetBranch, + orderBy: MergeRequestOrderBy.UPDATED_AT, + }); + + return data; + } catch (e) { + const ERROR_MESSAGE = 'Error while fetching merged merge requests from Gitlab!'; + + console.error(ERROR_MESSAGE, e.message); + + throw new Error(ERROR_MESSAGE); + } +}; diff --git a/src/services/metric-calculations/merge-request-cycle-time.test.ts 
b/src/services/metric-calculations/merge-request-cycle-time.test.ts new file mode 100644 index 0000000..e53c53f --- /dev/null +++ b/src/services/metric-calculations/merge-request-cycle-time.test.ts @@ -0,0 +1,17 @@ +/* eslint-disable import/first */ +import { mockForgeApi } from '../../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { mergeRequests } from '../../__tests__/fixtures/gitlab-data'; +import { mergeRequestCycleTime } from './merge-request-cycle-time'; + +describe('mergeRequestCycleTime', () => { + it('calculate cycle time', () => { + expect(mergeRequestCycleTime(mergeRequests)).toBe(13); + }); + + it('calculate cycle time for empty array with MRs', () => { + expect(mergeRequestCycleTime([])).toBe(0); + }); +}); diff --git a/src/services/metric-calculations/merge-request-cycle-time.ts b/src/services/metric-calculations/merge-request-cycle-time.ts new file mode 100644 index 0000000..fb14e08 --- /dev/null +++ b/src/services/metric-calculations/merge-request-cycle-time.ts @@ -0,0 +1,28 @@ +import { MergeRequest } from '../../types'; + +const MILLIS_PER_MIN = 60000; + +const calculateCycleTimeForMr = (mergeRequest: MergeRequest): number => { + const { created_at: createdAt, merged_at: mergedAt } = mergeRequest; + + if (!createdAt || !mergedAt) { + console.error('No merge request created on or merge time found.'); + + return undefined; + } + + const timeOpenedInMs = Date.parse(createdAt); + const timeMergedInMs = Date.parse(mergedAt); + const cycleTimeInMs = timeMergedInMs - timeOpenedInMs; + const cycleTimeInMins = cycleTimeInMs / MILLIS_PER_MIN; + return cycleTimeInMins; +}; + +const isDefined = (value: T | undefined): value is T => value !== undefined; + +export const mergeRequestCycleTime = (mergeRequests: MergeRequest[]): number | null => { + const cycleTimes = mergeRequests.map((mergeRequest) => calculateCycleTimeForMr(mergeRequest)).filter(isDefined); + const average = cycleTimes.length ? 
cycleTimes.reduce((acc, cycleTime) => acc + cycleTime, 0) / cycleTimes.length : 0; + + return Math.ceil(average) || 0; +}; diff --git a/src/services/send-compass-event.test.ts b/src/services/send-compass-event.test.ts new file mode 100644 index 0000000..ae70978 --- /dev/null +++ b/src/services/send-compass-event.test.ts @@ -0,0 +1,28 @@ +/* eslint-disable import/first */ +/* eslint-disable import/order */ +import { mocked } from 'jest-mock'; +import { mockForgeApi } from '../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { CompassEventType } from '@atlassian/forge-graphql'; +import { sendEvents } from '../client/compass'; +import { sendEventToCompass } from './send-compass-events'; +import { pipelineWebhookFixture } from '../__tests__/fixtures/build-webhook-payload'; +import { MOCK_CLOUD_ID } from '../__tests__/fixtures/gitlab-data'; +import { webhookPipelineEventToCompassBuildEvent } from './builds'; + +jest.mock('../client/compass'); +const mockSendEvents = mocked(sendEvents); + +describe('send build event to compass method', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('successfully maps gitlab pipeline event to compass build event and send it', async () => { + await sendEventToCompass(webhookPipelineEventToCompassBuildEvent(pipelineWebhookFixture, MOCK_CLOUD_ID)); + + expect(mockSendEvents).toMatchSnapshot(); + }); +}); diff --git a/src/services/send-compass-events.ts b/src/services/send-compass-events.ts new file mode 100644 index 0000000..20f762a --- /dev/null +++ b/src/services/send-compass-events.ts @@ -0,0 +1,10 @@ +import { CompassCreateEventInput } from '@atlassian/forge-graphql'; +import { sendEvents } from '../client/compass'; + +export const sendEventToCompass = async (payload: CompassCreateEventInput): Promise => { + try { + await sendEvents(payload); + } catch (e) { + console.error(`Error sending event to Compass`, e); + } +}; diff --git a/src/services/sync-component-with-file/find-config-file-changes.test.ts 
b/src/services/sync-component-with-file/find-config-file-changes.test.ts new file mode 100644 index 0000000..2816e75 --- /dev/null +++ b/src/services/sync-component-with-file/find-config-file-changes.test.ts @@ -0,0 +1,115 @@ +/* eslint-disable import/first */ +import { mocked } from 'jest-mock'; +import { mockForgeApi } from '../../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { groupDiffsByChangeType } from '../../utils/push-event-utils'; +import { getCommitDiff, getFileContent } from '../../client/gitlab'; +import { findConfigAsCodeFileChanges } from './find-config-file-changes'; +import { generatePushEvent } from '../../__tests__/helpers/gitlab-helper'; +import { CommitFileDiff, CompassYaml, ComponentChanges } from '../../types'; + +jest.mock('../../client/gitlab', () => ({ + getCommitDiff: jest.fn(), + getFileContent: jest.fn(), +})); +jest.mock('../../utils/push-event-utils', () => { + const module = jest.requireActual('../../utils/push-event-utils'); + return { + ...module, + groupDiffsByChangeType: jest.fn(), + }; +}); + +const getCommitDiffMock = mocked(getCommitDiff); +const groupDiffsByChangeTypeMock = mocked(groupDiffsByChangeType); +const getFileContentMock = mocked(getFileContent); + +const createCommitFileDiffMock = ( + mockFileContents: CompassYaml[], + overrideCommitFileDiff: Partial = {}, +): CommitFileDiff => { + mockFileContents.forEach((fileContent) => getFileContentMock.mockResolvedValueOnce(fileContent)); + + return { + diff: 'diff', + new_path: 'new/path', + old_path: 'old/path', + new_file: false, + renamed_file: false, + deleted_file: false, + ...overrideCommitFileDiff, + }; +}; + +describe('findConfigAsCodeFileChanges', () => { + beforeEach(() => { + jest.clearAllMocks(); + getCommitDiffMock.mockResolvedValue([ + { + diff: 'string', + old_path: 'string', + new_path: 'string', + new_file: true, + renamed_file: false, + deleted_file: false, + }, + ]); + }); + + it('returns empty componentsToSync and componentsToUnlink arrays 
if no changes present', async () => { + const event = generatePushEvent(); + groupDiffsByChangeTypeMock.mockReturnValue({ added: [], modified: [], removed: [] }); + const expectedResult: ComponentChanges = { + componentsToSync: [], + componentsToUnlink: [], + }; + + const result = await findConfigAsCodeFileChanges(event, 'token'); + expect(result).toEqual(expectedResult); + }); + + it('returns correct componentsToSync and componentsToUnlink', async () => { + const event = generatePushEvent(); + + const removedMock = [ + createCommitFileDiffMock([{ id: 'removed1' }], { deleted_file: true }), + createCommitFileDiffMock([{ id: 'removed2' }], { deleted_file: true }), + ]; + + const addedMock = [ + createCommitFileDiffMock([{ id: 'added1' }], { new_path: 'path/to/name1.yml', new_file: true }), + createCommitFileDiffMock([{ id: 'added2' }], { new_path: 'path/to/name2.yml', new_file: true }), + ]; + + const modifiedMock = [ + createCommitFileDiffMock([{ id: 'modifiedBefore1' }, { id: 'modifiedAfter1' }], { new_path: 'path/name3.yaml' }), + createCommitFileDiffMock([{ id: 'modifiedBefore2' }, { id: 'modifiedAfter2' }], { new_path: 'path/name4.yaml' }), + ]; + + groupDiffsByChangeTypeMock.mockReturnValue({ + added: addedMock, + modified: modifiedMock, + removed: removedMock, + }); + const expectedResult: ComponentChanges = { + componentsToSync: [ + { componentYaml: { id: 'added1' }, absoluteFilePath: 'path/to/name1.yml' }, + { componentYaml: { id: 'added2' }, absoluteFilePath: 'path/to/name2.yml' }, + { componentYaml: { id: 'modifiedAfter1' }, absoluteFilePath: 'path/name3.yaml' }, + { componentYaml: { id: 'modifiedAfter2' }, absoluteFilePath: 'path/name4.yaml' }, + ], + componentsToUnlink: [ + { id: 'removed1' }, + { id: 'removed2' }, + { id: 'modifiedBefore1' }, + { id: 'modifiedBefore2' }, + ], + }; + + const result = await findConfigAsCodeFileChanges(event, 'token'); + + expect(result).toEqual(expectedResult); + }); +}); diff --git 
a/src/services/sync-component-with-file/find-config-file-changes.ts b/src/services/sync-component-with-file/find-config-file-changes.ts new file mode 100644 index 0000000..7945c6f --- /dev/null +++ b/src/services/sync-component-with-file/find-config-file-changes.ts @@ -0,0 +1,112 @@ +import { CommitFileDiff, CompassYaml, ComponentChanges, ComponentSyncPayload, PushEvent } from '../../types'; +import { getCommitDiff, getFileContent } from '../../client/gitlab'; +import { groupDiffsByChangeType } from '../../utils/push-event-utils'; + +const getRemovedFiles = async ( + token: string, + compassYmlFilesDiffs: CommitFileDiff[], + event: PushEvent, +): Promise => { + return Promise.all( + compassYmlFilesDiffs.map((diff: CommitFileDiff) => { + return getFileContent(token, event.project.id, diff.old_path, event.before); + }), + ); +}; + +const getAddedFiles = async ( + token: string, + compassYmlFilesDiffs: CommitFileDiff[], + event: PushEvent, +): Promise => { + return Promise.all( + compassYmlFilesDiffs.map((diff: CommitFileDiff) => + getFileContent(token, event.project.id, diff.new_path, event.after).then((componentYaml) => ({ + componentYaml, + absoluteFilePath: diff.new_path, + })), + ), + ); +}; + +const getModifiedFiles = async ( + token: string, + compassYmlFilesDiffs: CommitFileDiff[], + event: PushEvent, +): Promise<{ componentsToSync: ComponentSyncPayload[]; componentsToUnlink: CompassYaml[] }> => { + const changes = await Promise.all( + compassYmlFilesDiffs.map(async (diff) => { + const oldFilePromise = getFileContent(token, event.project.id, diff.old_path, event.before); + const newFilePromise = getFileContent(token, event.project.id, diff.new_path, event.after); + + const [oldFile, newFile] = await Promise.all([oldFilePromise, newFilePromise]); + + const componentSyncPayload: ComponentSyncPayload = { + componentYaml: newFile, + absoluteFilePath: diff.new_path, + }; + + return { + oldFile, + newFile: componentSyncPayload, + }; + }), + ); + + return 
changes.reduce<{ componentsToSync: ComponentSyncPayload[]; componentsToUnlink: CompassYaml[] }>( + (result, { oldFile, newFile }) => { + if (oldFile.id !== newFile.componentYaml.id) { + result.componentsToUnlink.push(oldFile); + } + result.componentsToSync.push(newFile); + + return result; + }, + { componentsToSync: [], componentsToUnlink: [] }, + ); +}; + +export const findConfigAsCodeFileChanges = async (event: PushEvent, token: string): Promise => { + let filesDiffs: CommitFileDiff[] = []; + try { + filesDiffs = await getCommitDiff(token, event.project.id, event.checkout_sha); + } catch (e) { + console.error({ + message: 'Error with commits diff request', + error: e, + }); + throw e; + } + + const { added, removed, modified } = groupDiffsByChangeType(filesDiffs); + + if (added.length === 0 && removed.length === 0 && modified.length === 0) { + return { + componentsToSync: [], + componentsToUnlink: [], + }; + } + + const [removedComponents, addedComponents, modifiedComponents] = await Promise.all([ + getRemovedFiles(token, removed, event), + getAddedFiles(token, added, event), + getModifiedFiles(token, modified, event), + ]); + + const componentsToSync = [...addedComponents, ...modifiedComponents.componentsToSync]; + const componentsToUnlink = [...removedComponents, ...modifiedComponents.componentsToUnlink]; + + console.log( + 'component IDs to sync', + componentsToSync.map((c) => c.componentYaml.id), + ); + console.log( + 'component IDs to remove', + componentsToUnlink.map((c) => c.id), + ); + + return { + componentsToSync, + componentsToUnlink, + }; +}; diff --git a/src/services/sync-component-with-file/index.ts b/src/services/sync-component-with-file/index.ts new file mode 100644 index 0000000..fb8ce46 --- /dev/null +++ b/src/services/sync-component-with-file/index.ts @@ -0,0 +1,3 @@ +export { syncComponent } from './sync-component'; +export { unlinkComponent } from './unlink-component'; +export { findConfigAsCodeFileChanges } from './find-config-file-changes'; 
diff --git a/src/services/sync-component-with-file/report-sync-error.test.ts b/src/services/sync-component-with-file/report-sync-error.test.ts new file mode 100644 index 0000000..65047d8 --- /dev/null +++ b/src/services/sync-component-with-file/report-sync-error.test.ts @@ -0,0 +1,56 @@ +/* eslint-disable import/first, import/order */ +import { mockAgg, mockUpdateComponentDataManager } from '../../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { ComponentSyncEventStatus } from '@atlassian/forge-graphql'; +import { AggClientError, InvalidConfigFileError } from '../../models/errors'; +import { reportSyncError } from './report-sync-error'; + +describe('reportSyncError', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should build lastSyncError with UserError status in case of InvalidConfigFileError', async () => { + const error = new InvalidConfigFileError(['error1'], 'error'); + await reportSyncError(error, 'id', 'externalSourceURL'); + + expect(mockUpdateComponentDataManager).toBeCalledWith( + expect.objectContaining({ + lastSyncEvent: { + status: ComponentSyncEventStatus.UserError, + lastSyncErrors: error.errors, + }, + }), + ); + }); + + it('should build lastSyncError with UserError status in case of AggClientError', async () => { + const error = new AggClientError(['error2']); + await reportSyncError(error, 'id', 'externalSourceURL'); + + expect(mockUpdateComponentDataManager).toBeCalledWith( + expect.objectContaining({ + lastSyncEvent: { + status: ComponentSyncEventStatus.UserError, + lastSyncErrors: error.errors, + }, + }), + ); + }); + + it('should build lastSyncError with ServerError status in case of other errors', async () => { + const error = new Error('message'); + await reportSyncError(error, 'id', 'externalSourceURL'); + + expect(mockUpdateComponentDataManager).toBeCalledWith( + expect.objectContaining({ + lastSyncEvent: { + status: ComponentSyncEventStatus.ServerError, + lastSyncErrors: [error.message], + }, + }), + ); + }); 
+}); diff --git a/src/services/sync-component-with-file/report-sync-error.ts b/src/services/sync-component-with-file/report-sync-error.ts new file mode 100644 index 0000000..391cbfa --- /dev/null +++ b/src/services/sync-component-with-file/report-sync-error.ts @@ -0,0 +1,38 @@ +import { ComponentSyncEventStatus } from '@atlassian/forge-graphql'; +import { updateDataManager } from '../../client/compass'; + +import { AggClientError, InvalidConfigFileError } from '../../models/errors'; + +const buildLastSyncEvent = ( + error: Error, +): { + status: ComponentSyncEventStatus.ServerError | ComponentSyncEventStatus.UserError; + lastSyncErrors: string[]; +} => { + if (error instanceof InvalidConfigFileError || error instanceof AggClientError) { + return { + status: ComponentSyncEventStatus.UserError, + lastSyncErrors: error.errors, + }; + } + + return { + status: ComponentSyncEventStatus.ServerError, + lastSyncErrors: [error.message], + }; +}; + +export const reportSyncError = async (error: Error, componentId: string, externalSourceURL: string): Promise => { + try { + await updateDataManager({ + componentId, + externalSourceURL, + lastSyncEvent: buildLastSyncEvent(error), + }); + } catch (e) { + console.error({ + message: 'Error reporting sync error to data manager.', + error: e, + }); + } +}; diff --git a/src/services/sync-component-with-file/sync-component.test.ts b/src/services/sync-component-with-file/sync-component.test.ts new file mode 100644 index 0000000..b723cc6 --- /dev/null +++ b/src/services/sync-component-with-file/sync-component.test.ts @@ -0,0 +1,280 @@ +/* eslint-disable import/first, import/order */ +import { + mockAgg, + mockGetComponent, + mockCreateExternalAlias, + mockUpdateComponent, +} from '../../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { Component, CompassComponentType, CompassLinkType } from '@atlassian/forge-graphql'; +import { mocked } from 'jest-mock'; + +import { generatePushEvent } from '../../__tests__/helpers/gitlab-helper'; 
+import { CompassYaml, YamlLink } from '../../types'; +import { syncComponent } from './sync-component'; +import { EXTERNAL_SOURCE, IMPORT_LABEL } from '../../constants'; +import { reportSyncError } from './report-sync-error'; +import { getProjectById } from '../../client/gitlab'; +import { getProjectLabels } from '../get-labels'; +import { TEST_GET_PROJECT_BY_ID_RESPONSE, TEST_TOKEN } from '../../__tests__/fixtures/gitlab-data'; + +jest.mock('../../client/gitlab'); +jest.mock('../../services/get-labels'); +jest.mock('./validate-config-file'); +jest.mock('./yaml-config-transforms'); +jest.mock('./report-sync-error'); + +const TEST_FILE_NAME = 'TEST_FILE_NAME'; + +const MOCK_GET_PROJECT_LABELS = [...TEST_GET_PROJECT_BY_ID_RESPONSE.topics, 'language:javascript']; +const MOCK_COMPONENT_LABELS = ['label']; + +const mockGetProjectById = mocked(getProjectById); +const mockGetProjectLabels = mocked(getProjectLabels); + +const getMockedComponent = (overrideMockedComponent: Partial = {}): Component => ({ + id: 'id', + name: 'mock', + type: CompassComponentType.Service, + typeId: 'service', + labels: MOCK_COMPONENT_LABELS, + changeMetadata: {}, + ...overrideMockedComponent, +}); + +const getMockedCompassYaml = (overrideMockedCompassYaml: Partial = {}): CompassYaml => ({ + id: 'id', + name: 'name', + description: 'description', + ownerId: 'ownerId', + fields: { tier: 1 }, + relationships: { DEPENDS_ON: ['depends1'] }, + ...overrideMockedCompassYaml, +}); + +const createCompassYamlLink = (type: CompassLinkType): YamlLink => ({ + type, + url: 'https://url', +}); + +describe('syncComponent', () => { + beforeEach(() => { + jest.clearAllMocks(); + mockGetProjectById.mockResolvedValue(TEST_GET_PROJECT_BY_ID_RESPONSE); + mockGetProjectLabels.mockResolvedValue(MOCK_GET_PROJECT_LABELS); + }); + + it('creates external alias for component in case data manager is not present and external alias was not created previously for the project', async () => { + const event = generatePushEvent(); 
+ const compassYaml = getMockedCompassYaml(); + const component = getMockedComponent({ + externalAliases: [{ externalAliasId: '000000', externalSource: EXTERNAL_SOURCE }], + }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + const expectedParameter = { + componentId: compassYaml.id, + externalAlias: { + externalId: event.project.id.toString(), + externalSource: EXTERNAL_SOURCE, + }, + }; + + expect(mockCreateExternalAlias).toBeCalledWith(expectedParameter); + }); + + it('should not create external alias for component in case data manager is present', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml(); + const component = getMockedComponent({ dataManager: { externalSourceURL: 'url' } }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + expect(mockCreateExternalAlias).not.toBeCalled(); + }); + + it('should not create external alias for component in case the same external alias was created previously', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml(); + const component = getMockedComponent({ + externalAliases: [{ externalAliasId: '1', externalSource: EXTERNAL_SOURCE }], + }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + expect(mockCreateExternalAlias).not.toBeCalled(); + }); + + it('should not create external alias if no component present', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml({ id: undefined }); + + await syncComponent(TEST_TOKEN, compassYaml, 
TEST_FILE_NAME, event, event.project.default_branch); + + expect(mockCreateExternalAlias).not.toBeCalled(); + }); + + it('should update component without changing yaml links', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml({ + links: [ + createCompassYamlLink(CompassLinkType.Repository), + createCompassYamlLink(CompassLinkType.Repository), + createCompassYamlLink(CompassLinkType.Repository), + createCompassYamlLink(CompassLinkType.Repository), + createCompassYamlLink(CompassLinkType.Repository), + createCompassYamlLink(CompassLinkType.Project), + ], + }); + const component = getMockedComponent({ dataManager: { externalSourceURL: 'url' } }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + expect(mockUpdateComponent).toBeCalledWith( + expect.objectContaining({ + links: compassYaml.links, + }), + ); + }); + + it('should update component without changing yaml links if required link already exists', async () => { + const event = generatePushEvent({ + project: { + id: 1, + name: 'test', + default_branch: 'main', + web_url: 'https://url', + }, + }); + const compassYaml = getMockedCompassYaml({ + links: [createCompassYamlLink(CompassLinkType.Project)], + }); + const component = getMockedComponent({ dataManager: { externalSourceURL: 'url' } }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + expect(mockUpdateComponent).toBeCalledWith( + expect.objectContaining({ + links: compassYaml.links, + }), + ); + }); + + it('should update component with adding required yaml link', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml({ + links: 
[createCompassYamlLink(CompassLinkType.Project)], + }); + const component = getMockedComponent({ dataManager: { externalSourceURL: 'url' } }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + const expectedLinks = [ + ...compassYaml.links, + { + type: CompassLinkType.Repository, + url: event.project.web_url, + }, + ]; + + expect(mockUpdateComponent).toBeCalledWith( + expect.objectContaining({ + links: expectedLinks, + }), + ); + }); + + it('should update component with correct externalSourceURL', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml(); + const component = getMockedComponent({ dataManager: { externalSourceURL: 'url' } }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + expect(mockUpdateComponent).toBeCalledWith( + expect.objectContaining({ + dataManager: { + externalSourceURL: `${event.project.web_url}/blob/${event.project.default_branch}/${TEST_FILE_NAME}`, + }, + }), + ); + }); + + it('should update component with adding labels', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml({ + links: [createCompassYamlLink(CompassLinkType.Project)], + }); + const component = getMockedComponent({ dataManager: { externalSourceURL: 'url' } }); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + const expectedLabels = [...MOCK_COMPONENT_LABELS, IMPORT_LABEL, ...MOCK_GET_PROJECT_LABELS]; + + expect(mockUpdateComponent).toBeCalledWith( + expect.objectContaining({ + labels: expectedLabels, + }), + ); + }); + + it('should 
catch error when update component fails', async () => { + const event = generatePushEvent(); + const compassYaml = getMockedCompassYaml(); + const component = getMockedComponent(); + const error = new Error('test'); + mockGetComponent.mockResolvedValue({ + success: true, + data: { component }, + errors: [], + }); + + mockUpdateComponent.mockRejectedValue(error); + await syncComponent(TEST_TOKEN, compassYaml, TEST_FILE_NAME, event, event.project.default_branch); + + expect(reportSyncError).toBeCalledWith(error, expect.anything(), expect.anything()); + }); +}); diff --git a/src/services/sync-component-with-file/sync-component.ts b/src/services/sync-component-with-file/sync-component.ts new file mode 100644 index 0000000..47fb8fd --- /dev/null +++ b/src/services/sync-component-with-file/sync-component.ts @@ -0,0 +1,100 @@ +import { Component, CreateLinkInput } from '@atlassian/forge-graphql'; +import { CompassYaml, PushEvent } from '../../types'; +import { reportSyncError } from './report-sync-error'; +import { transformRelationshipsFromYamlConfig, transformFieldsFromYamlConfig } from './yaml-config-transforms'; +import validateConfigFile from './validate-config-file'; +import { EXTERNAL_SOURCE, IMPORT_LABEL } from '../../constants'; +import { createExternalAlias, getComponent, updateComponent } from '../../client/compass'; +import { appendLink } from '../../utils/append-link'; +import { getProjectLabels } from '../get-labels'; +import { getProjectById } from '../../client/gitlab'; + +const getFileUrl = (filePath: string, event: PushEvent, branchName: string) => { + return `${event.project.web_url}/blob/${branchName}/${filePath}`; +}; + +const shouldCreateExternalAlias = (projectId: string, component?: Component): boolean => { + if (component) { + const isAliasAlreadyExisted = component.externalAliases + ? 
component.externalAliases.some( + (alias) => alias.externalAliasId === projectId && alias.externalSource === EXTERNAL_SOURCE, + ) + : false; + + return !component.dataManager && !isAliasAlreadyExisted; + } + + return false; +}; + +export const syncComponent = async ( + token: string, + componentYaml: CompassYaml, + filePath: string, + event: PushEvent, + trackingBranch: string, +): Promise => { + const startTime = Date.now(); + const externalSourceURL = getFileUrl(filePath, event, trackingBranch); + const { name, id: componentId, fields, description, ownerId, links, relationships } = componentYaml; + let currentComponent: Component | null; + + console.log({ message: 'Syncing component with file', filePath, componentId }); + + try { + currentComponent = componentId ? await getComponent(componentId) : null; + + if (shouldCreateExternalAlias(event.project.id.toString(), currentComponent)) { + await createExternalAlias({ + componentId, + externalAlias: { + externalId: event.project.id.toString(), + externalSource: EXTERNAL_SOURCE, + }, + }); + } + + validateConfigFile(componentYaml, currentComponent); + + const { topics } = await getProjectById(token, event.project.id); + const projectLabels = await getProjectLabels(event.project.id, token, topics); + + const formattedLabels = projectLabels.map((label) => label.split(' ').join('-').toLowerCase()); + + const labels = currentComponent?.labels + ? 
[...currentComponent.labels, IMPORT_LABEL, ...formattedLabels] + : [IMPORT_LABEL, ...formattedLabels]; + + await updateComponent({ + id: componentId, + name, + fields: transformFieldsFromYamlConfig(fields), + description: description || null, + ownerId: ownerId || null, + links: appendLink(event.project.web_url, links) as CreateLinkInput[], + relationships: transformRelationshipsFromYamlConfig(relationships), + dataManager: { + externalSourceURL, + }, + labels, + currentComponent, + }); + } catch (error) { + console.warn({ + message: 'syncComponentWithFile failed at the 2nd stage', + error, + duration: Date.now() - startTime, + }); + + if (currentComponent) { + await reportSyncError(error, componentId, externalSourceURL); + } + + return; + } + + console.log({ + message: 'syncComponentWithFile completed', + duration: Date.now() - startTime, + }); +}; diff --git a/src/services/sync-component-with-file/unlink-component.test.ts b/src/services/sync-component-with-file/unlink-component.test.ts new file mode 100644 index 0000000..6ad5b46 --- /dev/null +++ b/src/services/sync-component-with-file/unlink-component.test.ts @@ -0,0 +1,61 @@ +/* eslint-disable import/first */ + +import { mockAgg, mockDeleteExternalAlias, mockDetachDataManager } from '../../__tests__/helpers/mock-agg'; + +mockAgg(); + +import { unlinkComponent } from './unlink-component'; +import { CompassYaml } from '../../types'; + +describe('Unlink component from file', () => { + const mockCompassYaml: CompassYaml = { + id: 'abc123', + }; + const mockProjId = 'mockProjId-1'; + + beforeEach(() => { + jest.clearAllMocks(); + }); + + test('detaches data manager and deletes external alias when config removed', async () => { + mockDetachDataManager.mockResolvedValueOnce({ + success: true, + errors: [], + }); + mockDeleteExternalAlias.mockResolvedValueOnce({ + success: true, + errors: [], + }); + await unlinkComponent(mockCompassYaml.id, mockProjId); + expect(mockDetachDataManager).toBeCalled(); + 
expect(mockDeleteExternalAlias).toBeCalled(); + }); + + test('throws error if detachDataManager fails', async () => { + mockDetachDataManager.mockResolvedValueOnce({ + success: false, + errors: [{ message: 'testError' }], + }); + mockDeleteExternalAlias.mockResolvedValueOnce({ + success: true, + errors: [], + }); + await expect(unlinkComponent(mockCompassYaml.id, mockProjId)).rejects.toThrow( + new Error('Error unlinking component: testError'), + ); + }); + + test('throws error if deleteExternalAlias fails', async () => { + mockDetachDataManager.mockResolvedValueOnce({ + success: false, + errors: [], + }); + mockDeleteExternalAlias.mockResolvedValueOnce({ + success: true, + errors: [{ message: 'testError' }], + }); + await expect(unlinkComponent(mockCompassYaml.id, mockProjId)).rejects.toThrow( + new Error('Error unlinking component: testError'), + ); + }); +}); diff --git a/src/services/sync-component-with-file/unlink-component.ts b/src/services/sync-component-with-file/unlink-component.ts new file mode 100644 index 0000000..15c72ae --- /dev/null +++ b/src/services/sync-component-with-file/unlink-component.ts @@ -0,0 +1,39 @@ +import { EXTERNAL_SOURCE } from '../../constants'; +import { deleteExternalAlias, detachDataManager } from '../../client/compass'; +import { GraphqlGatewayError } from '../../models/errors'; + +export const unlinkComponent = async (componentId: string, repoId: string): Promise => { + console.log(`Unlinking component ${componentId}`); + + let errors: string[] = []; + const startTimeDetachFile = Date.now(); + try { + await detachDataManager({ componentId }); + } catch (error) { + errors = + error instanceof GraphqlGatewayError + ? errors.concat(error.errors.map((e) => e.message)) + : errors.concat([error.message]); + } + + try { + await deleteExternalAlias({ + componentId, + externalAlias: { + externalId: repoId, + externalSource: EXTERNAL_SOURCE, + }, + }); + } catch (error) { + errors = + error instanceof GraphqlGatewayError + ? 
errors.concat(error.errors.map((e) => e.message)) + : errors.concat([error.message]); + } + + console.debug(`unlinkComponentFromFile took ${Date.now() - startTimeDetachFile} ms`); + if (errors.length === 0) { + return; + } + throw new Error(`Error unlinking component: ${errors.join(', ')}`); +}; diff --git a/src/services/sync-component-with-file/validate-config-file/config-file-parser.test.ts b/src/services/sync-component-with-file/validate-config-file/config-file-parser.test.ts new file mode 100644 index 0000000..c736663 --- /dev/null +++ b/src/services/sync-component-with-file/validate-config-file/config-file-parser.test.ts @@ -0,0 +1,739 @@ +/* eslint-disable import/first */ +import { mockForgeApi } from '../../../__tests__/helpers/forge-helper'; + +mockForgeApi(); +// eslint-disable-next-line import/order +import { CompassComponentType, CompassRelationshipType } from '@atlassian/forge-graphql'; +import { YamlFields, YamlLink, YamlRelationships } from '../../../types'; +import { types, relationshipKeyTypes } from '../../../models/expected-compass-types'; + +import ConfigFileParser from './config-file-parser'; +import { MAX_DESCRIPTION_LENGTH, MAX_NAME_LENGTH } from '../../../constants'; +import { TEST_COMPONENT_ID } from '../../../__tests__/fixtures/gitlab-data'; + +const BASE_CONFIG = { + id: 'ari:cloud:compass:122345:component/12345/12345', + name: 'Hello world', +}; + +const MOCK_OWNER_ID = 'ari:cloud:teams::team/12345'; + +describe('ConfigFileParser', () => { + let serviceConfigFileParser: ConfigFileParser; + let configFileParser: ConfigFileParser; + + beforeEach(() => { + serviceConfigFileParser = new ConfigFileParser(CompassComponentType.Service); + configFileParser = new ConfigFileParser(CompassComponentType.Other); + }); + + describe('Fields validators', () => { + test('does not add error when non-service component has null fields', () => { + const config = { + ...BASE_CONFIG, + fields: null as YamlFields, + }; + + 
configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error when non-service component has empty fields', () => { + const config = { + ...BASE_CONFIG, + fields: {}, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error when non-service component is missing fields', () => { + configFileParser.validateConfig(BASE_CONFIG); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error when non-service component defines a field', () => { + const config = { + ...BASE_CONFIG, + fields: { + tier: 3, + }, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"tier" is not a valid property']); + }); + + test('adds error if service component has no fields', () => { + serviceConfigFileParser.validateConfig(BASE_CONFIG); + expect(serviceConfigFileParser.errors).toEqual([ + '"fields" must be included in the configuration file', + 'the "fields" property in the configuration file must include "tier"', + ]); + }); + + test('add no errors if service component has fields', () => { + const config = { + ...BASE_CONFIG, + fields: { + tier: '4', + }, + }; + + serviceConfigFileParser.validateConfig(config); + expect(serviceConfigFileParser.errors).toEqual([]); + }); + + test("does not add error if field value isn't a string", () => { + const fields = { + tier: 4, + }; + + serviceConfigFileParser.validateFieldProperties(fields); + expect(serviceConfigFileParser.errors).toEqual([]); + }); + + test('adds error if field key is unrecognized', () => { + const fields = { + someKey: 4, + }; + + configFileParser.validateFieldProperties(fields); + expect(configFileParser.errors).toEqual(['"someKey" is not a valid property']); + }); + + test('truncates fields key if over string limit', () => { + const fields = { + Loremipsumdolorsitametconsectetur: 2, + }; + + configFileParser.validateFieldProperties(fields); 
+ expect(configFileParser.errors).toEqual(['"Loremipsumdolorsitametcon" is not a valid property']); + }); + + test("adds error when tier value isn't recognized", () => { + const fields = { + tier: 0, + }; + + serviceConfigFileParser.validateFieldProperties(fields); + expect(serviceConfigFileParser.errors).toEqual(['"tier" must have a value of: 1, 2, 3, 4']); + }); + + test('adds error when component is service and fields is empty', () => { + const fields = {}; + + serviceConfigFileParser.validateFieldProperties(fields); + expect(serviceConfigFileParser.errors).toEqual([ + 'the "fields" property in the configuration file must include "tier"', + ]); + }); + + test('adds error if field key is unrecognized in service component', () => { + const fields = { + someKey: 4, + }; + + serviceConfigFileParser.validateFieldProperties(fields); + expect(serviceConfigFileParser.errors).toEqual([ + 'the "fields" property in the configuration file must include "tier"', + '"someKey" must be one of the following keys: tier', + ]); + }); + + test('adds error if field key is unrecognized in service component', () => { + const fields = { + someKey: 4, + }; + + serviceConfigFileParser.validateFieldProperties(fields); + expect(serviceConfigFileParser.errors).toEqual([ + 'the "fields" property in the configuration file must include "tier"', + '"someKey" must be one of the following keys: tier', + ]); + }); + }); + + describe('validValueType', () => { + test('does not add error when optional property is set to null', () => { + const expectedType = types.OPTIONAL_STRING; + + configFileParser.validValueType(null, expectedType, 'name'); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error when optional property is set to value with string type', () => { + const expectedType = types.OPTIONAL_STRING; + + configFileParser.validValueType('hello world', expectedType, 'name'); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error when required property 
is set to null', () => { + const expectedType = types.REQUIRED_STRING; + + configFileParser.validValueType(null, expectedType, 'name'); + expect(configFileParser.errors).toEqual(['"name" must be of type "string"']); + }); + + test('does not add error when required string is set to value with string type', () => { + const expectedType = types.REQUIRED_STRING; + + configFileParser.validValueType('hello world', expectedType, 'name'); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error when links is not an array', () => { + const config = { + ...BASE_CONFIG, + links: {}, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"links" must be of type "array"']); + }); + }); + + describe('checkForMandatoryKeys', () => { + test('adds error if key is mandatory', () => { + const actualKeys = ['name']; + const expectedObject = { + id: types.REQUIRED_STRING, + }; + + configFileParser.checkForMandatoryKeys(actualKeys, expectedObject); + expect(configFileParser.errors).toEqual(['"id" must be included in the configuration file']); + }); + + test('does not add error if key not is mandatory', () => { + const actualKeys = ['name']; + const expectedObject = { + id: types.OPTIONAL_STRING, + }; + + configFileParser.checkForMandatoryKeys(actualKeys, expectedObject); + expect(configFileParser.errors).toEqual([]); + }); + }); + + describe('checkIfKeyIsUnknown', () => { + test('adds error if key is not expected', () => { + const key = 'foo'; + const expectedKeys = ['id', 'description']; + + configFileParser.checkIfKeyIsUnknown(key, expectedKeys); + expect(configFileParser.errors).toEqual(['"foo" is not a valid property']); + }); + + test('does not add error if key is expected', () => { + const key = 'id'; + const expectedKeys = ['id', 'description']; + + configFileParser.checkIfKeyIsUnknown(key, expectedKeys); + expect(configFileParser.errors).toEqual([]); + }); + }); + + describe('validateLinkProperties', () => { + test('adds 
error if link entry is missing mandatory url', () => { + const links = [ + { + type: 'DOCUMENT', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual(['the "links" property in the configuration file must include "url"']); + }); + + test('adds error if link entry is missing mandatory type', () => { + const links = [ + { + url: 'https://atlassian.com', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual(['the "links" property in the configuration file must include "type"']); + }); + + test('adds error if link entry is missing mandatory url and type', () => { + const links = [ + { + name: 'Test link', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual([ + 'the "links" property in the configuration file must include "type" and "url"', + ]); + }); + + test('does not add error when all mandatory keys are defined', () => { + const links = [ + { + type: 'DOCUMENT', + url: 'https://atlassian.com', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error when all known keys are defined', () => { + const links = [ + { + type: 'DOCUMENT', + url: 'https://atlassian.com', + name: 'Hello world', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error when unexpected key is added to entry', () => { + const links = [ + { + type: 'DOCUMENT', + url: 'https://atlassian.com', + unknownKey: 1, + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual(['"unknownKey" must be one of the following keys: type, url, name']); + }); + + describe('validate property types', () => { + test('adds error when type is not formatted correctly', () => { + const links = [ + { + type: 1, + url: 'https://atlassian.com', + }, + ]; + + 
configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual([ + '"1" is not a valid link type. The accepted values are: ' + + 'DOCUMENT, CHAT_CHANNEL, REPOSITORY, PROJECT, DASHBOARD, OTHER_LINK', + ]); + }); + + test('adds error when type is invalid', () => { + const links = [ + { + type: 'UNKNOWN_TYPE', + url: 'https://atlassian.com', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual([ + '"UNKNOWN_TYPE" is not a valid link type. The accepted values are: ' + + 'DOCUMENT, CHAT_CHANNEL, REPOSITORY, PROJECT, DASHBOARD, OTHER_LINK', + ]); + }); + + test('truncates type when over string limit', () => { + const links = [ + { + type: 'Loremipsumdolorsitametconsectetur', + url: 'https://atlassian.com', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual([ + '"Loremipsumdolorsitametcon" is not a valid link type. The accepted values are: ' + + 'DOCUMENT, CHAT_CHANNEL, REPOSITORY, PROJECT, DASHBOARD, OTHER_LINK', + ]); + }); + + test('adds error when url is not formatted correctly', () => { + const links = [ + { + type: 'DOCUMENT', + url: {}, + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual(['"url" must be of type "string"']); + }); + + test('adds error when name is not formatted correctly', () => { + const links = [ + { + type: 'DOCUMENT', + url: 'https://atlassian.com', + name: 1, + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual(['"name" must be of type "string"']); + }); + + test('does not add error when type is ON_CALL and component is a service', () => { + const links = [ + { + type: 'ON_CALL', + url: 'https://atlassian.com', + }, + ]; + + serviceConfigFileParser.validateLinkProperties(links); + expect(serviceConfigFileParser.errors).toEqual([]); + }); + + test('adds error when type is ON_CALL and component is not a service', () 
=> { + const links = [ + { + type: 'ON_CALL', + url: 'https://atlassian.com', + }, + ]; + + configFileParser.validateLinkProperties(links); + expect(configFileParser.errors).toEqual([ + '"ON_CALL" is not a valid link type. The accepted values are: ' + + 'DOCUMENT, CHAT_CHANNEL, REPOSITORY, PROJECT, DASHBOARD, OTHER_LINK', + ]); + }); + }); + }); + + describe('validate relationship properties', () => { + test('does not add error if relationships is missing DEPENDS_ON', () => { + const relationships = {}; + + configFileParser.validateTopLevelProperties(relationships, relationshipKeyTypes); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error if DEPENDS_ON exists', () => { + const relationships = { + DEPENDS_ON: [], + } as YamlRelationships; + + configFileParser.validateTopLevelProperties(relationships, relationshipKeyTypes); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error when unexpected key is added to relationships object', () => { + const relationships = { + DEPENDS_ON: [], + unknownKey: {}, + } as YamlRelationships; + + configFileParser.validateTopLevelProperties(relationships, relationshipKeyTypes); + expect(configFileParser.errors).toEqual(['"unknownKey" is not a valid property']); + }); + + test('adds error if DEPENDS_ON value is not an object', () => { + const relationships = { + DEPENDS_ON: 1, + }; + + configFileParser.validateTopLevelProperties(relationships, relationshipKeyTypes); + expect(configFileParser.errors).toEqual(['"DEPENDS_ON" must be of type "object"']); + }); + }); + + describe('validateRelationshipsArray', () => { + test('adds error if DEPENDS_ON value is not an object ', () => { + configFileParser.validateRelationshipsArray(1 as unknown as string[], CompassRelationshipType.DependsOn); + expect(configFileParser.errors).toEqual([]); + }); + + test("adds error if elements in DEPENDS_ON array aren't ARIs ", () => { + const endNodes = ['string:that:is:not:an:ari']; + + 
configFileParser.validateRelationshipsArray(endNodes, CompassRelationshipType.DependsOn); + expect(configFileParser.errors).toEqual(['"DEPENDS_ON elements" must be of type "ARI"']); + }); + + test('does not add error if elements in DEPENDS_ON array are ARIs ', () => { + const endNodes = [TEST_COMPONENT_ID]; + + configFileParser.validateRelationshipsArray(endNodes, CompassRelationshipType.DependsOn); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error if DEPENDS_ON is null', () => { + configFileParser.validateRelationshipsArray(null, CompassRelationshipType.DependsOn); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error if DEPENDS_ON is empty', () => { + configFileParser.validateRelationshipsArray([], CompassRelationshipType.DependsOn); + expect(configFileParser.errors).toEqual([]); + }); + }); + + describe('validateConfig', () => { + test('adds error when id is missing', () => { + const config = { + name: 'hello', + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"id" must be included in the configuration file']); + }); + + test('adds error when name is missing', () => { + const config = { + id: BASE_CONFIG.id, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"name" must be included in the configuration file']); + }); + + test('adds multiple errors when there are multiple formatting problems', () => { + const config = { + name: 'hello', + }; + + serviceConfigFileParser.validateConfig(config); + expect(serviceConfigFileParser.errors).toEqual([ + '"id" and "fields" must be included in the configuration file', + 'the "fields" property in the configuration file must include "tier"', + ]); + }); + + test('adds error when unexpected key is added to config file', () => { + const config = { + ...BASE_CONFIG, + unknownKey: '', + }; + + configFileParser.validateConfig(config); + 
expect(configFileParser.errors).toEqual(['"unknownKey" is not a valid property']); + }); + + test('does not add error when all mandatory properties exist', () => { + configFileParser.validateConfig(BASE_CONFIG); + expect(configFileParser.errors).toEqual([]); + }); + + describe('validate property types', () => { + test('adds error if id value is not an ari', () => { + const config = { + id: 'somestring', + name: 'Hello world', + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"id" must be of type "ARI"']); + }); + + test('adds error if name value is not a string', () => { + const config = { + ...BASE_CONFIG, + name: {}, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"name" must be of type "string"']); + }); + + test('does not add error if name value is max length', () => { + const config = { + ...BASE_CONFIG, + name: 'a'.repeat(MAX_NAME_LENGTH), + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error if name value is too long', () => { + const config = { + ...BASE_CONFIG, + name: 'a'.repeat(MAX_NAME_LENGTH + 1), + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([ + `"name" field is too long. 
Try again with a value no longer than ${MAX_NAME_LENGTH} characters.`, + ]); + }); + + test('adds error if name value is blank', () => { + const config = { + ...BASE_CONFIG, + name: ' ', + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"name" cannot be empty string.']); + }); + + test('adds error if description value is not a string', () => { + const config = { + ...BASE_CONFIG, + description: 1, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"description" must be of type "string"']); + }); + + test('does not add error if description value is max length', () => { + const config = { + ...BASE_CONFIG, + description: 'a'.repeat(MAX_DESCRIPTION_LENGTH), + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error if description value is too long', () => { + const config = { + ...BASE_CONFIG, + description: 'a'.repeat(MAX_DESCRIPTION_LENGTH + 1), + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([ + `"description" field is too long. 
Try again with a value no longer than ${MAX_DESCRIPTION_LENGTH} characters.`, + ]); + }); + + test('adds error if ownerId value is not a string', () => { + const config = { + ...BASE_CONFIG, + ownerId: {}, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"ownerId" must be of type "string"']); + }); + + test('adds error if fields value is not an object', () => { + const config = { + ...BASE_CONFIG, + fields: 'invalid field string', + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"fields" must be of type "object"']); + }); + + test('adds error if fields value is not an object and does not include tier', () => { + const config = { + ...BASE_CONFIG, + fields: 'invalid field string', + }; + + serviceConfigFileParser.validateConfig(config); + expect(serviceConfigFileParser.errors).toEqual([ + '"fields" must be of type "object"', + 'the "fields" property in the configuration file must include "tier"', + ]); + }); + + test('adds error if fields value is an array', () => { + const config = { + ...BASE_CONFIG, + fields: ['tier'], + }; + + serviceConfigFileParser.validateConfig(config); + expect(serviceConfigFileParser.errors).toEqual([ + '"fields" must be of type "object"', + 'the "fields" property in the configuration file must include "tier"', + ]); + }); + + test('adds error if links value is not an object', () => { + const config = { + ...BASE_CONFIG, + links: 1, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"links" must be of type "object"']); + }); + + test('does not add error if all types are valid', () => { + const config = { + ...BASE_CONFIG, + description: 'Test component', + ownerId: MOCK_OWNER_ID, + fields: [] as YamlFields, + links: [] as YamlLink[], + relationships: [] as YamlRelationships, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error if 
optional property is null', () => { + const config = { + ...BASE_CONFIG, + links: null as YamlLink, + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + + test('adds error if relationships value is not an object', () => { + const config = { + ...BASE_CONFIG, + relationships: 'invalid relationships string', + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"relationships" must be of type "object"']); + }); + + test('adds error if relationships value is an array', () => { + const config = { + ...BASE_CONFIG, + relationships: ['test'], + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual(['"relationships" must be of type "object"']); + }); + + test('does not add error if all types are valid', () => { + const config: any = { + ...BASE_CONFIG, + description: 'Test component', + ownerId: MOCK_OWNER_ID, + fields: [], + links: [], + relationships: [], + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + + test('does not add error if optional property is null', () => { + const config: any = { + ...BASE_CONFIG, + description: 'Test component', + fields: null, + links: [], + relationships: [], + }; + + configFileParser.validateConfig(config); + expect(configFileParser.errors).toEqual([]); + }); + }); + }); +}); diff --git a/src/services/sync-component-with-file/validate-config-file/config-file-parser.ts b/src/services/sync-component-with-file/validate-config-file/config-file-parser.ts new file mode 100644 index 0000000..2b438cc --- /dev/null +++ b/src/services/sync-component-with-file/validate-config-file/config-file-parser.ts @@ -0,0 +1,269 @@ +import { CompassComponentType } from '@atlassian/forge-graphql'; +import { + emptyStringErrorMessage, + invalidFieldTypeErrorMessage, + invalidKeyErrorMessage, + invalidLinkTypeErrorMessage, + invalidValueTypeErrorMessage, + 
maxValueLengthErrorMessage, + missingKeyErrorMessage, + missingNestedKeyErrorMessage, +} from '../../../models/error-messages'; + +import { + configKeyTypes, + fieldKeyTypes, + isRequired, + linkKeyTypes, + parseType, + relationshipKeyTypes, + serviceConfigKeyTypes, + serviceFieldKeyTypes, + types, + validLinkTypes, + validServiceLinkTypes, + validTierValues, +} from '../../../models/expected-compass-types'; +import { MAX_DESCRIPTION_LENGTH, MAX_NAME_LENGTH } from '../../../constants'; +import { parse } from '../../../utils/parse-ari'; + +const unwrapPropertyKeys = (object: any, expectedObject: any): any => ({ + actualKeys: Object.keys(object), + expectedKeys: Object.keys(expectedObject), +}); + +const isArray = (actualKeys: Array): boolean => actualKeys.length > 0 && actualKeys[0] === '0'; +export default class ConfigFileParser { + public errors: Array; + + private type: CompassComponentType; + + constructor(compassComponentType: CompassComponentType) { + this.errors = []; + this.type = compassComponentType; + } + + validateConfig(config: any): void { + const expectedObjectTypes = this.type === CompassComponentType.Service ? 
serviceConfigKeyTypes : configKeyTypes; + this.validateTopLevelProperties(config, expectedObjectTypes); + + const validFields = config.fields && this.validValueType(config.fields, expectedObjectTypes.fields, 'fields'); + if (validFields) { + this.validateFieldProperties(config.fields); + } else if (this.type === CompassComponentType.Service) { + this.addError(missingNestedKeyErrorMessage(['tier'], 'fields')); + } + const validLinks = config.links && this.validValueType(config.links, expectedObjectTypes.links, 'links'); + if (validLinks) { + this.validateLinkProperties(config.links); + } + const validRelationships = + config.relationships && + this.validValueType(config.relationships, expectedObjectTypes.relationships, 'relationships'); + if (validRelationships) { + this.validateRelationshipProperties(config.relationships); + } + } + + validateTopLevelProperties(object: any, expectedObject: any): void { + const { actualKeys, expectedKeys } = unwrapPropertyKeys(object, expectedObject); + + this.checkForMandatoryKeys(actualKeys, expectedObject); + + for (const key of actualKeys) { + this.checkIfKeyIsUnknown(key, expectedKeys); + if (!['fields', 'links', 'relationships'].includes(key)) { + this.validValueType(object[key], expectedObject[key], key); + } + } + } + + validateFieldProperties(fields: any): void { + const expectedObject = this.type === CompassComponentType.Service ? 
serviceFieldKeyTypes : fieldKeyTypes; + const { actualKeys, expectedKeys } = unwrapPropertyKeys(fields, expectedObject); + + if (isArray(actualKeys)) { + this.addError(invalidValueTypeErrorMessage('fields', 'object')); + if (this.type === CompassComponentType.Service) { + this.addError(missingNestedKeyErrorMessage(['tier'], 'fields')); + } + return; + } + + this.checkForMandatoryKeys(actualKeys, expectedObject, 'fields'); + this.checkForUnknownKeys(actualKeys, expectedKeys); + + for (const [key, value] of Object.entries(fields)) { + this.checkFields(key, value); + } + } + + validateLinkProperties(links: any): void { + if (links == null) { + return; + } + + if (!Array.isArray(links)) { + this.addError(invalidValueTypeErrorMessage('links', 'array')); + return; + } + + for (const link of links) { + this.checkLinkType(link.type); + + const actualKeys = Object.keys(link); + this.checkForMandatoryKeys(actualKeys, linkKeyTypes, 'links'); + this.checkForUnknownLinkKeys(actualKeys, link); + } + } + + validateRelationshipProperties(relationships: any): void { + const { actualKeys, expectedKeys } = unwrapPropertyKeys(relationships, relationshipKeyTypes); + + if (isArray(actualKeys)) { + this.addError(invalidValueTypeErrorMessage('relationships', 'object')); + return; + } + + this.checkForMandatoryKeys(actualKeys, expectedKeys, 'relationships'); + this.checkForUnknownKeys(actualKeys, expectedKeys); + + for (const key of Object.keys(relationships)) { + // Check that the relationship type is valid ie. 
DEPENDS_ON + if (Object.keys(relationshipKeyTypes).includes(key)) { + // Validate the array of ARIs + const validRelationshipsArray = !this.validValueType( + relationships[key], + (relationshipKeyTypes as any)[key], + key, + ); + if (validRelationshipsArray) { + this.validateRelationshipsArray(relationships, key); + } + } + } + } + + validateRelationshipsArray(endNodes: Array, relationshipType: string): void { + if (typeof endNodes !== 'object' || endNodes == null) { + return; + } + + endNodes.forEach((componentId) => { + this.validValueType(componentId, types.REQUIRED_ARI, `${relationshipType} elements`); + }); + } + + // CHECKS + checkForMandatoryKeys(actualKeys: Array, expectedObject: any, topLevelProperty?: string): void { + // Check if there are keys that are required to exist in config file but do not + const expectedKeys = Object.keys(expectedObject); + const mandatoryKeys = expectedKeys.filter((key) => isRequired(expectedObject[key])); + const missingKeys = []; + for (const key of mandatoryKeys) { + if (!actualKeys.includes(key)) { + missingKeys.push(key); + } + } + if (missingKeys.length > 0) { + const errorMessage = topLevelProperty + ? missingNestedKeyErrorMessage(missingKeys, topLevelProperty) + : missingKeyErrorMessage(missingKeys); + this.addError(errorMessage); + } + } + + checkForUnknownKeys(actualKeys: Array, expectedKeys: Array): void { + for (const key of actualKeys) { + this.checkIfKeyIsUnknown(key, expectedKeys, true); + } + } + + checkForUnknownLinkKeys(actualKeys: Array, link: any): void { + for (const key of actualKeys) { + this.checkIfKeyIsUnknown(key, Object.keys(linkKeyTypes), true); + if (key !== 'type') { + this.validValueType(link[key], (linkKeyTypes as any)[key], key); + } + } + } + + checkIfKeyIsUnknown(key: string, expectedKeys: Array, nested = false): void { + // Check if there are extra keys not defined in config file + if (!expectedKeys.includes(key)) { + const errorMessage = nested ? 
invalidKeyErrorMessage(key, expectedKeys) : invalidKeyErrorMessage(key); + this.addError(errorMessage); + } + } + + validValueType(value: any, expectedType: string, key: string): boolean { + let isValid = true; + // checkIfKeyIsUnknown will catch this + if (expectedType === undefined) { + return isValid; + } + + if (!isRequired(expectedType) && value == null) { + return isValid; + } + + if (expectedType === types.REQUIRED_ARI) { + try { + const { resourceId } = parse(value); + const ids = resourceId.split('/'); + if (ids.length !== 2) { + isValid = false; + this.addError(invalidValueTypeErrorMessage(key, 'ARI')); + } + } catch (e) { + isValid = false; + this.addError(invalidValueTypeErrorMessage(key, 'ARI')); + } + return isValid; + } + + const parsedExpectedTypes = parseType(expectedType).split('|'); + if (!parsedExpectedTypes.some((parsedExpectedType) => typeof value === parsedExpectedType)) { + isValid = false; + this.addError(invalidValueTypeErrorMessage(key, parsedExpectedTypes.join(', '))); + } + + if (isValid && key === 'name') { + if ((value as string).length > MAX_NAME_LENGTH) { + isValid = false; + this.addError(maxValueLengthErrorMessage(key, MAX_NAME_LENGTH)); + } + if ((value as string).trim().length === 0) { + isValid = false; + this.addError(emptyStringErrorMessage(key)); + } + } + + if (isValid && key === 'description' && (value as string).length > MAX_DESCRIPTION_LENGTH) { + isValid = false; + this.addError(maxValueLengthErrorMessage(key, MAX_DESCRIPTION_LENGTH)); + } + + return isValid; + } + + checkFields(key: string, value: any): void { + if (key === 'tier' && !validTierValues.includes(value.toString())) { + this.addError(invalidFieldTypeErrorMessage('tier', validTierValues)); + } + } + + checkLinkType(type: string): void { + if (type == null) { + return; + } + const linkTypes = this.type === CompassComponentType.Service ? 
validServiceLinkTypes : validLinkTypes; + if (typeof type !== 'string' || !linkTypes.includes(type.toUpperCase())) { + this.addError(invalidLinkTypeErrorMessage(type, linkTypes)); + } + } + + addError(message: string): void { + this.errors.push(message); + } +} diff --git a/src/services/sync-component-with-file/validate-config-file/index.ts b/src/services/sync-component-with-file/validate-config-file/index.ts new file mode 100644 index 0000000..5125d18 --- /dev/null +++ b/src/services/sync-component-with-file/validate-config-file/index.ts @@ -0,0 +1,20 @@ +import { Component } from '@atlassian/forge-graphql'; + +import { InvalidConfigFileError } from '../../../models/errors'; +import { CompassYaml } from '../../../types'; +import ConfigFileParser from './config-file-parser'; + +export default function validateConfigFile(file: CompassYaml, currentComponent?: Component): void { + let validationErrors: Array = []; + + if (!currentComponent) { + validationErrors.push(`Component with id ${file?.id} not found`); + } + + const configFileParser = new ConfigFileParser(currentComponent.type); + configFileParser.validateConfig(file); + validationErrors = validationErrors.concat(configFileParser.errors); + if (validationErrors.length > 0) { + throw new InvalidConfigFileError(validationErrors); + } +} diff --git a/src/services/sync-component-with-file/yaml-config-transforms.ts b/src/services/sync-component-with-file/yaml-config-transforms.ts new file mode 100644 index 0000000..d8c39f9 --- /dev/null +++ b/src/services/sync-component-with-file/yaml-config-transforms.ts @@ -0,0 +1,32 @@ +import { YamlFields, YamlRelationships } from '../../types'; + +export function transformRelationshipsFromYamlConfig(relationships: YamlRelationships): any[] { + const transformedRelationships: any[] = []; + if (!relationships) { + return transformedRelationships; + } + + for (const relationshipType of Object.keys(relationships)) { + transformedRelationships.push( + (relationships as 
any)[relationshipType].map((nodeId: any) => ({ + nodeId, + type: relationshipType, + })), + ); + } + return transformedRelationships.flat(); +} + +export function transformFieldsFromYamlConfig(fields: YamlFields): Record> { + if (!fields || Object.keys(fields).length === 0) { + return null; + } + + const outputFields: Record> = {}; + for (const [k, v] of Object.entries(fields)) { + if (v != null) { + outputFields[k] = Array.isArray(v) ? v.map(toString) : [v.toString()]; + } + } + return outputFields; +} diff --git a/src/services/webhook.test.ts b/src/services/webhook.test.ts new file mode 100644 index 0000000..5f7c564 --- /dev/null +++ b/src/services/webhook.test.ts @@ -0,0 +1,63 @@ +/* eslint-disable import/first */ +import { mocked } from 'jest-mock'; +import { storage, mockForgeApi, webTrigger } from '../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { getGroupWebhook, registerGroupWebhook } from '../client/gitlab'; +import { setupWebhook } from './webhooks'; +import { TEST_TOKEN } from '../__tests__/fixtures/gitlab-data'; + +jest.mock('../client/gitlab'); +const mockGetGroupWebhook = mocked(getGroupWebhook); +const mockRegisterGroupWebhook = mocked(registerGroupWebhook); + +const MOCK_GROUP_ID = 123; +const MOCK_WEBHOOK_KEY = `webhook-id-${MOCK_GROUP_ID}`; +const MOCK_WEBHOOK_SIGNATURE_KEY = `webhook-sign-id-${MOCK_GROUP_ID}`; +const MOCK_WEBHOOK_ID = 345; + +describe('webhook service', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('returns existing webhook from storage', async () => { + storage.get = jest.fn().mockReturnValueOnce(MOCK_WEBHOOK_ID); + storage.getSecret = jest.fn().mockReturnValueOnce(TEST_TOKEN); + mockGetGroupWebhook.mockResolvedValue({ id: 456 }); + + const result = await setupWebhook(123); + + expect(storage.set).not.toHaveBeenCalled(); + expect(result).toBe(MOCK_WEBHOOK_ID); + }); + + it('setups new webhook', async () => { + storage.get = jest.fn().mockReturnValueOnce(undefined); + storage.getSecret = 
jest.fn().mockReturnValueOnce(TEST_TOKEN); + webTrigger.getUrl = jest.fn().mockReturnValue('https://example.com'); + mockRegisterGroupWebhook.mockResolvedValue(MOCK_WEBHOOK_ID); + + const result = await setupWebhook(MOCK_GROUP_ID); + + expect(mockGetGroupWebhook).not.toHaveBeenCalled(); + expect(storage.set).toHaveBeenNthCalledWith(1, MOCK_WEBHOOK_KEY, MOCK_WEBHOOK_ID); + expect(storage.set).toHaveBeenNthCalledWith(2, MOCK_WEBHOOK_SIGNATURE_KEY, expect.anything()); + expect(result).toBe(MOCK_WEBHOOK_ID); + }); + + it('setups new webhook in case of invalid webhook in storage', async () => { + storage.get = jest.fn().mockReturnValueOnce(MOCK_WEBHOOK_KEY); + storage.getSecret = jest.fn().mockReturnValueOnce(TEST_TOKEN); + mockGetGroupWebhook.mockResolvedValue(null); + webTrigger.getUrl = jest.fn().mockReturnValue('https://example.com'); + mockRegisterGroupWebhook.mockResolvedValue(MOCK_WEBHOOK_ID); + + const result = await setupWebhook(MOCK_GROUP_ID); + + expect(storage.set).toHaveBeenNthCalledWith(1, MOCK_WEBHOOK_KEY, MOCK_WEBHOOK_ID); + expect(storage.set).toHaveBeenNthCalledWith(2, MOCK_WEBHOOK_SIGNATURE_KEY, expect.anything()); + expect(result).toBe(MOCK_WEBHOOK_ID); + }); +}); diff --git a/src/services/webhooks.ts b/src/services/webhooks.ts new file mode 100644 index 0000000..b1d2136 --- /dev/null +++ b/src/services/webhooks.ts @@ -0,0 +1,46 @@ +import { storage, webTrigger } from '@forge/api'; + +import { registerGroupWebhook, deleteGroupWebhook, getGroupWebhook } from '../client/gitlab'; +import { GITLAB_EVENT_WEBTRIGGER, STORAGE_KEYS, STORAGE_SECRETS } from '../constants'; +import { generateSignature } from '../utils/generate-signature-utils'; + +export const setupWebhook = async (groupId: number): Promise => { + const [existingWebhook, groupToken] = await Promise.all([ + storage.get(`${STORAGE_KEYS.WEBHOOK_KEY_PREFIX}${groupId}`), + storage.getSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupId}`), + ]); + + const isWebhookValid = existingWebhook && 
(await getGroupWebhook(groupId, existingWebhook, groupToken)) !== null; + + if (isWebhookValid) { + return existingWebhook; + } + + const webtriggerURL = await webTrigger.getUrl(GITLAB_EVENT_WEBTRIGGER); + const webtriggerURLWithGroupId = `${webtriggerURL}?groupId=${groupId}`; + const webhookSignature = generateSignature(); + const webhookId = await registerGroupWebhook({ + groupId, + url: webtriggerURLWithGroupId, + token: groupToken, + signature: webhookSignature, + }); + + await Promise.all([ + storage.set(`${STORAGE_KEYS.WEBHOOK_KEY_PREFIX}${groupId}`, webhookId), + storage.set(`${STORAGE_KEYS.WEBHOOK_SIGNATURE_PREFIX}${groupId}`, webhookSignature), + ]); + + return webhookId; +}; + +export const deleteWebhook = async (groupId: number): Promise => { + const [webhookId, groupToken] = await Promise.all([ + storage.get(`${STORAGE_KEYS.WEBHOOK_KEY_PREFIX}${groupId}`), + storage.getSecret(`${STORAGE_SECRETS.GROUP_TOKEN_KEY_PREFIX}${groupId}`), + ]); + + if (webhookId) { + await deleteGroupWebhook(groupId, webhookId, groupToken); + } +}; diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 0000000..33044a0 --- /dev/null +++ b/src/types.ts @@ -0,0 +1,393 @@ +import { CompassComponentType, CreateLinkInput } from '@atlassian/forge-graphql'; + +// 2nd parameter passed into extension point & webtrigger functions +type InvocationContext = { + principal: { + accountId: string; + }; + installContext: string; +}; + +type WebtriggerRequest = { + body: string; + queryParameters: { + groupId: number[]; + }; + headers: { + [key: string]: string[]; + }; +}; + +type WebtriggerResponse = { + body: string; + statusCode: number; + headers: Record; +}; + +type GroupAccessToken = { + user_id: number; + scopes: string[]; + name: string; + expires_at: string; + id: number; + active: boolean; + created_at: string; + revoked: boolean; + access_level: number; +}; + +type BaseGitlabEvent = { + object_kind: string; + project: { + id: number; + name: string; + default_branch: 
string; + web_url: string; + }; +}; + +type PushEvent = BaseGitlabEvent & { + before: string; + after: string; + ref: string; + checkout_sha: string; + commits: { + added: string[]; + modified: string[]; + removed: string[]; + }; +}; + +type MergeRequestEvent = BaseGitlabEvent & { + object_attributes: { + target_branch: string; + }; +}; + +type Build = { + id: number; + stage: string; + name: string; + status: string; + created_at: string; + started_at: string; + finished_at: string | null; + duration: number; + queued_duration: number; + when: string; + manual: boolean; + allow_failure: boolean; + user: any; + runner: any; + artifacts_file: any; + environment: null | { + name: string; + action: string; + deployment_tier: string; + }; +}; + +type PipelineEvent = BaseGitlabEvent & { + object_attributes: { + id: number; + ref: string; + tag: boolean; + sha: string; + before_sha: string; + source: string; + status: string; + detailed_status: string; + stages: string[]; + created_at: string; + finished_at: string | null; + duration: number; + queued_duration: any; + variables: any[]; + }; + builds: Build[]; +}; + +type DeploymentEvent = BaseGitlabEvent & { deployment_id: number; environment: string }; + +type GitlabEvent = PushEvent | MergeRequestEvent | PipelineEvent | DeploymentEvent; + +// Config as code types +type YamlFields = { + tier?: number; +}; + +type YamlLink = { + type: string; + url: string; + name?: string | null; + id?: string; +}; + +type YamlRelationships = { + DEPENDS_ON?: string[]; +}; + +type CompassYaml = { + id?: string; + name?: string; + description?: string; + ownerId?: string; + fields?: YamlFields; + links?: Array; + relationships?: YamlRelationships; +}; + +type ComponentSyncPayload = { + componentYaml: CompassYaml; + absoluteFilePath: string; +}; + +type ComponentChanges = { + componentsToSync: ComponentSyncPayload[]; + componentsToUnlink: CompassYaml[]; +}; + +type RegisterWebhookPayload = { + groupId: number; + url: string; + signature: 
string; + token: string; +}; + +type CommitFileDiff = { + diff: string; + old_path: string; + new_path: string; + new_file: boolean; + renamed_file: boolean; + deleted_file: boolean; +}; + +type DiffsByChangeType = { + added: CommitFileDiff[]; + modified: CommitFileDiff[]; + removed: CommitFileDiff[]; +}; + +enum Queues { + IMPORT = 'import-queue', +} + +enum COMPASS_GATEWAY_MESSAGES { + COMPONENT_NOT_FOUND = 'Component not found', +} + +type GitlabAPIGroup = { + full_name: string; + name: string; + id: number; + path: string; +}; + +type GitlabAPIProject = { + id: number; + description: string; + name: string; + topics: string[]; + default_branch: string; + web_url: string; + namespace: { + id: number; + full_path: string; + name: string; + path: string; + }; + created_at: string; +}; + +type Project = { + id: number; + description: string | null; + name: string; + url: string; + labels: string[]; + defaultBranch: string; + groupName: string; + groupPath: string; + groupFullPath: string; +}; + +type ProjectImportStatus = { + isManaged: boolean; + isCompassFilePrOpened: boolean; + hasComponent: boolean; +}; + +type ProjectReadyForImport = { + componentId?: string; + componentLinks?: CreateLinkInput[]; + componentType?: CompassComponentType; + shouldOpenMR?: boolean; +} & ProjectImportStatus & + Project; + +type ImportableProject = ProjectReadyForImport & { + type: CompassComponentType; +}; + +type ProjectImportResult = { + failed: ImportableProject[]; + total: number; +}; + +type ImportStatus = { + success: number; + inProgress: number; + failed: number; +}; + +type ProjectBranch = { + name: string; +}; + +type Deployment = { + id: number; + created_at: string; + updated_at: string; + deployable: { + status: string; + finished_at: string; + pipeline: { + id: number; + web_url: string; + }; + }; + environment: { + name: string; + id: number; + }; + status: string; +}; + +type GitlabApiPipeline = { + id: number; + project_id: number; + status: string; + ref: string; 
+ web_url: string; + created_at: string; + updated_at: string; +}; + +type Metric = { + metricAri: string; + value: number; + timestamp?: string; +}; + +type MetricsEventPayload = { + projectID: string; + metrics: Metric[]; +}; + +type MergeRequest = { + merged_at: string; + created_at: string; +}; + +enum MergeRequestState { + OPENED = 'opened', + CLOSED = 'closed', + LOCKED = 'locked', + MERGED = 'merged', +} + +enum MergeRequestOrderBy { + UPDATED_AT = 'updated_at', + CREATED_AT = 'created_at', + TITLE = 'title', +} + +enum EnvironmentTier { + PRODUCTION = 'production', + STAGING = 'staging', + TESTING = 'testing', + DEVELOPMENT = 'development', + OTHER = 'other', +} + +enum GitlabPipelineStates { + CREATED = 'created', + WAITING_FOR_RESOURCE = 'waiting_for_resource', + PREPARING = 'preparing', + PENDING = 'pending', + RUNNING = 'running', + SUCCESS = 'success', + FAILED = 'failed', + CANCELED = 'canceled', + SKIPPED = 'skipped', + MANUAL = 'manual', + SCHEDULED = 'scheduled', +} + +enum GitLabAccessLevels { + NO_ACCESS = 0, + MINIMAL_ACCESS = 5, + GUEST = 10, + REPORTER = 20, + DEVELOPER = 30, + MAINTAINER = 40, + OWNER = 50, +} + +type Environment = { + id: number; + name: string; + tier: EnvironmentTier; +}; + +type GroupProjectsResponse = { + total: number; + projects: ProjectReadyForImport[]; +}; + +export type { + WebtriggerRequest, + WebtriggerResponse, + GitlabAPIGroup, + GroupAccessToken, + GitlabEvent, + PushEvent, + MergeRequestEvent, + PipelineEvent, + DeploymentEvent, + ComponentChanges, + RegisterWebhookPayload, + CommitFileDiff, + CompassYaml, + ComponentSyncPayload, + YamlLink, + YamlRelationships, + YamlFields, + DiffsByChangeType, + GitlabAPIProject, + GitlabApiPipeline, + Project, + ProjectReadyForImport, + ImportableProject, + ProjectImportResult, + ImportStatus, + InvocationContext, + ProjectBranch, + Deployment, + MetricsEventPayload, + MergeRequest, + Metric, + Environment, + GroupProjectsResponse, +}; + +export { + Queues, + 
COMPASS_GATEWAY_MESSAGES, + MergeRequestState, + MergeRequestOrderBy, + EnvironmentTier, + GitlabPipelineStates, + GitLabAccessLevels, +}; diff --git a/src/utils/append-link.ts b/src/utils/append-link.ts new file mode 100644 index 0000000..db26cae --- /dev/null +++ b/src/utils/append-link.ts @@ -0,0 +1,24 @@ +import { CompassLinkType } from '@atlassian/forge-graphql'; + +import { MAX_LINKS_OF_TYPE } from '../constants'; +import { YamlLink } from '../types'; + +export const appendLink = (requiredLink: string, configLinks: YamlLink[] = []): YamlLink[] => { + if (configLinks.filter((link) => link.type === CompassLinkType.Repository).length >= MAX_LINKS_OF_TYPE) { + return configLinks; + } + + const found = configLinks?.some((configLink) => configLink.url.includes(requiredLink)); + + if (!found) { + return [ + ...configLinks, + { + type: CompassLinkType.Repository, + url: requiredLink, + }, + ]; + } + + return configLinks; +}; diff --git a/src/utils/create-compass-yaml.ts b/src/utils/create-compass-yaml.ts new file mode 100644 index 0000000..cd2708b --- /dev/null +++ b/src/utils/create-compass-yaml.ts @@ -0,0 +1,37 @@ +import yaml from 'js-yaml'; +import { Component } from '@atlassian/forge-graphql'; +import { CompassYaml } from '../types'; + +export const generateCompassYamlData = (projectURL: string, component: Component): CompassYaml => { + const { fields, name, description, ownerId, id: componentId, relationships } = component; + + const compassYMLFields = fields + ? { + tier: Number((fields['compass:tier'] as string[])[0]), + } + : {}; + + return { + name, + id: componentId, + description, + ownerId, + fields: compassYMLFields, + links: [ + { + type: 'REPOSITORY', + name: null, + url: projectURL, + }, + ], + relationships: { + DEPENDS_ON: relationships ? 
relationships.map((relationship) => relationship.nodeId) : [], + }, + }; +}; + +export const createCompassYml = (compassYamlData: CompassYaml): string => { + const compassYML = yaml.dump(compassYamlData, { lineWidth: -1, quotingType: "'" }); + + return Buffer.from(compassYML).toString('base64'); +}; diff --git a/src/utils/fetchPaginatedData.ts b/src/utils/fetchPaginatedData.ts new file mode 100644 index 0000000..c45c466 --- /dev/null +++ b/src/utils/fetchPaginatedData.ts @@ -0,0 +1,26 @@ +import { GitLabHeaders, GitlabPaginatedFetch } from '../client/gitlab'; + +export const fetchPaginatedData = async ( + fetchFn: GitlabPaginatedFetch, + fetchFnParameters: Record<'groupToken', string> & P, + page = 1, + perPage = 100, +): Promise => { + const { data: firstPageData, headers } = await fetchFn(page, perPage, fetchFnParameters); + const total = Number(headers.get(GitLabHeaders.PAGINATION_TOTAL)); + + if (total <= perPage) { + return firstPageData; + } + + const numberOfPages = Math.ceil(total / perPage); + const promises = []; + + for (let pageNumber = 2; pageNumber <= numberOfPages; pageNumber += 1) { + promises.push(fetchFn(pageNumber, perPage, fetchFnParameters)); + } + + const restOfData = await Promise.all(promises); + + return [...firstPageData, ...restOfData.map(({ data }) => data).flat()]; +}; diff --git a/src/utils/generate-signature-utils.ts b/src/utils/generate-signature-utils.ts new file mode 100644 index 0000000..2f14cc8 --- /dev/null +++ b/src/utils/generate-signature-utils.ts @@ -0,0 +1,5 @@ +import { randomBytes } from 'crypto'; + +export const generateSignature = () => { + return randomBytes(256).toString('hex'); +}; diff --git a/src/utils/get-forge-app-id.ts b/src/utils/get-forge-app-id.ts new file mode 100644 index 0000000..7c31000 --- /dev/null +++ b/src/utils/get-forge-app-id.ts @@ -0,0 +1,10 @@ +import { MissingAppIdError } from '../models/errors'; + +export const getForgeAppId = (): string => { + const forgeAppId = process.env.FORGE_APP_ID; + if 
(!forgeAppId) { + throw new MissingAppIdError(); + } + + return forgeAppId; +}; diff --git a/src/utils/has-deployment-after-28days.ts b/src/utils/has-deployment-after-28days.ts new file mode 100644 index 0000000..39d1cc3 --- /dev/null +++ b/src/utils/has-deployment-after-28days.ts @@ -0,0 +1,13 @@ +import { getProjectById } from '../client/gitlab'; +import { getDateInThePast } from './time-utils'; +import { DAYS_TO_CALC } from '../constants'; +import { getDeploymentAfter28Days } from '../services/deployment'; + +export const hasDeploymentAfter28Days = async (projectId: number, groupToken: string): Promise => { + const dateBefore = getDateInThePast(DAYS_TO_CALC + 1); + + const { created_at: dateAfter } = await getProjectById(groupToken, projectId); + const data = await getDeploymentAfter28Days(groupToken, projectId, dateAfter, dateBefore); + + return data.length > 0; +}; diff --git a/src/utils/parse-ari.ts b/src/utils/parse-ari.ts new file mode 100644 index 0000000..cc1e8cf --- /dev/null +++ b/src/utils/parse-ari.ts @@ -0,0 +1,22 @@ +import { InvalidAriError } from '../models/errors'; + +export const parse = (ariString: string) => { + if (!ariString) { + throw new InvalidAriError(ariString); + } + const match = /^ari:cloud:([^:/]+):([^:/]*):([^:/]*)\/(.*)$/.exec(ariString); + if (!match || match.length !== 5) { + throw new InvalidAriError(ariString); + } + const terms = match.map((a) => (a && a.length > 0 ? 
a : undefined)); + const resourceOwner = terms[1]; + if (!resourceOwner) { + throw new InvalidAriError(ariString); + } + return { + resourceOwner, + cloudId: terms[2], + resourceType: terms[3], + resourceId: terms[4], + }; +}; diff --git a/src/utils/push-event-utils.test.ts b/src/utils/push-event-utils.test.ts new file mode 100644 index 0000000..ff0d5d0 --- /dev/null +++ b/src/utils/push-event-utils.test.ts @@ -0,0 +1,59 @@ +/* eslint-disable import/first */ +import { mockForgeApi } from '../__tests__/helpers/forge-helper'; + +mockForgeApi(); + +import { DiffsByChangeType } from '../types'; +import { createCommitFileDiff } from '../__tests__/helpers/gitlab-helper'; +import { groupDiffsByChangeType } from './push-event-utils'; + +const validCompassYamlName = '/compass.yaml'; +const invalidCompassYamlName = '/invalidName.yaml'; + +describe('groupDiffsByChangeType', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should empty DiffsByChangeType if compass.yaml file not changed', () => { + const filesDiffs = [createCommitFileDiff({ new_path: invalidCompassYamlName, old_path: invalidCompassYamlName })]; + const expectedResult: DiffsByChangeType = { added: [], modified: [], removed: [] }; + + const result = groupDiffsByChangeType(filesDiffs); + + expect(result).toEqual(expectedResult); + }); + + it('should push diff to added type if compass.yaml added', () => { + const filesDiffs = [createCommitFileDiff({ new_path: validCompassYamlName, new_file: true })]; + const expectedResult: DiffsByChangeType = { added: [filesDiffs[0]], modified: [], removed: [] }; + + const result = groupDiffsByChangeType(filesDiffs); + + expect(result).toEqual(expectedResult); + }); + + it('should push diff to removed type if the compass.yaml deleted or renamed', () => { + const filesDiffs = [ + createCommitFileDiff({ new_path: validCompassYamlName, deleted_file: true }), + createCommitFileDiff({ new_path: invalidCompassYamlName, renamed_file: true, old_path: validCompassYamlName 
}), + ]; + const expectedResult: DiffsByChangeType = { added: [], modified: [], removed: [...filesDiffs] }; + + const result = groupDiffsByChangeType(filesDiffs); + + expect(result).toEqual(expectedResult); + }); + + it('should push diff to modified type if the compass.yaml changed', () => { + const filesDiffs = [ + createCommitFileDiff({ diff: 'changed diff', old_path: validCompassYamlName }), + createCommitFileDiff({ new_path: validCompassYamlName, renamed_file: true }), + ]; + const expectedResult: DiffsByChangeType = { added: [], modified: [...filesDiffs], removed: [] }; + + const result = groupDiffsByChangeType(filesDiffs); + + expect(result).toEqual(expectedResult); + }); +}); diff --git a/src/utils/push-event-utils.ts b/src/utils/push-event-utils.ts new file mode 100644 index 0000000..58eeb5b --- /dev/null +++ b/src/utils/push-event-utils.ts @@ -0,0 +1,45 @@ +import { CommitFileDiff, DiffsByChangeType, PushEvent } from '../types'; + +const CONFIG_AS_CODE_FILE_REGEX = /(\/compass\.yml$)|^compass\.yml$|(\/compass\.yaml$)|^compass\.yaml$/; + +const isCompassYamlFile = (diff: CommitFileDiff) => + diff.new_path.match(CONFIG_AS_CODE_FILE_REGEX) || diff.old_path?.match(CONFIG_AS_CODE_FILE_REGEX); + +const isDestructiveRename = (diff: CommitFileDiff) => + diff.renamed_file && CONFIG_AS_CODE_FILE_REGEX.test(diff.old_path) && !CONFIG_AS_CODE_FILE_REGEX.test(diff.new_path); + +const isRemovedFile = (diff: CommitFileDiff) => diff.deleted_file || isDestructiveRename(diff); + +const isModifiedFile = (diff: CommitFileDiff) => + (diff.diff.length > 0 && !diff.deleted_file && !diff.new_file) || + (diff.renamed_file && CONFIG_AS_CODE_FILE_REGEX.test(diff.new_path)); + +export const groupDiffsByChangeType = (filesDiffs: CommitFileDiff[]) => { + return filesDiffs.reduce( + (result, diff) => { + // only grab compass.yml files + if (isCompassYamlFile(diff)) { + if (diff.new_file) { + result.added.push(diff); + return result; + } + if (isRemovedFile(diff)) { + 
result.removed.push(diff); + return result; + } + if (isModifiedFile(diff)) { + result.modified.push(diff); + return result; + } + } + return result; + }, + { added: [], modified: [], removed: [] }, + ); +}; + +export const isEventForTrackingBranch = (event: PushEvent, trackingBranch: string): boolean => { + const trackingBranchRef = `refs/heads/${trackingBranch}`; + + return event.ref === trackingBranchRef; +}; diff --git a/src/utils/storage-utils.ts b/src/utils/storage-utils.ts new file mode 100644 index 0000000..bcb801b --- /dev/null +++ b/src/utils/storage-utils.ts @@ -0,0 +1,27 @@ +import { ListResult, startsWith, storage } from '@forge/api'; +import { chunk } from 'lodash'; + +import { sleep } from './time-utils'; +import { STORAGE_KEYS } from '../constants'; + +export const deleteKeysFromStorageByChunks = async ( + keys: string[], + chunkSize: number, + delay: number, +): Promise => { + const keyChunks = chunk(keys, chunkSize); + + for (const keyChunk of keyChunks) { + await Promise.all(keyChunk.map((key: string) => storage.delete(key))); + await sleep(delay); + } +}; + +export const getGroupIds = async (): Promise => { + const groupsKeys: ListResult = await storage + .query() + .where('key', startsWith(STORAGE_KEYS.GROUP_KEY_PREFIX)) + .getMany(); + + return groupsKeys.results.map(({ key }) => Number(key.replace(STORAGE_KEYS.GROUP_KEY_PREFIX, ''))); +}; diff --git a/src/utils/time-utils.ts b/src/utils/time-utils.ts new file mode 100644 index 0000000..a8e0d0d --- /dev/null +++ b/src/utils/time-utils.ts @@ -0,0 +1,10 @@ +import { DAYS_TO_CALC, MILLISEC_IN_DAY } from '../constants'; + +export const sleep = (ms: number) => { + return new Promise((resolve) => { + setTimeout(resolve, ms); + }); +}; + +export const getDateInThePast = (daysToCalc = DAYS_TO_CALC) => + new Date(Date.now() - daysToCalc * MILLISEC_IN_DAY).toISOString(); diff --git a/src/utils/url-utils.ts b/src/utils/url-utils.ts new file mode 100644 index 0000000..6cf579a --- /dev/null +++ 
b/src/utils/url-utils.ts @@ -0,0 +1,2 @@ +export const queryParamsGenerator = (params: { [key: string]: string }): string => + new URLSearchParams(params).toString(); diff --git a/src/utils/webtrigger-utils.ts b/src/utils/webtrigger-utils.ts new file mode 100644 index 0000000..9a46afb --- /dev/null +++ b/src/utils/webtrigger-utils.ts @@ -0,0 +1,23 @@ +import { WebtriggerResponse } from '../types'; + +export const serverResponse = ( + message: string, + // eslint-disable-next-line default-param-last + statusCode = 200, + parameters?: Record, +): WebtriggerResponse => { + const body = JSON.stringify({ + message, + success: statusCode >= 200 && statusCode < 300, + ...(parameters !== undefined && { parameters }), + }); + const defaultHeaders = { + 'Content-Type': ['application/json'], + }; + + return { + body, + statusCode, + headers: defaultHeaders, + }; +}; diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..3068c82 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "module": "esnext", + "target": "es2019", + "sourceMap": true, + "moduleResolution": "node", + "esModuleInterop": true, + "lib": ["dom", "dom.iterable", "esnext"], + "types": ["jest", "node"], + "baseUrl": "./", + "allowJs": true, + "jsx": "react", + "jsxFactory": "ForgeUI.createElement", + "noImplicitAny": true, + "skipLibCheck": true, + "allowSyntheticDefaultImports": true + }, + "include": ["./src/**/*"] +} diff --git a/ui/.gitignore b/ui/.gitignore new file mode 100644 index 0000000..2422136 --- /dev/null +++ b/ui/.gitignore @@ -0,0 +1,33 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage + +# production +/build + +# misc +.DS_Store +.env.local +.env.development.local +.env.test.local +.env.production.local + +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# features +/src/features.ts + +# errors +/src/errors.ts + +# shared types +/src/resolverTypes.ts +/src/types.ts diff --git a/ui/README.md b/ui/README.md new file mode 100644 index 0000000..b58e0af --- /dev/null +++ b/ui/README.md @@ -0,0 +1,46 @@ +# Getting Started with Create React App + +This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). + +## Available Scripts + +In the project directory, you can run: + +### `yarn start` + +Runs the app in the development mode.\ +Open [http://localhost:3000](http://localhost:3000) to view it in the browser. + +The page will reload if you make edits.\ +You will also see any lint errors in the console. + +### `yarn test` + +Launches the test runner in the interactive watch mode.\ +See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information. + +### `yarn build` + +Builds the app for production to the `build` folder.\ +It correctly bundles React in production mode and optimizes the build for the best performance. + +The build is minified and the filenames include the hashes.\ +Your app is ready to be deployed! + +See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information. + +### `yarn eject` + +**Note: this is a one-way operation. Once you `eject`, you can’t go back!** + +If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project. + +Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. 
All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own. + +You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it. + +## Learn More + +You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started). + +To learn React, check out the [React documentation](https://reactjs.org/). diff --git a/ui/jest.config.js b/ui/jest.config.js new file mode 100644 index 0000000..b660d7b --- /dev/null +++ b/ui/jest.config.js @@ -0,0 +1,13 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'jsdom', + coverageThreshold: { + global: { + branches: 80, + functions: 85, + lines: 85, + statements: 85, + }, + }, +}; diff --git a/ui/package.json b/ui/package.json new file mode 100644 index 0000000..8ec83c1 --- /dev/null +++ b/ui/package.json @@ -0,0 +1,64 @@ +{ + "name": "ui", + "version": "0.1.0", + "private": true, + "homepage": ".", + "dependencies": { + "@atlaskit/button": "^16.2.2", + "@atlaskit/checkbox": "^12.3.12", + "@atlaskit/css-reset": "^6.3.8", + "@atlaskit/dynamic-table": "^14.5.4", + "@atlaskit/empty-state": "^7.3.10", + "@atlaskit/form": "^8.5.4", + "@atlaskit/icon": "^21.10.6", + "@atlaskit/inline-message": "^11.2.7", + "@atlaskit/progress-bar": "^0.5.6", + "@atlaskit/section-message": "^6.1.10", + "@atlaskit/select": "^15.2.11", + "@atlaskit/spinner": "^15.1.9", + "@atlaskit/textfield": "^5.1.10", + "@atlaskit/theme": "^12.1.6", + "@atlaskit/tooltip": "^17.5.9", + "@atlassian/forge-graphql": "^5.9.3", + "@forge/api": "^2.6.1", + "@forge/bridge": "^2.3.0", + "escape-string-regexp": "^5.0.0", + "lodash.debounce": "^4.0.8", + "react": 
"^17.0.2", + "react-dom": "^17.0.2", + "react-router-dom": "^6.3.0", + "styled-components": "^5.3.5" + }, + "devDependencies": { + "@testing-library/jest-dom": "^5.16.2", + "@testing-library/react": "^12.1.4", + "@types/jest": "^27.4.1", + "@types/lodash.debounce": "^4.0.7", + "@types/node": "^14.14.31", + "@types/react": "^17.0.40", + "@types/react-dom": "^17.0.13", + "@types/styled-components": "^5.1.24", + "react-scripts": "^5.0.0", + "typescript": "~4.5.5" + }, + "scripts": { + "start": "SKIP_PREFLIGHT_CHECK=true BROWSER=none PORT=3001 react-scripts start", + "build": "SKIP_PREFLIGHT_CHECK=true react-scripts build", + "test": "SKIP_PREFLIGHT_CHECK=true react-scripts test", + "pretest": "node -p \"JSON.stringify({...require('@forge/bridge/package.json'), main: 'out/index.js'}, null, 2)\" > tmp.json && mv tmp.json node_modules/@forge/bridge/package.json", + "eject": "react-scripts eject", + "prebuild": "cd ../src && cp types.ts ../ui/src && cp resolverTypes.ts ../ui/src && cp features.ts ../ui/src" + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/ui/public/index.html b/ui/public/index.html new file mode 100644 index 0000000..db2e8d4 --- /dev/null +++ b/ui/public/index.html @@ -0,0 +1,12 @@ + + + + + + Compass Forge Template + + + +
+ + diff --git a/ui/src/App.tsx b/ui/src/App.tsx new file mode 100644 index 0000000..e532926 --- /dev/null +++ b/ui/src/App.tsx @@ -0,0 +1,13 @@ +import { AppRouter } from './AppRouter'; +import { AppContextProvider } from './context/AppContext'; +import { ImportContextProvider } from './context/ImportContext'; + +export const App = () => { + return ( + + + + + + ); +}; diff --git a/ui/src/AppRouter.tsx b/ui/src/AppRouter.tsx new file mode 100644 index 0000000..2d5f7c4 --- /dev/null +++ b/ui/src/AppRouter.tsx @@ -0,0 +1,39 @@ +import { Route, MemoryRouter, Routes } from 'react-router-dom'; + +import { ConnectedPage } from './components/ConnectedPage'; +import { AuthPage } from './components/AuthPage'; +import { SelectImportPage } from './components/SelectImportPage'; +import { ImportProgressResultPage } from './components/ImportProgressResultPage'; +import { useAppContext } from './hooks/useAppContext'; +import { ApplicationState, ROUTES } from './routes'; +import { IMPORT_MODULE_KEY } from './constants'; + +export const AppRouter = () => { + const { initialRoute, moduleKey } = useAppContext(); + + return moduleKey === IMPORT_MODULE_KEY ? 
( + <> + { + + + } /> + } /> + + + } + + ) : ( + <> + {initialRoute && ( + + + } /> + } /> + } /> + } /> + + + )} + + ); +}; diff --git a/ui/src/components/AuthPage/__tests__/AuthPage.test.tsx b/ui/src/components/AuthPage/__tests__/AuthPage.test.tsx new file mode 100644 index 0000000..e6904fc --- /dev/null +++ b/ui/src/components/AuthPage/__tests__/AuthPage.test.tsx @@ -0,0 +1,105 @@ +import { fireEvent, render, screen } from '@testing-library/react'; +import { act } from 'react-dom/test-utils'; +import '@testing-library/jest-dom/extend-expect'; + +import { AppRouter } from '../../../AppRouter'; +import { AppContextProvider } from '../../../context/AppContext'; +import { ErrorMessages } from '../../../errorMessages'; +import { defaultMocks, mockInvoke, mockGetContext } from '../../../helpers/mockHelpers'; +import { AuthErrorTypes } from '../../../resolverTypes'; + +jest.mock('@forge/bridge', () => ({ + invoke: jest.fn(), + view: { + getContext: jest.fn(), + }, +})); + +jest.mock('escape-string-regexp', () => ({ + escapeStringRegexp: jest.fn(), +})); + +const setup = async () => { + const { findByPlaceholderText, findByText } = render( + + + , + ); + + await act(async () => { + fireEvent.change(await findByPlaceholderText('Enter your group access token'), { target: { value: 'koko' } }); + }); + await act(async () => { + fireEvent.change(await findByPlaceholderText('Enter your group token name'), { target: { value: 'momo' } }); + }); + await act(async () => { + fireEvent.click(await findByText('Connect group')); + }); +}; + +describe('Auth flow validation', () => { + it('renders error in the case when group token invalid', async () => { + mockGetContext('admin-page-ui'); + mockInvoke({ + ...defaultMocks, + 'groups/connect': { + success: false, + errors: [{ message: 'Error', errorType: AuthErrorTypes.INVALID_GROUP_TOKEN }], + }, + }); + + await setup(); + + expect(screen.getByTestId('incorrect-token-message')).toHaveTextContent( + 
ErrorMessages[AuthErrorTypes.INVALID_GROUP_TOKEN].description.join(''), + ); + }); + + it('renders error in the case when group token name invalid', async () => { + mockGetContext('admin-page-ui'); + mockInvoke({ + ...defaultMocks, + 'groups/connect': { + success: false, + errors: [{ message: 'Error', errorType: AuthErrorTypes.INVALID_GROUP_TOKEN_NAME }], + }, + }); + + await setup(); + + expect(screen.getByTestId('incorrect-token-name-message')).toHaveTextContent( + ErrorMessages[AuthErrorTypes.INVALID_GROUP_TOKEN_NAME].description, + ); + }); + + it('renders error in the case when group token scopes invalid', async () => { + mockGetContext('admin-page-ui'); + mockInvoke({ + ...defaultMocks, + 'groups/connect': { + success: false, + errors: [{ message: 'Error', errorType: AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES }], + }, + }); + + await setup(); + + expect(screen.getByTestId('incorrect-token-scopes-message')).toHaveTextContent( + ErrorMessages[AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES].description.join(''), + ); + }); + + it('renders error in the case when unexpected error', async () => { + mockGetContext('admin-page-ui'); + mockInvoke({ + ...defaultMocks, + 'groups/connect': { success: false, errors: [{ message: 'Error', errorType: AuthErrorTypes.UNEXPECTED_ERROR }] }, + }); + + await setup(); + + expect(screen.getByTestId('unexpected-message')).toHaveTextContent( + ErrorMessages[AuthErrorTypes.UNEXPECTED_ERROR].description, + ); + }); +}); diff --git a/ui/src/components/AuthPage/index.tsx b/ui/src/components/AuthPage/index.tsx new file mode 100644 index 0000000..f5380c5 --- /dev/null +++ b/ui/src/components/AuthPage/index.tsx @@ -0,0 +1,227 @@ +import { useState } from 'react'; +import { useNavigate } from 'react-router-dom'; +import styled from 'styled-components'; +import { router } from '@forge/bridge'; + +import SectionMessage from '@atlaskit/section-message'; +import { Field, FormFooter, HelperMessage } from '@atlaskit/form'; +import Textfield from 
'@atlaskit/textfield'; +import Button, { LoadingButton } from '@atlaskit/button'; +import { gridSize } from '@atlaskit/theme'; +import WatchIcon from '@atlaskit/icon/glyph/watch'; +import WatchFilledIcon from '@atlaskit/icon/glyph/watch-filled'; + +import { ApplicationState } from '../../routes'; +import { ForgeLink } from '../ForgeLink'; +import { connectGroup } from '../../services/invokes'; +import { ErrorMessages } from '../../errorMessages'; +import { AuthErrorTypes, ErrorTypes } from '../../resolverTypes'; + +const SectionMessageWrapper = styled.div` + margin-bottom: ${gridSize() * 2}px; +`; + +const FormWrapper = styled.div` + width: 350px; +`; + +const ReloadButtonWrapper = styled.div` + > button { + padding: 0; + } +`; + +const buildValidationMethod = (errorType: ErrorTypes) => { + switch (errorType) { + case AuthErrorTypes.INVALID_GROUP_TOKEN: + return ( + +

+ {ErrorMessages[AuthErrorTypes.INVALID_GROUP_TOKEN].description[0]} + {ErrorMessages[AuthErrorTypes.INVALID_GROUP_TOKEN].description[1]} + {ErrorMessages[AuthErrorTypes.INVALID_GROUP_TOKEN].description[2]} +

+
+ ); + case AuthErrorTypes.INVALID_GROUP_TOKEN_NAME: + return ( + +

{ErrorMessages[AuthErrorTypes.INVALID_GROUP_TOKEN_NAME].description}

+
+ ); + case AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES: + return ( + +

+ {ErrorMessages[AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES].description[0]} + {ErrorMessages[AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES].description[1]} + {ErrorMessages[AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES].description[2]} + {ErrorMessages[AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES].description[3]} + {ErrorMessages[AuthErrorTypes.INCORRECT_GROUP_TOKEN_SCOPES].description[4]} +

+
+ ); + default: + return ( + +

{ErrorMessages[AuthErrorTypes.UNEXPECTED_ERROR].description}

+ + + +
+ ); + } +}; + +export const AuthPage = () => { + const [tokenName, setTokenName] = useState(''); + const [token, setToken] = useState(''); + const [isLoadingSubmit, setLoadingSubmit] = useState(false); + const [errorType, setErrorType] = useState(null); + const [isTokenVisible, setIsTokenVisible] = useState(false); + + const navigate = useNavigate(); + + const handleNavigateToConnectedPage = () => { + navigate(`..${ApplicationState.CONNECTED}`, { replace: true }); + }; + + const handleSubmit = async (): Promise => { + setLoadingSubmit(true); + + try { + const { success, errors } = await connectGroup(token.trim(), tokenName); + + if (success) { + handleNavigateToConnectedPage(); + } else { + setErrorType((errors && errors[0].errorType) || AuthErrorTypes.UNEXPECTED_ERROR); + } + + setLoadingSubmit(false); + } catch (err) { + setLoadingSubmit(false); + setErrorType(AuthErrorTypes.UNEXPECTED_ERROR); + } + }; + + const tokenNameOnChange = (e: React.FormEvent) => { + if (errorType) { + setErrorType(null); + } + setTokenName(e.currentTarget.value); + }; + + const tokenOnChange = (e: React.FormEvent) => { + if (errorType) { + setErrorType(null); + } + setToken(e.currentTarget.value); + }; + + const toggleTokenView = (e: React.MouseEvent) => { + e.stopPropagation(); + + setIsTokenVisible(!isTokenVisible); + }; + + const isSubmitBtnDisabled = !tokenName || !token; + + return ( +
+ + +

+ Create and retrieve your group access token from your GitLab account to connect to Compass. When creating + your group access token, make sure you: +

+
    +
  • do not set expiration date, leave it empty
  • +
  • select an owner role for the token
  • +
  • set required scopes for the token to “api” and “write_repository”
  • +
  • have GitLab owner permissions for the group you want to connect
  • +
+

+ + Learn more about group access tokens + +

+
+
+ + {errorType && buildValidationMethod(errorType)} + + + + {({ fieldProps }) => ( + toggleTokenView(e)} + iconBefore={ + isTokenVisible ? ( + + ) : ( + + ) + } + appearance='subtle' + /> + } + /> + )} + + + {({ fieldProps }) => ( + <> + + Note: The name of the token must be the same as it is in GitLab + + )} + + + + Connect group + + + +
+ ); +}; diff --git a/ui/src/components/ConnectedPage/ConnectInfoPanel.tsx b/ui/src/components/ConnectedPage/ConnectInfoPanel.tsx new file mode 100644 index 0000000..9df4b06 --- /dev/null +++ b/ui/src/components/ConnectedPage/ConnectInfoPanel.tsx @@ -0,0 +1,49 @@ +import styled from 'styled-components'; + +import CheckCircleIcon from '@atlaskit/icon/glyph/check-circle'; +import { LoadingButton } from '@atlaskit/button'; +import { N30 } from '@atlaskit/theme/colors'; +import { gridSize } from '@atlaskit/theme'; + +import { GitlabAPIGroup } from '../../resolverTypes'; + +const ConnectedGroupWrapper = styled.div` + display: flex; + align-items: center; + justify-content: space-between; + border: 1px solid ${N30}; + border-radius: ${gridSize() / 2}px; + padding: ${gridSize() * 2}px; +`; + +const IconTitleGroupWrapper = styled.div` + display: flex; + align-items: center; + width: 80%; +`; + +const ConnectedText = styled.p` + margin: 0 0 0 5px; +`; + +type Props = { + connectedGroup: GitlabAPIGroup; + handleDisconnectGroup: (id: number) => void; + isDisconnectGroupInProgress: boolean; +}; + +export const ConnectInfoPanel = ({ connectedGroup, handleDisconnectGroup, isDisconnectGroupInProgress }: Props) => { + return ( + + + + + Your GitLab group {connectedGroup.name} is successfully connected to Compass + + + handleDisconnectGroup(connectedGroup.id)} isLoading={isDisconnectGroupInProgress}> + Disconnect + + + ); +}; diff --git a/ui/src/components/ConnectedPage/ImportControls.tsx b/ui/src/components/ConnectedPage/ImportControls.tsx new file mode 100644 index 0000000..c3aaa4d --- /dev/null +++ b/ui/src/components/ConnectedPage/ImportControls.tsx @@ -0,0 +1,89 @@ +import { useEffect, useState } from 'react'; +import { useNavigate } from 'react-router-dom'; + +import Button from '@atlaskit/button'; +import Spinner from '@atlaskit/spinner'; +import InlineMessage from '@atlaskit/inline-message'; +import { router } from '@forge/bridge'; + +import { ApplicationState } from 
'../../routes'; +import { ImportProgressBar } from '../ImportProgressBar'; +import { useImportContext } from '../../hooks/useImportContext'; +import { getLastSyncTime } from '../../services/invokes'; +import { formatLastSyncTime } from '../../helpers/time'; +import { ImportButtonWrapper } from '../styles'; +import { useAppContext } from '../../hooks/useAppContext'; + +type Props = { + groupName: string; + groupId: number; +}; + +export const ImportControls = ({ groupName, groupId }: Props) => { + const [lastSyncTime, setLastSyncTime] = useState(null); + const [lastSyncTimeIsLoading, setLastSyncTimeIsLoading] = useState(false); + const [lastSyncTimeErrorMessage, setLastSyncTimeAnErrorMessage] = useState(); + + const { isImportInProgress } = useImportContext(); + const { appId } = useAppContext(); + + const navigate = useNavigate(); + + const handleImportNavigate = () => { + router.navigate(`/compass/import/redirect/${encodeURIComponent(`ari:cloud:ecosystem::app/${appId}`)}`); + }; + + const fetchLastSyncTime = async () => { + setLastSyncTimeIsLoading(true); + + try { + const { data: time, success, errors } = await getLastSyncTime(); + if (success && time) { + setLastSyncTime(time); + } + + if (errors && errors.length > 0) { + setLastSyncTimeAnErrorMessage(errors[0].message); + } + } catch (error) { + setLastSyncTimeAnErrorMessage((error as Error).message); + } finally { + setLastSyncTimeIsLoading(false); + } + }; + + useEffect(() => { + fetchLastSyncTime(); + }, []); + + const lastSyncTimeMsg = lastSyncTime ? `Last imported: ${formatLastSyncTime(lastSyncTime)}` : 'No import history'; + + return ( + <> +

Import projects

+

+ Import projects from {groupName} as components to track in Compass. +

+ + {isImportInProgress ? ( + + ) : ( + + + + {lastSyncTimeIsLoading && } + {lastSyncTimeErrorMessage && ( + +

{lastSyncTimeErrorMessage}

+
+ )} + {!lastSyncTimeIsLoading && !lastSyncTimeErrorMessage && ( + + )} +
+ )} + + ); +}; diff --git a/ui/src/components/ConnectedPage/index.tsx b/ui/src/components/ConnectedPage/index.tsx new file mode 100644 index 0000000..266353e --- /dev/null +++ b/ui/src/components/ConnectedPage/index.tsx @@ -0,0 +1,76 @@ +import { useState, useEffect } from 'react'; +import { useNavigate } from 'react-router-dom'; + +import Spinner from '@atlaskit/spinner'; + +import { disconnectGroup } from '../../services/invokes'; +import { ConnectInfoPanel } from './ConnectInfoPanel'; +import { ImportControls } from './ImportControls'; +import { CenterWrapper } from '../styles'; +import { DefaultErrorState } from '../DefaultErrorState'; +import { ApplicationState } from '../../routes'; +import { useAppContext } from '../../hooks/useAppContext'; +import { AuthErrorTypes, ErrorTypes, GitlabAPIGroup } from '../../resolverTypes'; +import { useImportContext } from '../../hooks/useImportContext'; +import { ImportResult } from '../ImportResult'; + +export const ConnectedPage = () => { + const [isDisconnectGroupInProgress, setDisconnectGroupInProgress] = useState(false); + const [errorType, setErrorType] = useState(); + const [groups, setGroups] = useState(); + + const navigate = useNavigate(); + const { getGroups, clearGroup } = useAppContext(); + const { isImportInProgress } = useImportContext(); + + const handleDisconnectGroup = async (id: number) => { + setDisconnectGroupInProgress(true); + try { + const { success, errors } = await disconnectGroup(id); + clearGroup(id); + + if (success) { + setDisconnectGroupInProgress(false); + navigate(`..${ApplicationState.AUTH}`, { replace: true }); + } + if (errors && errors.length > 0) { + setDisconnectGroupInProgress(false); + setErrorType(errors[0].errorType || AuthErrorTypes.UNEXPECTED_ERROR); + } + } catch (err) { + setErrorType(AuthErrorTypes.UNEXPECTED_ERROR); + } finally { + setDisconnectGroupInProgress(false); + } + }; + + useEffect(() => { + getGroups().then(setGroups); + }, []); + + if (errorType) { + return ; + } + 
+ if (!groups?.length) { + return ( + + + + ); + } + + return ( +
+ + + + + {!isImportInProgress ? : null} +
+ ); +}; diff --git a/ui/src/components/DefaultErrorState.tsx b/ui/src/components/DefaultErrorState.tsx new file mode 100644 index 0000000..8ed447e --- /dev/null +++ b/ui/src/components/DefaultErrorState.tsx @@ -0,0 +1,21 @@ +import { FunctionComponent } from 'react'; + +import SectionMessage from '@atlaskit/section-message'; + +import { ImportantText, SectionWrapper } from './styles'; +import { ErrorTypes } from '../resolverTypes'; + +type Props = { + errorType?: ErrorTypes; +}; + +export const DefaultErrorState: FunctionComponent = ({ errorType }) => { + return ( + + + {errorType} +

Please, try to reload a page.

+
+
+ ); +}; diff --git a/ui/src/components/EmptyState/GenericEmptyState.tsx b/ui/src/components/EmptyState/GenericEmptyState.tsx new file mode 100644 index 0000000..f64be5a --- /dev/null +++ b/ui/src/components/EmptyState/GenericEmptyState.tsx @@ -0,0 +1,21 @@ +import { FunctionComponent } from 'react'; +import EmptyState from '@atlaskit/empty-state'; + +type Props = { + header: string; + description: string; + image: string; +}; + +export const GenericEmptyState: FunctionComponent = ({ header, description, image }: Props) => { + return ( + + ); +}; diff --git a/ui/src/components/EmptyState/buildEmptyView.tsx b/ui/src/components/EmptyState/buildEmptyView.tsx new file mode 100644 index 0000000..1960cfb --- /dev/null +++ b/ui/src/components/EmptyState/buildEmptyView.tsx @@ -0,0 +1,33 @@ +import { ERROR_IMAGES } from './errorImages'; +import { GenericEmptyState } from './GenericEmptyState'; + +type Props = { + isProjectsExist: boolean; + error?: string; +}; + +export const buildEmptyView = ({ isProjectsExist, error }: Props): JSX.Element | undefined => { + if (error) { + return ( + + ); + } + + return isProjectsExist ? 
( + + ) : ( + + ); +}; diff --git a/ui/src/components/EmptyState/errorImages.ts b/ui/src/components/EmptyState/errorImages.ts new file mode 100644 index 0000000..df601de --- /dev/null +++ b/ui/src/components/EmptyState/errorImages.ts @@ -0,0 +1,11 @@ +import ErrorWindowImg from '../assets/ErrorWindow.svg'; +import NoResultsImg from '../assets/MoonFlag.svg'; +import NoSearchResultsImg from '../assets/SearchNoResults.svg'; +import GenericEmptyStateImg from '../assets/GenericEmptyState.svg'; + +export const ERROR_IMAGES = { + UNEXPECTED: ErrorWindowImg, + NO_RESULTS: NoResultsImg, + GENERIC_EMPTY_STATE: GenericEmptyStateImg, + NO_SEARCH_RESULTS: NoSearchResultsImg, +}; diff --git a/ui/src/components/ForgeLink.tsx b/ui/src/components/ForgeLink.tsx new file mode 100644 index 0000000..a5664a5 --- /dev/null +++ b/ui/src/components/ForgeLink.tsx @@ -0,0 +1,24 @@ +import { FunctionComponent, ReactNode } from 'react'; + +import { router } from '@forge/bridge'; + +type Props = { + to: string; + testId?: string; + openInNewTab?: boolean; + children: ReactNode; +}; + +export const ForgeLink: FunctionComponent = ({ to, testId, openInNewTab, children }) => ( + { + e.preventDefault(); + const nav = openInNewTab ? 
'open' : 'navigate'; + router[nav](to); + }} + > + {children} + +); diff --git a/ui/src/components/FormatOptionLabel/index.tsx b/ui/src/components/FormatOptionLabel/index.tsx new file mode 100644 index 0000000..a9d2fdd --- /dev/null +++ b/ui/src/components/FormatOptionLabel/index.tsx @@ -0,0 +1,16 @@ +import { FunctionComponent } from 'react'; +import { CompassComponentTypeOption } from '../../services/types'; +import { ComponentTypeIcon } from '../component-type-icon'; +import { LabelContainer, OptionContainer } from './styles'; + +export const FormatOptionLabel: FunctionComponent = ({ + value, + label, +}: CompassComponentTypeOption) => { + return ( + + + {label} + + ); +}; diff --git a/ui/src/components/FormatOptionLabel/styles.ts b/ui/src/components/FormatOptionLabel/styles.ts new file mode 100644 index 0000000..c2b0fd7 --- /dev/null +++ b/ui/src/components/FormatOptionLabel/styles.ts @@ -0,0 +1,12 @@ +import { gridSize } from '@atlaskit/theme'; + +import styled from 'styled-components'; + +export const OptionContainer = styled.div` + display: flex; + align-items: center; +`; + +export const LabelContainer = styled.div` + padding-left: ${gridSize()}px; +`; diff --git a/ui/src/components/ImportProgressBar/index.tsx b/ui/src/components/ImportProgressBar/index.tsx new file mode 100644 index 0000000..5c3407c --- /dev/null +++ b/ui/src/components/ImportProgressBar/index.tsx @@ -0,0 +1,26 @@ +import ProgressBar from '@atlaskit/progress-bar'; +import SectionMessage from '@atlaskit/section-message'; + +import { useImportProgress } from '../../hooks/useImportProgress'; +import { ProgressDescriptionWrapper } from './styles'; + +export const ImportProgressBar = () => { + const { error, importedRepositories, totalSelectedRepos } = useImportProgress(); + + if (error) { + return ( + +

Unfortunately, we can't show a current progress status. Please, refresh a page or try again later.

+
+ ); + } + + return ( + <> + + + {importedRepositories} of {totalSelectedRepos} projects imported + + + ); +}; diff --git a/ui/src/components/ImportProgressBar/styles.ts b/ui/src/components/ImportProgressBar/styles.ts new file mode 100644 index 0000000..b1669c1 --- /dev/null +++ b/ui/src/components/ImportProgressBar/styles.ts @@ -0,0 +1,6 @@ +import { gridSize } from '@atlaskit/theme'; +import styled from 'styled-components'; + +export const ProgressDescriptionWrapper = styled.div` + margin-top: ${gridSize() * 3}px; +`; diff --git a/ui/src/components/ImportProgressResultPage/index.tsx b/ui/src/components/ImportProgressResultPage/index.tsx new file mode 100644 index 0000000..63cadc7 --- /dev/null +++ b/ui/src/components/ImportProgressResultPage/index.tsx @@ -0,0 +1,45 @@ +import Button from '@atlaskit/button'; +import { useNavigate } from 'react-router-dom'; +import styled from 'styled-components'; + +import { gridSize } from '@atlaskit/theme'; + +import { router } from '@forge/bridge'; +import { useImportContext } from '../../hooks/useImportContext'; +import { ApplicationState } from '../../routes'; +import { ImportProgressBar } from '../ImportProgressBar'; +import { ImportResult } from '../ImportResult'; +import { IMPORT_MODULE_KEY } from '../../constants'; + +const DoneButtonWrapper = styled.div` + margin-top: ${gridSize() * 2}px; +`; + +type Props = { + moduleKey: string; +}; + +export const ImportProgressResultPage = ({ moduleKey }: Props) => { + const { isImportInProgress } = useImportContext(); + + const navigate = useNavigate(); + + const handleNavigateWhenDone = () => { + if (moduleKey === IMPORT_MODULE_KEY) { + router.navigate('/compass/components'); + } else { + const path = `..${ApplicationState.CONNECTED}`; + navigate(path, { replace: true }); + } + }; + + return ( + <> + {isImportInProgress ? 
: } + + + + + + ); +}; diff --git a/ui/src/components/ImportResult/buildErrorState.tsx b/ui/src/components/ImportResult/buildErrorState.tsx new file mode 100644 index 0000000..b26177f --- /dev/null +++ b/ui/src/components/ImportResult/buildErrorState.tsx @@ -0,0 +1,21 @@ +import { ErrorTypes } from '../../resolverTypes'; +import { ERROR_IMAGES } from '../EmptyState/errorImages'; +import { GenericEmptyState } from '../EmptyState/GenericEmptyState'; + +type Props = { + error?: ErrorTypes | null; +}; + +export const buildErrorState = ({ error }: Props): JSX.Element | undefined => { + if (error) { + return ( + + ); + } + + return undefined; +}; diff --git a/ui/src/components/ImportResult/buildHead.tsx b/ui/src/components/ImportResult/buildHead.tsx new file mode 100644 index 0000000..fa326fb --- /dev/null +++ b/ui/src/components/ImportResult/buildHead.tsx @@ -0,0 +1,20 @@ +import { HeadType } from '@atlaskit/dynamic-table/dist/types/types'; + +export const buildHead = (): HeadType => { + return { + cells: [ + { + key: 'NAME', + content: 'Name', + width: 20, + isSortable: false, + }, + { + key: 'DESCRIPTION', + content: 'Description', + width: 80, + isSortable: false, + }, + ], + }; +}; diff --git a/ui/src/components/ImportResult/buildRows.tsx b/ui/src/components/ImportResult/buildRows.tsx new file mode 100644 index 0000000..16a3988 --- /dev/null +++ b/ui/src/components/ImportResult/buildRows.tsx @@ -0,0 +1,38 @@ +import { RowType } from '@atlaskit/dynamic-table/dist/types/types'; +import Tooltip from '@atlaskit/tooltip'; +import { ImportableProject } from '../../types'; + +import { ForgeLink } from '../ForgeLink'; +import { TruncateDescription } from '../styles'; +import { NameWrapper } from './styles'; + +type Props = { + failedProjects: ImportableProject[]; +}; + +export const buildRows = ({ failedProjects }: Props): RowType[] => + failedProjects.map(({ id, name, description, url }) => { + return { + key: `${id}`, + cells: [ + { + key: 'name', + content: ( + + + 
{name} + + + ), + }, + { + key: 'description', + content: ( + + {description || '-'} + + ), + }, + ], + }; + }); diff --git a/ui/src/components/ImportResult/index.tsx b/ui/src/components/ImportResult/index.tsx new file mode 100644 index 0000000..ccc9475 --- /dev/null +++ b/ui/src/components/ImportResult/index.tsx @@ -0,0 +1,92 @@ +import { FunctionComponent, useEffect, useMemo, useState } from 'react'; +import { DynamicTableStateless } from '@atlaskit/dynamic-table'; +import Spinner from '@atlaskit/spinner'; +import { R400, G500 } from '@atlaskit/theme/colors'; +import ErrorIcon from '@atlaskit/icon/glyph/error'; +import CheckIcon from '@atlaskit/icon/glyph/check'; + +import SectionMessage from '@atlaskit/section-message'; +import { buildHead } from './buildHead'; +import { buildRows } from './buildRows'; +import { ErrorInfo, ImportResultCounterWrapper, SuccessInfo } from './styles'; +import { buildErrorState } from './buildErrorState'; +import { useImportResult } from '../../hooks/useImportResult'; +import { CenterWrapper, SectionWrapper, TableWrapper } from '../styles'; +import { clearResult } from '../../services/invokes'; +import { ErrorTypes, ImportErrorTypes } from '../../resolverTypes'; + +export const ImportResult: FunctionComponent = () => { + const [error, setError] = useState(); + const { failedProjects, totalProjects, isLoading, error: importResultError } = useImportResult(); + + const rows = useMemo(() => buildRows({ failedProjects }), [failedProjects]); + const errorState = useMemo(() => buildErrorState({ error: importResultError }), [error]); + + const clearImportResults = async () => { + try { + const { errors } = await clearResult(); + + if (errors) { + setError(errors[0].errorType || ImportErrorTypes.UNEXPECTED_ERROR); + } + } catch { + setError(ImportErrorTypes.UNEXPECTED_ERROR); + } + }; + + useEffect(() => { + if (Boolean(failedProjects.length) || Boolean(totalProjects)) { + clearImportResults(); + } + }, [failedProjects, totalProjects]); + + 
if (isLoading) { + return ( + + + + ); + } + + if (error) { + return ( + + +

Something went wrong! Try to reload a page.

+
+
+ ); + } + + if (!failedProjects.length && !totalProjects) { + return null; + } + + const successInfoText = + totalProjects > 1 + ? `${totalProjects} components are successfully imported.` + : `${totalProjects} component is successfully imported.`; + + return ( + <> + {failedProjects.length === 0 ? ( + <> + + + {successInfoText} + + + ) : ( + <> + + + {failedProjects.length} components failed to import + + + + + + )} + + ); +}; diff --git a/ui/src/components/ImportResult/styles.ts b/ui/src/components/ImportResult/styles.ts new file mode 100644 index 0000000..086a9dc --- /dev/null +++ b/ui/src/components/ImportResult/styles.ts @@ -0,0 +1,26 @@ +import { gridSize } from '@atlaskit/theme'; +import { R400, G500 } from '@atlaskit/theme/colors'; +import styled from 'styled-components'; + +export const NameWrapper = styled.div` + margin: ${gridSize()}px 0; +`; + +export const ImportResultCounterWrapper = styled.div` + padding-top: ${gridSize() * 2}px; + display: flex; +`; + +export const ErrorInfo = styled.span` + color: ${R400}; + font-size: 14px; + line-height: 20px; + margin: ${gridSize() * 0.25}px 0 0 ${gridSize() - 3}px; +`; + +export const SuccessInfo = styled.span` + color: ${G500}; + font-size: 14px; + line-height: 20px; + margin: ${gridSize() * 0.25}px 0 0 ${gridSize() - 3}px; +`; diff --git a/ui/src/components/ProjectsImportTable/buildTableBody.tsx b/ui/src/components/ProjectsImportTable/buildTableBody.tsx new file mode 100644 index 0000000..674a351 --- /dev/null +++ b/ui/src/components/ProjectsImportTable/buildTableBody.tsx @@ -0,0 +1,127 @@ +import Select from '@atlaskit/select'; +import Tooltip from '@atlaskit/tooltip'; +import Checkbox from '@atlaskit/checkbox'; +import { RowType } from '@atlaskit/dynamic-table/dist/types/types'; + +import { CompassComponentTypeOption, ProjectImportSelection } from '../../services/types'; +import { ForgeLink } from '../ForgeLink'; +import { COMPONENT_TYPE_OPTIONS, tooltipsText } from '../utils'; +import { FormatOptionLabel } 
from '../FormatOptionLabel'; +import { TruncateDescription } from '../styles'; +import { TooltipGenerator } from '../TooltipGenerator'; +import { DropdownWrapper } from './styles'; + +type Props = { + projects: ProjectImportSelection[]; + onSelectItem: (id: number) => void; + onChangeComponentType: (id: number, type: CompassComponentTypeOption) => void; +}; + +const mapStatus = (isManaged: boolean, isCompassFilePrOpened: boolean, hasComponent: boolean) => { + if (isManaged) { + return ( + + {tooltipsText.managed.children} + + ); + } + if (isCompassFilePrOpened) { + return ( + + {tooltipsText.inProgress.children} + + ); + } + if (hasComponent) { + return ( + + {tooltipsText.created.children} + + ); + } + + return '-'; +}; + +export const buildTableBody = ({ projects, onSelectItem, onChangeComponentType }: Props): RowType[] => { + return projects.map((project) => { + const { + id, + name, + description, + url, + isSelected, + groupFullPath, + groupPath, + type, + isManaged, + isCompassFilePrOpened, + hasComponent, + } = project; + + return { + key: `${id}`, + cells: [ + { + key: 'checkbox', + content: ( +
+ onSelectItem(id)} + /> +
+ ), + }, + { + key: 'name', + content: ( + + {name} + + ), + }, + { + key: 'groupPath', + content: ( + + {groupPath || '-'} + + ), + }, + { + key: 'description', + content: ( + + {description || '-'} + + ), + }, + { + key: 'status', + content: mapStatus(isManaged, isCompassFilePrOpened, hasComponent), + }, + { + key: 'type', + content: ( + + handleChangeGroup(e)} + inputId='select-group' + className='single-select' + classNamePrefix='react-select' + placeholder='Select group' + options={groupSelectorOptions} + /> + + + + + + + {projects.length !== 0 ? ( + + setPage((prevPage) => prevPage + 1)} + isLoading={!!projects.length && isProjectsLoading} + > + Load More + + + ) : null} + + + + + handleNavigateToScreen()} + isLoading={isProjectsImporting} + > + Select + + + + ); +}; diff --git a/ui/src/components/SelectImportPage/screens/__mocks__/mocks.ts b/ui/src/components/SelectImportPage/screens/__mocks__/mocks.ts new file mode 100644 index 0000000..abf7d15 --- /dev/null +++ b/ui/src/components/SelectImportPage/screens/__mocks__/mocks.ts @@ -0,0 +1,36 @@ +import { CompassComponentType } from '@atlassian/forge-graphql/dist/src/graphql-types'; +import { Link } from '@atlassian/forge-graphql'; + +export const groupMock = [ + { + full_name: 'koko-momo', + name: 'momo', + id: 1223, + path: 'koko/momo', + }, +]; + +export const projectImportSelectionMock = [ + { + isSelected: false, + type: { + label: 'label', + value: CompassComponentType.Service, + }, + id: 2, + name: 'a', + description: 'description', + defaultBranch: 'default_branch', + labels: ['label', 'language:javascript'], + url: 'web_url', + componentId: '', + componentLinks: [] as unknown as Link[], + componentType: CompassComponentType.Application, + hasComponent: true, + isCompassFilePrOpened: false, + isManaged: true, + groupFullPath: 'koko', + groupName: 'koko', + groupPath: 'koko', + }, +]; diff --git a/ui/src/components/SelectImportPage/screens/__tests__/SelectProjectsScreen.test.tsx 
b/ui/src/components/SelectImportPage/screens/__tests__/SelectProjectsScreen.test.tsx new file mode 100644 index 0000000..7fb2260 --- /dev/null +++ b/ui/src/components/SelectImportPage/screens/__tests__/SelectProjectsScreen.test.tsx @@ -0,0 +1,93 @@ +import { render } from '@testing-library/react'; + +import { SelectProjectsScreen } from '../SelectProjectsScreen'; +import { projectImportSelectionMock, groupMock } from '../__mocks__/mocks'; + +jest.mock('@forge/bridge', () => ({ + invoke: jest.fn(), +})); + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: () => ({ + state: 123, + }), +})); + +describe('SelectProjectsScreen', () => { + it('should render projects import table with load more button', async () => { + const { findByTestId } = render( + , + ); + + expect(await findByTestId('load-more-button')).toBeDefined(); + expect(await findByTestId('projects-import-table--table')).toBeDefined(); + }); + + it('should not render load more button when projects not exist', () => { + const { queryByTestId } = render( + , + ); + + expect(queryByTestId('load-more-button')).toBeNull(); + }); + + it('should disabled load more button when all projects are rendered', () => { + const { getByTestId } = render( + , + ); + + expect(getByTestId('load-more-button')).toHaveProperty('disabled', true); + }); +}); diff --git a/ui/src/components/SelectImportPage/styles.ts b/ui/src/components/SelectImportPage/styles.ts new file mode 100644 index 0000000..d69ba46 --- /dev/null +++ b/ui/src/components/SelectImportPage/styles.ts @@ -0,0 +1,77 @@ +import styled from 'styled-components'; +import { gridSize } from '@atlaskit/theme'; +import { h700 } from '@atlaskit/theme/typography'; +import { N800 } from '@atlaskit/theme/colors'; +import { Description } from '../styles'; + +export const ButtonWrapper = styled.div` + display: flex; + justify-content: flex-end; + align-items: center; + margin: ${gridSize() * 4}px 0px; + > button:not(:first-child) { 
+ margin-left: ${gridSize()}px; + } +`; + +export const Wrapper = styled.div` + display: flex; + flex-direction: column; + width: inherit; +`; + +export const Header = styled.span` + ${h700} + color: ${N800}; + margin: 0; +`; + +export const OverrideDescription = styled(Description)` + margin-top: ${gridSize() * 2}px; +`; + +export const StatusWrapper = styled.div` + display: flex; + align-items: center; +`; + +export const ErrorWrapper = styled.div` + margin-top: ${gridSize() * 2}px; +`; + +export const DescriptionWrapper = styled.div` + margin: 12px 0; +`; + +export const RootWrapper = styled.div` + display: flex; + flex-direction: column; + max-width: 70%; +`; + +export const OptionContainer = styled.div` + display: flex; + align-items: center; +`; + +export const LabelContainer = styled.div` + padding-left: 8px; +`; + +export const TableHeaderWrapper = styled.div` + margin-top: ${gridSize() * 5}px; + display: flex; + justify-content: space-between; +`; + +export const GroupSelectorWrapper = styled.div` + width: ${gridSize() * 30}px; +`; + +export const TableSearchWrapper = styled.div` + width: ${gridSize() * 30}px; +`; + +export const TableWrapper = styled.div` + margin-top: ${gridSize() * 4}px; +`; diff --git a/ui/src/components/SelectedProjectsTable/buildTableBody.tsx b/ui/src/components/SelectedProjectsTable/buildTableBody.tsx new file mode 100644 index 0000000..08fa5cf --- /dev/null +++ b/ui/src/components/SelectedProjectsTable/buildTableBody.tsx @@ -0,0 +1,53 @@ +import Select from '@atlaskit/select'; +import { RowType } from '@atlaskit/dynamic-table/types'; +import { ForgeLink } from '../ForgeLink'; +import { FormatOptionLabel } from '../FormatOptionLabel'; +import { COMPONENT_TYPE_OPTIONS } from '../utils'; +import { CompassComponentTypeOption, ProjectImportSelection } from '../../services/types'; + +export interface SelectedProjectsProps { + projectsReadyToImport: ProjectImportSelection[]; + onChangeComponentType: (id: number, type: 
CompassComponentTypeOption) => void; +} + +export const buildTableBody = ({ projectsReadyToImport, onChangeComponentType }: SelectedProjectsProps): RowType[] => { + return projectsReadyToImport.map((project) => { + return { + key: project.id.toString(), + role: 'row', + style: { + borderBottom: '1px solid #DFE1E6', + }, + cells: [ + { + key: 'name', + content: ( + + {project.name} + + ), + }, + { + key: 'description', + content: project.description, + }, + { + key: 'type', + content: ( +