am
hyperupcall committed Oct 5, 2024
1 parent 165e5d7 commit 9efdf4c
Showing 2 changed files with 66 additions and 84 deletions.
57 changes: 28 additions & 29 deletions .github/workflows/brokenurl.yml
@@ -1,29 +1,28 @@
-name: 'Maintenance: Check Broken URLs'
-# on:
-#   schedule:
-#     - cron: '2 22 * * *' # Runs at 10:02 every day
-#   push:
-#   pull_request:
-on: ['push', 'pull_request']
-
-jobs:
-  brokenurl:
-    if: github.repository == 'SchemaStore/schemastore'
-    runs-on: 'ubuntu-latest'
-    steps:
-      - uses: 'actions/checkout@v4'
-      - uses: 'actions/setup-node@v4'
-        with:
-          node-version: '18'
-          cache: 'npm'
-          cache-dependency-path: './package-lock.json'
-      - run: 'npm clean-install'
-      # - run: 'node ./cli.js maintenance'
-      - name: 'Update comment'
-        uses: 'peter-evans/create-or-update-comment@v4'
-        permissions:
-          issues: 'write'
-        with:
-          comment-id: 2394972665
-          body: |
-            **Edit:** Some additional info
+# name: 'Maintenance: Check Broken URLs'
+# # on:
+# #   schedule:
+# #     - cron: '2 22 * * *' # Runs at 10:02 every day
+# #   push:
+# #   pull_request:
+# on: ['push', 'pull_request']
+# permissions: 'write-all'
+# jobs:
+#   brokenurl:
+#     if: github.repository == 'SchemaStore/schemastore'
+#     runs-on: 'ubuntu-latest'
+#     permissions: 'write-all'
+#     # issues: 'write'
+#     steps:
+#       - uses: 'actions/checkout@v4'
+#       - uses: 'actions/setup-node@v4'
+#         with:
+#           node-version: '18'
+#           cache: 'npm'
+#           cache-dependency-path: './package-lock.json'
+#       - run: 'npm clean-install'
+#       - run: |
+#           gh issue create --title "I found a bug" --body "Nothing works"
+#           echo "this is body 2" | gh issue edit 4128 --body-file -
+#         env:
+#           GH_TOKEN: '${{ github.token }}'
+#       # - run: 'node ./cli.js maintenance'
93 changes: 38 additions & 55 deletions cli.js
@@ -147,6 +147,7 @@ if (argv.SchemaName) {
  * @property {Buffer} buffer
  * @property {string} text
  * @property {Record<PropertyKey, unknown>} json
+ * @property {string} name
  * @property {string} path
  *
  * @typedef {Object} SchemaFile
@@ -223,7 +224,7 @@ async function forEachFile(/** @type {ForEachTestFile} */ obj) {
     }

     const schemaPath = path.join(SchemaDir, schemaName)
-    const schemaFile = await toSchemaFile(schemaPath)
+    const schemaFile = await toFile(schemaPath)
     spinner.text = `Running "${obj.actionName}" on file "${schemaFile.path}"`
     const data = await obj?.onSchemaFile?.(schemaFile, { spinner })

@@ -234,7 +235,7 @@ async function forEachFile(/** @type {ForEachTestFile} */ obj) {
         if (isIgnoredFile(testfile)) continue

         const testfilePath = path.join(TestPositiveDir, schemaId, testfile)
-        let file = await toTestFile(testfilePath)
+        let file = await toFile(testfilePath)
         await obj.onPositiveTestFile(schemaFile, file, data, { spinner })
       }
     }
@@ -247,7 +248,7 @@ async function forEachFile(/** @type {ForEachTestFile} */ obj) {
         if (isIgnoredFile(testfile)) continue

         const testfilePath = path.join(TestNegativeDir, schemaId, testfile)
-        let file = await toTestFile(testfilePath)
+        let file = await toFile(testfilePath)
         await obj.onNegativeTestFile(schemaFile, file, data, { spinner })
       }
     }
@@ -256,24 +257,13 @@ async function forEachFile(/** @type {ForEachTestFile} */ obj) {
     await obj?.afterSchemaFile?.(schemaFile, { spinner })
   }

-  async function toTestFile(/** @type {string} */ testfilePath) {
-    const buffer = await fs.readFile(testfilePath)
-    const text = buffer.toString()
-    return {
-      buffer,
-      text,
-      json: await readDataFile({ filepath: testfilePath, text }),
-      path: testfilePath,
-    }
-  }
-
   if (obj.actionName) {
     spinner.stop()
     console.info(`✔️ Completed "${obj.actionName}"`)
   }
 }

-async function toSchemaFile(/** @type {string} */ schemaPath) {
+async function toFile(/** @type {string} */ schemaPath) {
   const buffer = await fs.readFile(schemaPath)
   const text = buffer.toString()
   return {
@@ -525,12 +515,20 @@ async function taskLint() {
   await forEachFile({
     actionName: 'lint',
     async onSchemaFile(schema) {
-      await assertSchemaHasCorrectMetadata(schema)
+      // This checks to be sure $id is a schemastore.org URL.
+      // Commenting out because it is overly aggressive for now.
+      // await assertSchemaHasCorrectMetadata(schema)
       await assertTopLevelRefIsStandalone(schema)
-      await assertSchemaNoSmartQuotes(schema)
+      // await assertSchemaNoSmartQuotes(schema)

       const errors = schemasafe.lint(schema.json, {
-        mode: 'strong',
+        // mode: 'strong',
+        requireSchema: true,
+        requireValidation: true,
+        requireStringValidation: false,
+        complexityChecks: true,
+        forbidNoopValues: true,
+
         extraFormats: false,
         schemas: {},
       })
@@ -589,7 +587,7 @@ async function taskCheck() {
     CatalogFile,
     path.join(SchemaDir, 'schema-catalog.json'),
   )
-  await assertFilePassesJsonLint(CatalogFile)
+  await assertFilePassesJsonLint(await toFile(CatalogFile))
   assertCatalogJsonHasNoDuplicateNames()
   assertCatalogJsonHasNoBadFields()
   assertCatalogJsonHasNoFileMatchConflict()
@@ -601,7 +599,8 @@ async function taskCheck() {
     SchemaValidationFile,
     './src/schema-validation.schema.json',
   )
-  await assertFilePassesJsonLint(SchemaValidationFile, {
+  toFile
+  await assertFilePassesJsonLint(await toFile(SchemaValidationFile), {
     ignoreComments: true,
   })
   await assertSchemaValidationJsonReferencesNoNonexistentFiles()
@@ -614,7 +613,7 @@
     async onSchemaFile(schema) {
       assertFileHasNoBom(schema)
       assertFileHasCorrectExtensions(schema.path, ['.json'])
-      await assertFileHasNoDuplicatedPropertyKeys(schema)
+      await assertFilePassesJsonLint(schema)
       await assertSchemaHasValidIdField(schema)
       await assertSchemaHasValidSchemaField(schema)
     },
@@ -626,7 +625,9 @@ async function taskCheck() {
         '.yaml',
         '.toml',
       ])
-      await assertFileHasNoDuplicatedPropertyKeys(file)
+      if (!file.path.endsWith('.json')) {
+        await assertFilePassesJsonLint(file)
+      }
     },
     async onNegativeTestFile(file) {
       assertFileHasNoBom(file)
@@ -636,7 +637,9 @@
         '.yaml',
         '.toml',
       ])
-      await assertFileHasNoDuplicatedPropertyKeys(file)
+      if (!file.path.endsWith('.json')) {
+        await assertFilePassesJsonLint(file)
+      }
     },
   })

@@ -712,7 +715,7 @@ async function taskCheckStrict() {
     draftVersion: 'draft-07',
     fullStrictMode: false,
   })
-  const metaSchemaFile = await toSchemaFile(
+  const metaSchemaFile = await toFile(
     './src/schemas/json/metaschema-draft-07-unofficial-strict.json',
   )
   let validateFn
@@ -766,13 +769,13 @@ async function taskMaintenance() {
   forEachCatalogUrl(async (url) => {
     if (url.startsWith(UrlSchemaStore)) return

-    await fetch(url, { method: 'HEAD' })
+    await fetch(url, { method: 'HEAD' })
       .then((res) => {
-        // eslint-disable-line promise/always-return
-
         if (!res.ok) {
           console.info(`NOT OK (${res.status}/${res.statusText}): ${url}`)
         }
+
+        return undefined
       })
       .catch((err) => {
         console.info(`NOT OK (${err.code}): ${url}`)
@@ -1160,22 +1163,21 @@ function assertFileHasNoBom(/** @type {DataFile} */ file) {
 }

 async function assertFilePassesJsonLint(
-  /** @type {string} */ filepath,
+  /** @type {DataFile} */ file,
   /** @type {Record<string, unknown>} */ options,
 ) {
   try {
-    jsonlint.parse(await fs.readFile(filepath, 'utf-8'), {
+    jsonlint.parse(file.text, {
       ignoreBOM: false,
       ignoreComments: false,
       ignoreTrailingCommas: false,
       allowSingleQuotedStrings: false,
       allowDuplicateObjectKeys: false,
       ...options,
     })
-    console.info(`✔️ ${path.basename(filepath)} validates with jsonlint`)
   } catch (err) {
     printErrorAndExit(err, [
-      `Failed strict jsonlint parse of file "${path.basename(filepath)}"`,
+      `Failed strict jsonlint parse of file "${path.basename(file.path)}"`,
     ])
   }
 }
@@ -1208,25 +1210,6 @@ async function assertFileValidatesAgainstSchema(
   }
 }

-async function assertFileHasNoDuplicatedPropertyKeys(
-  /** @type {DataFile} */ file,
-) {
-  const fileExtension = file.path.split('.').pop()
-  if (fileExtension !== 'json') return
-
-  try {
-    jsonlint.parse(file.text, {
-      ignoreBOM: false,
-      ignoreComments: false,
-      ignoreTrailingCommas: false,
-      allowSingleQuotedStrings: false,
-      allowDuplicateObjectKeys: false,
-    })
-  } catch (err) {
-    printErrorAndExit(err, [`Failed to parse file with jsonlint: ${file.path}`])
-  }
-}
-
 async function assertSchemaHasValidSchemaField(
   /** @type {SchemaFile} */ schema,
 ) {
@@ -1256,6 +1239,11 @@ async function assertSchemaHasValidSchemaField(

 async function assertSchemaHasValidIdField(/** @type {SchemaFile} */ schema) {
   let schemaId = ''
+  /**
+   * Old JSON Schema specification versions use the "id" key for unique
+   * identifiers, rather than "$id". See for details:
+   * https://json-schema.org/understanding-json-schema/basics.html#declaring-a-unique-identifier
+   */
   const schemasWithDollarlessId = [
     'http://json-schema.org/draft-03/schema#',
     'http://json-schema.org/draft-04/schema#',
@@ -1287,11 +1275,6 @@ async function assertSchemaHasValidIdField(/** @type {SchemaFile} */ schema) {
 async function assertSchemaHasCorrectMetadata(
   /** @type {SchemaFile} */ schema,
 ) {
-  /**
-   * Old JSON Schema specification versions use the "id" key for unique
-   * identifiers, rather than "$id". See for details:
-   * https://json-schema.org/understanding-json-schema/basics.html#declaring-a-unique-identifier
-   */
   const schemasWithDollarlessId = [
     'http://json-schema.org/draft-03/schema#',
     'http://json-schema.org/draft-04/schema#',
