diff --git a/.nycrc.json b/.nycrc.json index ff8e389b..e2cc4a36 100644 --- a/.nycrc.json +++ b/.nycrc.json @@ -10,5 +10,8 @@ "all": true, "include": [ "src/**/*.js" + ], + "exclude": [ + "src/metatags/*.js" ] } diff --git a/src/metatags/constants.js b/src/metatags/constants.js index ed50aee1..7f52df8a 100644 --- a/src/metatags/constants.js +++ b/src/metatags/constants.js @@ -19,6 +19,9 @@ export const H1 = 'h1'; export const HIGH = 'High'; export const MODERATE = 'Moderate'; +// Audit result constants +export const NON_UNIQUE = 'non-unique'; + // Tags lengths export const TAG_LENGTHS = { [TITLE]: { diff --git a/src/metatags/handler.js b/src/metatags/handler.js index d6661a14..9dddebd3 100644 --- a/src/metatags/handler.js +++ b/src/metatags/handler.js @@ -24,7 +24,6 @@ async function fetchAndProcessPageObject(s3Client, bucketName, key, prefix, log) return null; } const pageUrl = key.slice(prefix.length - 1).replace('scrape.json', ''); // Remove the prefix and .json suffix - log.info(`Scraped tags for ${pageUrl} : ${JSON.stringify(object.scrapeResult.tags)}`); return { [pageUrl]: { title: object.scrapeResult.tags.title, diff --git a/src/metatags/seo-checks.js b/src/metatags/seo-checks.js index e3a19ddd..7bd9c97c 100644 --- a/src/metatags/seo-checks.js +++ b/src/metatags/seo-checks.js @@ -16,7 +16,7 @@ import { H1, TAG_LENGTHS, HIGH, - MODERATE, + MODERATE, NON_UNIQUE, } from './constants.js'; class SeoChecks { @@ -146,9 +146,10 @@ class SeoChecks { const tags = { [TITLE]: pageTags[TITLE], [DESCRIPTION]: pageTags[DESCRIPTION], - [H1]: Array.isArray(pageTags[H1]) ? pageTags[H1][0] : '', + [H1]: Array.isArray(pageTags[H1]) ? 
pageTags[H1] : [], }; - Object.entries(tags).forEach(([tagName, tagContent = '']) => { + [TITLE, DESCRIPTION].forEach((tagName) => { + const tagContent = tags[tagName]; if (tagContent && this.allTags[tagName][tagContent.toLowerCase()]) { this.addDetectedTagEntry( url, @@ -159,7 +160,17 @@ + `It's recommended to have unique ${tagName} tags for each page.`, ); } - this.allTags[tagName][tagContent.toLowerCase()] = url; + if (tagContent) this.allTags[tagName][tagContent.toLowerCase()] = url; + }); + tags[H1].forEach((tag) => { + this.allTags[H1][tag] ??= { count: 0, urls: [] }; + this.allTags[H1][tag].urls.push(url); + this.allTags[H1][tag].count += 1; + + if (this.allTags[H1][tag].count > 1) { + this.detectedTags[H1][NON_UNIQUE] ??= {}; + this.detectedTags[H1][NON_UNIQUE][tag] = { ...this.allTags[H1][tag] }; + } }); } diff --git a/test/audits/metatags.test.js b/test/audits/metatags.test.js index 4a49c65b..04883fc8 100644 --- a/test/audits/metatags.test.js +++ b/test/audits/metatags.test.js @@ -348,7 +348,7 @@ describe('Meta Tags', () => { ], })); expect(addAuditStub.calledOnce).to.be.true; - expect(logStub.info.callCount).to.equal(6); + expect(logStub.info.callCount).to.equal(4); }); it('should process site tags and perform SEO checks for pages with invalid H1s', async () => { @@ -454,7 +454,7 @@ ], })); expect(addAuditStub.calledOnce).to.be.true; - expect(logStub.info.callCount).to.equal(6); + expect(logStub.info.callCount).to.equal(4); }); it('should handle errors and return internalServerError', async () => {