add prebuild
MohammadPCh committed Jun 12, 2022
1 parent 79fe10d commit 2126c4d
Showing 4 changed files with 32 additions and 0 deletions.
1 change: 1 addition & 0 deletions package.json
@@ -3,6 +3,7 @@
"version": "2.0.8",
"private": true,
"scripts": {
"prebuild": "./scripts/generate-robots-txt.js",
"build": "next build",
"dev": "next dev",
"lint": "eslint . --ext .ts --ext .tsx",
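Note: npm (and Yarn 1.x) treat a script named "prebuild" as a lifecycle hook that runs automatically before "build", so no extra wiring is needed. A quick illustration with the standard npm CLI:

npm run build   # runs "prebuild" (generates public/robots.txt), then "next build"
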
File renamed without changes.
2 changes: 2 additions & 0 deletions public/noncrawlable.txt
@@ -0,0 +1,2 @@
User-agent: *
Disallow: /
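
The generator script below also expects a crawlable counterpart at public/crawlable.txt (presumably the renamed file noted above). Its contents are not part of this commit; a minimal sketch of what such a file typically looks like:

# Hypothetical public/crawlable.txt — an empty Disallow permits all crawlers
User-agent: *
Disallow:
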
29 changes: 29 additions & 0 deletions scripts/generate-robots-txt.js
@@ -0,0 +1,29 @@
const fs = require('fs');
const path = require('path');

const publicPath = path.join(__dirname, '..', 'public'); // public/ sits at the repo root, one level above scripts/

function generateRobotsTxt() {
console.log('VERCEL_ENV: ', process.env.VERCEL_ENV);
const isCrawlable = process.env.VERCEL_ENV === 'production';
// Create a non-crawlable robots.txt in non-production environments
const sourceFile = path.join(
publicPath,
isCrawlable ? 'crawlable.txt' : 'noncrawlable.txt',
);
const destFile = path.join(publicPath, 'robots.txt');

try {
// Copy the selected source file to public/robots.txt
fs.copyFileSync(sourceFile, destFile);
console.log(
`Generated a ${
isCrawlable ? 'crawlable' : 'non-crawlable'
} public/robots.txt`,
);
} catch (error) {
console.error('Could not generate public/robots.txt:', error);
}
}

generateRobotsTxt(); // run immediately; the prebuild script executes this file directly
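
To check the non-production behaviour locally, one could run the script with VERCEL_ENV unset and inspect the output (a sketch, not part of the commit):

node scripts/generate-robots-txt.js
cat public/robots.txt   # should match public/noncrawlable.txt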
