-
Notifications
You must be signed in to change notification settings - Fork 21
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
90ede5a
commit d0c09c1
Showing
10 changed files
with
3,454 additions
and
242 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
.yarn |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
# scripts |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
import path from 'path'; | ||
import glob from 'glob'; | ||
import { promisify } from 'util'; | ||
import * as fs from 'fs'; | ||
import { getLocaleFilePaths, REPO_ROOT_PATH } from './utils.js'; | ||
|
||
const promisedGlob = promisify(glob); | ||
|
||
/**
 * Reads a single translation file and folds its key/value pairs into the
 * shared accumulator object.
 *
 * @param {string} filePath - translation file path, relative to the repo root
 * @param {Record<string, string>} unifiedJson - accumulator the strings are merged into
 */
function processFile(filePath, unifiedJson) {
  const absolutePath = path.resolve(REPO_ROOT_PATH, filePath);
  const translations = JSON.parse(fs.readFileSync(absolutePath, 'utf-8'));
  for (const [key, value] of Object.entries(translations)) {
    // An empty/missing translation falls back to the key itself.
    if (!value) {
      unifiedJson[key] = key;
      continue;
    }
    // Also index the translated text by itself so lookups by value resolve.
    if (key !== value) {
      unifiedJson[value] = value;
    }
    unifiedJson[key] = value;
  }
}
|
||
/**
 * Merges every translation resource file for a project/locale pair into one
 * JSON file on disk.
 *
 * @param {string} [projectCode='core'] - project whose translations are merged
 * @param {string} [locale='en'] - locale for which translation files are read
 * @param {string} [outFile] - destination path; defaults to
 *   `<repo root>/fhir-web-<projectCode>-<locale>.json`
 */
export async function downloadStrings(projectCode = 'core', locale = 'en', outFile = undefined) {
  const mergedStrings = {};
  const localeFiles = await getLocaleFilePaths(projectCode, locale);
  localeFiles.forEach((resourceFile) => processFile(resourceFile, mergedStrings));
  const destination =
    outFile ?? path.resolve(REPO_ROOT_PATH, `fhir-web-${projectCode}-${locale}.json`);
  fs.writeFileSync(destination, JSON.stringify(mergedStrings, undefined, 2));
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,159 @@ | ||
import path from 'path'; | ||
import vfs from 'vinyl-fs'; | ||
import { transform } from 'i18next-parser'; | ||
import { promisedGlob, REPO_ROOT_PATH } from './utils.js'; | ||
import { gulp as i18nextParser } from 'i18next-parser'; | ||
import * as gulp from 'gulp'; | ||
|
||
// Project codes whose translation catalogs this tooling knows how to manage.
export const supportedProjectCodes = ['eusm', 'core', 'echis'];
// Locale codes that strings can be extracted/downloaded/uploaded for.
export const supportedLocaleCodes = ['en', 'fr', 'sw'];
|
||
// Baseline options handed to i18next-parser's transform; extractionRunner
// shallow-merges run-specific values (locales, input, output, namespace, ...)
// over this object.
const defaultParserConfigs = {
  // NOTE(review): SCREAMING_CASE key is not a standard i18next-parser option —
  // presumably ignored by the parser; confirm before relying on it.
  FALLBACK_LOCALE: 'en',
  contextSeparator: '_',
  // Do not keep `_old` catalogs of removed keys unless overridden per run.
  createOldCatalogs: false,
  defaultNamespace: 'translation',
  // Newly discovered keys start out with an empty string value.
  defaultValue: '',
  indentation: 2,
  keepRemoved: false,
  // `false` disables key nesting, so dots in keys are treated literally.
  keySeparator: false,
  // Lexer selection per source-file extension.
  lexers: {
    hbs: ['HandlebarsLexer'],
    handlebars: [
      {
        lexer: 'HandlebarsLexer',
        functions: ['t'],
      },
    ],
    htm: ['HTMLLexer'],
    html: ['HTMLLexer'],
    mjs: ['JavascriptLexer'],
    js: ['JavascriptLexer'],
    ts: ['JavascriptLexer'],
    jsx: ['JsxLexer'],
    tsx: ['JsxLexer'],
    default: ['JavascriptLexer'],
  },
  lineEnding: 'auto',
  // Placeholder; the real locale list is injected per run in extractionRunner.
  locales: [''],
  namespaceSeparator: '::',
  // Placeholders; real output template / input glob are injected per run.
  output: '',
  input: undefined,
  reactNamespace: false,
  sort: true,
  useKeysAsDefaultValue: false,
  verbose: false,
};
|
||
/** get workTree folder paths (app plus every packages/* folder) relative to
 * the repo root directory.
 *
 * @returns {Promise<string[]>} matching folder paths; empty array on glob failure
 */
const getPackagesFolderPaths = async () => {
  const workspacesGlob = '{app,packages/*}';
  return promisedGlob(workspacesGlob, { cwd: REPO_ROOT_PATH }).catch((err) => {
    // Best-effort: fall back to "no packages", but surface the reason instead
    // of swallowing the error silently.
    console.error(`Unable to list package folders: ${err}`);
    return [];
  });
};
|
||
/**
 * Checks that cli provided package folders are actually valid
 *
 * @param {string[]} packageFolders - cli provided folder names
 * @param {string[]} allValidPackages - all valid semi paths (package paths
 *   relative to the repo root directory)
 * @returns {string[]} semi paths for the recognized folder names; unknown
 *   names are reported on stderr and dropped
 */
const filterValidPackageFolders = (packageFolders, allValidPackages) => {
  // Map from folder basename -> semiPath. A Map (instead of a plain object)
  // avoids false hits on Object.prototype keys, e.g. a cli arg of
  // "constructor" or "toString" previously resolved to a prototype member.
  const semiPathByName = new Map();
  allValidPackages.forEach((semiPath) => {
    const parts = semiPath.split(path.sep);
    semiPathByName.set(parts[parts.length - 1], semiPath);
  });
  const validPaths = [];
  const invalidPaths = [];
  packageFolders.forEach((packageFolderName) => {
    const foundSemiPath = semiPathByName.get(packageFolderName);
    if (foundSemiPath) {
      validPaths.push(foundSemiPath);
    } else {
      invalidPaths.push(packageFolderName);
    }
  });
  if (invalidPaths.length > 0) {
    console.error(`These folders were not found: ${invalidPaths.join(', ')}`);
  }
  return validPaths;
};
|
||
/**
 * Runs i18next-parser over each package folder and writes the extracted
 * string catalogs into the i18n package's locale tree.
 *
 * @param {object} argvConfigs - parsed cli arguments for the extract command
 * @param {string[]} packageSemiPaths - package paths relative to the repo root
 */
const extractionRunner = (argvConfigs, packageSemiPaths) => {
  // NOTE(review): debug leftover — consider removing or gating behind verbose.
  console.log({ argvConfigs });
  const { packages, locales, keyAsDefault, preserve, project, verbose, outputNamespace } =
    argvConfigs;
  // NOTE(review): `count` is shared across all packages, so each stream's
  // 'finish' log reports a cumulative total — confirm that is intended.
  let count = 0;
  packageSemiPaths.forEach((packageSemiPath) => {
    const packageName = path.basename(packageSemiPath);
    // Catalogs are namespaced by package name unless overridden via cli.
    const namespace = outputNamespace ? outputNamespace : packageName;
    // Source files to parse: skip node_modules/dist/build and tests folders.
    const inputFilesGlob = `${path.resolve(
      REPO_ROOT_PATH,
      packageSemiPath
    )}/!(node_modules|dist|build)/**/!(tests)/*.@(tsx|ts|js|jsx)`;
    // $NAMESPACE and $LOCALE are template placeholders substituted by
    // i18next-parser for each generated catalog file.
    const outputPath = path.resolve(
      REPO_ROOT_PATH,
      `packages/i18n/locales/${project}/$NAMESPACE/$LOCALE.json`
    );
    const parserConfigs = {
      ...defaultParserConfigs,
      defaultNamespace: namespace,
      locales,
      verbose: !!verbose,
      useKeysAsDefaultValue: !!keyAsDefault,
      createOldCatalogs: !!preserve,
      input: inputFilesGlob,
      output: outputPath,
    };

    // Stream the matched sources through the parser, logging progress events.
    vfs
      .src(inputFilesGlob)
      .pipe(
        new transform(parserConfigs)
          .on('reading', function (file) {
            console.log(` [read] ${file.path}`);
            count++;
          })
          .on('data', function (file) {
            console.log(` [write] ${file.path}`);
          })
          .on('error', function (message, region) {
            message += `: ${region?.trim()}`;
            console.log(` [error] ${message}`);
          })
          .on('warning', function (message) {
            console.log(` [warning] ${message}`);
          })
          .on('finish', function () {
            console.log();
            console.log(` Stats: ${count} files were parsed`);
          })
      )
      // NOTE(review): dest base is cwd, while the output template above is an
      // absolute path — confirm catalogs land under packages/i18n as expected.
      .pipe(vfs.dest(process.cwd()));
  });
};
|
||
/**
 * Entry point for the `extract` command: resolves which package folders to
 * scan and runs the i18next string extraction over them.
 *
 * @param {object} argv - parsed cli arguments for the extract command
 * @param {string[]} [argv.packages] - folder names to restrict extraction to;
 *   when absent or empty, all workspace packages are extracted
 * @param {string} [argv.outputNamespace] - namespace override, only valid when
 *   extracting from a single package
 * @throws {Error} when a namespace override is combined with multiple packages
 */
export async function runExtractions(argv) {
  // Default to [] so a missing `packages` arg means "all packages" instead of
  // throwing a TypeError on `packages.length` below.
  const { packages = [], outputNamespace } = argv;
  const allPackageFolders = await getPackagesFolderPaths();

  if (packages.length === 0) {
    console.log('Package Folders not specified, Running extraction on all packages');
  }

  let packageFoldersToExtract = allPackageFolders;
  if (packages.length > 0) {
    packageFoldersToExtract = filterValidPackageFolders(packages, allPackageFolders);
  }

  if (outputNamespace && packages.length > 1) {
    throw Error('Can only provide a namespace override when extracting from a single package only');
  }

  extractionRunner(argv, packageFoldersToExtract);
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,111 @@ | ||
#!/usr/bin/env node
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import { downloadStrings } from './downloadMerged.js';
import { uploadTranslations } from './uploadMerged.js';
import { runExtractions, supportedProjectCodes, supportedLocaleCodes } from './extract.js';

// Cli entry point: wires the `extract`, `download` and `upload` commands to
// their implementations, sharing the global `--project` option.
yargs(hideBin(process.argv))
  .options({
    project: {
      alias: 'p',
      describe: 'Code for project that own/consume translations',
      choices: supportedProjectCodes,
      default: 'core',
    },
  })
  .command(
    'extract [packages...]',
    'Gets all translatable strings and writes them to the i18n package',
    (yargs) => {
      yargs
        .positional('packages', {
          describe: 'A list of folders from which to parse for translatable strings',
          type: 'string',
          demandOption: true,
        })
        .options({
          locales: {
            alias: 'l',
            describe: 'Extracted strings will be generated for this locale(s)',
            choices: supportedLocaleCodes,
            type: 'array',
            default: ['en'],
          },
          'key-as-default': {
            default: false,
            describe: 'Duplicate key to also be value',
            type: 'boolean',
            alias: 'k',
          },
          preserve: {
            default: false,
            describe: 'Retain removed translations in separate json files',
            type: 'boolean',
            alias: 'pr',
          },
          'output-namespace': {
            // Fixed: previously `default: false` leaked a boolean into this
            // string-typed option; leaving it undefined keeps the
            // "no override" semantics downstream.
            describe:
              'Manually override the namespace/package under which the translations will be written to',
            type: 'string',
            alias: 'on',
          },
        });
    },
    async (argv) => {
      await runExtractions(argv);
    }
  )
  .command(
    'download',
    'Merges all generated translatable strings into a single file',
    (yargs) => {
      yargs.options({
        out: {
          describe: 'Where to write the merged strings into',
          type: 'string',
        },
        locale: {
          choices: supportedLocaleCodes,
          describe: 'Locale for which to download translation files for',
          type: 'string',
          // Fixed: default was `['en']` (an array) for a string-typed option.
          default: 'en',
        },
      });
    },
    async (argv) => {
      const { project, out, locale } = argv;
      await downloadStrings(project, locale, out);
    }
  )
  .command(
    'upload',
    'Takes a merged translation file and expands the strings into the i18n package',
    (yargs) => {
      yargs.options({
        tfile: {
          describe: 'File with merged translations',
          type: 'string',
          demandOption: true,
        },
        locales: {
          alias: 'l',
          describe: 'Extracted strings will be generated for this locale(s)',
          choices: supportedLocaleCodes,
          type: 'array',
          default: ['en'],
        },
      });
    },
    async (argv) => {
      // Fixed: the option is declared `locales`; destructuring `locale`
      // always yielded undefined, so uploadTranslations never saw a locale.
      const { project, tfile, locales } = argv;
      await uploadTranslations(tfile, project, locales);
    }
  )
  .demandCommand(1, 'You need to specify at least one command')
  .help()
  .strict()
  .parse();
Oops, something went wrong.