first load
256  bin/extract-strings.mjs  Normal file
@@ -0,0 +1,256 @@
/* eslint-env node */
/**
 * This script is used for the internal Zendesk translation system, and it creates or updates the source YAML file for translations,
 * extracting the strings from the source code.
 *
 * It searches for i18next calls (`{t("my-key", "My value")}`) and adds the strings to the specified YAML file
 * (if not already present) with empty title and screenshot.
 * ```yaml
 * - translation:
 *     key: "my-key"
 *     title: ""
 *     screenshot: ""
 *     value: "My value"
 * ```
 *
 * If a string present in the source YAML file is not found in the source code, it will be marked as obsolete if the
 * `--mark-obsolete` flag is passed.
 *
 * If the value in the YAML file differs from the value in the source code, a warning will be printed in the console,
 * since the script cannot know which one is correct and cannot write back to the source code files. This can happen,
 * for example, after a "reverse string sweep", and can eventually be fixed manually.
 *
 * The script uses the i18next-parser library for extracting the strings and adds a custom transformer for creating
 * the file in the required YAML format.
 *
 * For usage instructions, run `node extract-strings.mjs --help`
 */
import vfs from "vinyl-fs";
import Vinyl from "vinyl";
import { transform as I18NextTransform } from "i18next-parser";
import { Transform } from "node:stream";
import { readFileSync } from "node:fs";
import { load, dump } from "js-yaml";
import { resolve } from "node:path";
import { glob } from "glob";
import { parseArgs } from "node:util";

const { values: args } = parseArgs({
  options: {
    "mark-obsolete": {
      type: "boolean",
    },
    module: {
      type: "string",
    },
    help: {
      type: "boolean",
    },
  },
});

if (args.help) {
  const helpMessage = `
Usage: extract-strings.mjs [options]

Options:
  --mark-obsolete   Mark removed strings as obsolete in the source YAML file
  --module          Extract strings only for the specified module. The module name should match the folder name in the src/modules folder.
                    If not specified, the script will extract strings for all modules
  --help            Display this help message

Examples:
  node extract-strings.mjs
  node extract-strings.mjs --mark-obsolete
  node extract-strings.mjs --module=ticket-fields
`;

  console.log(helpMessage);
  process.exit(0);
}

const OUTPUT_YML_FILE_NAME = "en-us.yml";

const OUTPUT_BANNER = `#
# This file is used for the internal Zendesk translation system, and it is generated by the extract-strings.mjs script.
# It contains the English strings to be translated, which are used for generating the JSON files containing the translation strings
# for each language.
#
# If you are building your own theme, you can remove this file and just load the translation JSON files, or provide
# your translations with a different method.
#
`;

/** @type {import("i18next-parser").UserConfig} */
const config = {
  // Our translation system requires that we add all 6 forms (zero, one, two, few, many, other) keys for plurals.
  // i18next-parser extracts plural keys based on the target locale, so we are passing a
  // locale that needs exactly these 6 forms, even if we are extracting English strings
  locales: ["ar"],
  keySeparator: false,
  namespaceSeparator: false,
  pluralSeparator: ".",
  // This path is used only as a Virtual FS path by i18next-parser, and it doesn't get written on the FS
  output: "locales/en.json",
};

class SourceYmlTransform extends Transform {
  #parsedInitialContent;
  #outputDir;

  #counters = {
    added: 0,
    obsolete: 0,
    mismatch: 0,
  };

  constructor(outputDir) {
    super({ objectMode: true });

    this.#outputDir = outputDir;
    this.#parsedInitialContent = this.#getSourceYmlContent();
  }

  _transform(file, encoding, done) {
    try {
      const strings = JSON.parse(file.contents.toString(encoding));

      const outputContent = {
        ...this.#parsedInitialContent,
        parts: this.#parsedInitialContent.parts || [],
      };

      // Find obsolete keys
      for (const { translation } of outputContent.parts) {
        if (!(translation.key in strings) && !translation.obsolete) {
          this.#counters.obsolete++;

          if (args["mark-obsolete"]) {
            translation.obsolete = this.#getObsoleteDate();
          }
        }
      }

      // Add new keys to the source YAML or log mismatched values
      for (let [key, value] of Object.entries(strings)) {
        value = this.#fixPluralValue(key, value, strings);

        const existingPart = outputContent.parts.find(
          (part) => part.translation.key === key
        );

        if (existingPart === undefined) {
          outputContent.parts.push({
            translation: {
              key,
              title: "",
              screenshot: "",
              value,
            },
          });
          this.#counters.added++;
        } else if (value !== existingPart.translation.value) {
          console.warn(
            `\nFound a mismatched value for the key "${key}".\n\tSource code value: ${value}\n\tTranslation file value: ${existingPart.translation.value}`
          );
          this.#counters.mismatch++;
        }
      }

      const virtualFile = new Vinyl({
        path: OUTPUT_YML_FILE_NAME,
        contents: Buffer.from(
          OUTPUT_BANNER +
            "\n" +
            dump(outputContent, { quotingType: `"`, forceQuotes: true })
        ),
      });
      this.push(virtualFile);
      this.#printInfo();
      done();
    } catch (e) {
      console.error(e);
      process.exit(1);
    }
  }

  #getSourceYmlContent() {
    const outputPath = resolve(this.#outputDir, OUTPUT_YML_FILE_NAME);
    return load(readFileSync(outputPath, "utf-8"));
  }

  #getObsoleteDate() {
    const today = new Date();
    const obsolete = new Date(today.setMonth(today.getMonth() + 3));
    return obsolete.toISOString().split("T")[0];
  }

  #printInfo() {
    const message = `Package ${this.#parsedInitialContent.packages[0]}
Added strings: ${this.#counters.added}
${this.#getObsoleteInfoMessage()}
Strings with mismatched value: ${this.#counters.mismatch}
`;

    console.log(message);
  }

  #getObsoleteInfoMessage() {
    if (args["mark-obsolete"]) {
      return `Removed strings (marked as obsolete): ${this.#counters.obsolete}`;
    }

    let result = `Obsolete strings: ${this.#counters.obsolete}`;
    if (this.#counters.obsolete > 0) {
      result += " - Use --mark-obsolete to mark them as obsolete";
    }

    return result;
  }

  // If the key ends with .zero, .one, .two, .few or .many and the value is empty,
  // find the same key with the `.other` suffix in strings and return its value
  #fixPluralValue(key, value, strings) {
    if (key.endsWith(".zero") && value === "") {
      return strings[key.replace(".zero", ".other")] || "";
    }

    if (key.endsWith(".one") && value === "") {
      return strings[key.replace(".one", ".other")] || "";
    }

    if (key.endsWith(".two") && value === "") {
      return strings[key.replace(".two", ".other")] || "";
    }

    if (key.endsWith(".few") && value === "") {
      return strings[key.replace(".few", ".other")] || "";
    }

    if (key.endsWith(".many") && value === "") {
      return strings[key.replace(".many", ".other")] || "";
    }

    return value;
  }
}

const sourceFilesGlob = args.module
  ? `src/modules/${args.module}/translations/en-us.yml`
  : "src/modules/**/translations/en-us.yml";

const sourceFiles = await glob(sourceFilesGlob);
for (const sourceFile of sourceFiles) {
  const moduleName = sourceFile.split("/")[2];
  const inputGlob = `src/modules/${moduleName}/**/*.{ts,tsx}`;
  const outputDir = resolve(
    process.cwd(),
    `src/modules/${moduleName}/translations`
  );

  vfs
    .src([inputGlob])
    .pipe(new I18NextTransform(config))
    .pipe(new SourceYmlTransform(outputDir))
    .pipe(vfs.dest(outputDir));
}
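For illustration, the end-to-end shape of an extraction looks roughly like this; the component, key, and value below are made up, only the call pattern and the YAML structure come from the script above:

// Hypothetical call in src/modules/example-module/SomeComponent.tsx:
//   <span>{t("example-module.greeting", "Hello world")}</span>
//
// Entry the transformer would append to src/modules/example-module/translations/en-us.yml:
//   - translation:
//       key: "example-module.greeting"
//       title: ""
//       screenshot: ""
//       value: "Hello world"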
36  bin/lighthouse/account.js  Normal file
@@ -0,0 +1,36 @@
/**
 * Reads account information from env variables or .a11yrc.json file
 */
const fs = require("fs");

let a11yAccount = {};
const a11yrcFilePath = ".a11yrc.json";

if (fs.existsSync(a11yrcFilePath)) {
  a11yAccount = JSON.parse(fs.readFileSync(a11yrcFilePath));
}

function isValid(account) {
  return account.subdomain && account.email && account.password;
}

function getAccount() {
  // Reads account from the env or .a11yrc.json file if present
  let account = {
    subdomain: process.env.subdomain || a11yAccount.subdomain,
    email: process.env.end_user_email || a11yAccount.username,
    password: process.env.end_user_password || a11yAccount.password,
    urls: process.env?.urls?.trim()?.split(/\s+/) || a11yAccount.urls
  };

  if (!isValid(account)) {
    console.error(
      "No account specified. Please create a .a11yrc.json file or set subdomain, end_user_email and end_user_password as environment variables"
    );
    process.exit(1);
  }

  return account;
}

module.exports = getAccount;
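A minimal .a11yrc.json sketch for reference; the key names (`subdomain`, `username`, `password`, `urls`) are inferred from the reads above, and the values are placeholders. `urls` is optional, since index.js falls back to building the list from the API when it is missing.

// .a11yrc.json (illustrative values only)
// {
//   "subdomain": "mycompany",
//   "username": "end-user@example.com",
//   "password": "secret",
//   "urls": ["https://mycompany.zendesk.com/hc/en-us"]
// }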
78  bin/lighthouse/config.js  Normal file
@@ -0,0 +1,78 @@
/**
 * Sets lighthouse configuration
 */
module.exports = {
  lighthouse: {
    extends: "lighthouse:default",
    settings: {
      maxWaitForLoad: 10000,
      onlyCategories: ["accessibility"],
      disableFullPageScreenshot: true,
    },
  },
  // custom properties
  custom: {
    // turn audit errors into warnings
    ignore: {
      tabindex: [
        {
          path: "*",
          selector: "body.community-enabled > a.skip-navigation",
        },
      ],
      "link-in-text-block": [
        {
          path: "/hc/:locale/community/topics/360000644279",
          selector:
            "li > section.striped-list-item > span.striped-list-info > a#title-360006766859",
        },
        {
          path: "/hc/:locale/community/topics/360000644279",
          selector:
            "li > section.striped-list-item > span.striped-list-info > a#title-360006766799",
        },
        {
          path: "/hc/:locale/community/posts",
          selector:
            "li > section.striped-list-item > span.striped-list-info > a#title-360006766799",
        },
        {
          path: "/hc/:locale/community/posts",
          selector:
            "li > section.striped-list-item > span.striped-list-info > a#title-360006766859",
        },
      ],
      "aria-allowed-attr": [
        {
          path: "/hc/:locale/community/posts/new",
          selector:
            "div#main-content > form.new_community_post > div.form-field > a.nesty-input",
        },
      ],
      "td-has-header": [
        {
          path: "/hc/:locale/subscriptions",
          selector: "main > div.container > div#main-content > table.table",
        },
      ],
      "label-content-name-mismatch": [
        {
          path: "/hc/:locale/articles/:id",
          selector:
            "footer > div.article-votes > div.article-votes-controls > button.button",
        },
      ],
      "target-size": [
        {
          path: "/hc/:locale/search",
          selector:
            "header > div.search-result-title-container > h2.search-result-title > a",
        },
        {
          path: "/hc/:locale/search",
          selector: "nav > ol.breadcrumbs > li > a",
        },
      ],
    },
  },
};
7  bin/lighthouse/constants.js  Normal file
@@ -0,0 +1,7 @@
module.exports = Object.freeze({
  ERROR: 0,
  SUCCESS: 1,
  WARNING: 2,
  SKIPPED: 3,
  UNKNOWN: 4
});
119  bin/lighthouse/index.js  Normal file
@@ -0,0 +1,119 @@
require("dotenv").config();
const lighthouse = require("lighthouse/core/index.cjs");
const puppeteer = require("puppeteer");
const config = require("./config");
const getAccount = require("./account");
const buildUrlsFromAPI = require("./urls");
const login = require("./login");
const processResults = require("./processor");
const { ERROR, WARNING } = require("./constants");

const outputAudit = (
  { title, description, url, id, selector, snippet, explanation },
  emoji
) => {
  console.log("");
  console.log(`${emoji} ${id}: ${title}`);
  console.log(description, "\n");
  console.log({ url, selector, snippet }, "\n");
  console.log(explanation, "\n");
};

(async () => {
  // TODO: If dev, check if zcli is running
  const isDev = process.argv[2] === "-d";
  const results = {
    stats: {},
    audits: [],
  };

  // Reading account
  console.log("Reading account...", "\n");
  const account = getAccount();

  // Build list of urls to audit
  if (!account.urls || account.urls.length === 0) {
    console.log(
      "No urls were found in .a11yrc.json or as an env variable. Building urls from the API...",
      "\n"
    );
    account.urls = await buildUrlsFromAPI(account);
  }

  // Set login URL
  account.loginUrl = isDev
    ? `https://${account.subdomain}.zendesk.com/hc/admin/local_preview/start`
    : `https://${account.subdomain}.zendesk.com/hc/en-us/signin`;

  // Output account
  console.log("Account:");
  console.log(
    {
      subdomain: account.subdomain,
      email: account.email,
      urls: account.urls,
      loginUrl: account.loginUrl,
    },
    "\n"
  );

  // Use Puppeteer to launch headless Chrome
  console.log("Starting headless Chrome...");
  const browser = await puppeteer.launch({ headless: true });

  // Login
  console.log(`Logging in using ${account.loginUrl}...`, "\n");
  await login(browser, account);

  // Run lighthouse on all pages
  for (let url of account.urls) {
    console.log(`Running lighthouse on ${url}`);

    const page = await browser.newPage();

    const { lhr } = await lighthouse(
      url,
      { port: new URL(browser.wsEndpoint()).port, logLevel: "silent" },
      config.lighthouse,
      page
    );

    // Output run warnings
    lhr.runWarnings.forEach((message) => console.warn(message));

    // Analyze / process results
    const { stats, audits } = processResults(lhr);
    results.stats[url] = stats;
    results.audits = [...results.audits, ...audits];

    console.log("");
  }

  // Close browser
  await browser.close();

  // Output table with stats for each url
  console.table(results.stats);

  // Output warnings
  const warnings = results.audits.filter((audit) => audit.result === WARNING);
  warnings.forEach((audit) => outputAudit(audit, "⚠️"));

  // Output errors
  const errors = results.audits.filter((audit) => audit.result === ERROR);
  errors.forEach((audit) => outputAudit(audit, "❌"));

  // Output totals
  console.log(
    "\n",
    `Total of ${errors.length} errors and ${warnings.length} warnings`,
    "\n"
  );

  // Exit with error if there is at least one audit with an error
  if (errors.length > 0) {
    process.exit(1);
  } else {
    process.exit(0);
  }
})();
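As a rough sketch, `results.stats` (and therefore the console.table output) ends up keyed by audited URL, with the counters defined in processor.js; the URL and the numbers below are invented.

// {
//   "https://mycompany.zendesk.com/hc/en-us": {
//     success: 52,
//     error: 1,
//     skipped: 14,
//     warning: 2,
//     unknown: 0,
//     score: 0.96,
//   },
// }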
16  bin/lighthouse/login.js  Normal file
@@ -0,0 +1,16 @@
async function login(browser, account) {
  const { email, password, loginUrl } = account;

  const page = await browser.newPage();
  await page.goto(loginUrl);
  await page.waitForSelector("input#user_email", { visible: true });
  await page.type("input#user_email", email);
  await page.type("input#user_password", password);
  await Promise.all([
    page.click("#sign-in-submit-button"),
    page.waitForNavigation(),
  ]);
  await page.close();
}

module.exports = login;
84  bin/lighthouse/processor.js  Normal file
@@ -0,0 +1,84 @@
/**
 * Filters and maps lighthouse results for a simplified output
 * If an error should be ignored, it will be converted into a warning
 */
const UrlPattern = require("url-pattern");
const config = require("./config");
const { ERROR, WARNING, SKIPPED, SUCCESS, UNKNOWN } = require("./constants");

function shouldIgnoreError(auditId, url, selector) {
  const path = new URL(url).pathname;

  return config.custom.ignore[auditId]?.some((ignore) => {
    const pattern = new UrlPattern(ignore.path);
    return Boolean(pattern.match(path)) && selector === ignore.selector;
  });
}

function processResults(lhr) {
  const url = lhr.mainDocumentUrl;
  const pageScore = lhr.categories.accessibility.score;

  const audits = Object.values(lhr.audits)
    // filter and flatten data
    .map(({ id, title, description, score, details }) => {
      const newItem = {
        id,
        url,
        title,
        description,
        score,
      };

      return details === undefined || details.items.length === 0
        ? newItem
        : details.items.map((item) => ({
            ...newItem,
            selector: item.node.selector,
            snippet: item.node.snippet,
            explanation: item.node.explanation,
          }));
    })
    .flat()
    // map lighthouse score to a result
    .map(({ score, ...audit }) => {
      const newItem = { ...audit };

      switch (score) {
        case 1:
          newItem.result = SUCCESS;
          break;
        case 0:
          newItem.result = shouldIgnoreError(
            audit.id,
            audit.url,
            audit.selector
          )
            ? WARNING
            : ERROR;
          break;
        case null:
          newItem.result = SKIPPED;
          break;
        default:
          console.error(`Error: unexpected score for audit ${audit.id}`);
          newItem.result = UNKNOWN;
      }

      return newItem;
    });

  return {
    audits,
    stats: {
      success: audits.filter((audit) => audit.result === SUCCESS).length,
      error: audits.filter((audit) => audit.result === ERROR).length,
      skipped: audits.filter((audit) => audit.result === SKIPPED).length,
      warning: audits.filter((audit) => audit.result === WARNING).length,
      unknown: audits.filter((audit) => audit.result === UNKNOWN).length,
      score: pageScore
    },
  };
}

module.exports = processResults;
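A small sketch of how an ignore rule matches in shouldIgnoreError: url-pattern treats a segment like `:locale` as a wildcard, `match()` returns an object on a hit and null otherwise, and the selector must be an exact string match. The matching path below reuses an entry from config.js; the non-matching one is just an example.

// const UrlPattern = require("url-pattern");
// const pattern = new UrlPattern("/hc/:locale/subscriptions");
// pattern.match("/hc/en-us/subscriptions"); // => { locale: "en-us" } (truthy, so the error becomes a warning)
// pattern.match("/hc/en-us/requests"); // => null (rule does not apply)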
167  bin/lighthouse/urls.js  Normal file
@@ -0,0 +1,167 @@
/**
 * Builds urls to audit by fetching the required ids
 * The account's API should have basic authentication enabled
 */
const fetch = require("node-fetch");

const fetchCategoryId = async (subdomain) => {
  const response = await fetch(
    `https://${subdomain}.zendesk.com/api/v2/help_center/categories`
  );
  const data = await response.json();
  const categoryId = data?.categories[0]?.id;

  if (!categoryId) {
    throw new Error(
      "No category found. Please make sure the account has at least one category."
    );
  }

  return categoryId;
};

const fetchSectionId = async (subdomain) => {
  const response = await fetch(
    `https://${subdomain}.zendesk.com/api/v2/help_center/sections`
  );
  const data = await response.json();
  const sectionId = data?.sections[0]?.id;

  if (!sectionId) {
    throw new Error(
      "No section found. Please make sure the account has at least one section."
    );
  }

  return sectionId;
};

const fetchArticleId = async (subdomain) => {
  const response = await fetch(
    `https://${subdomain}.zendesk.com/api/v2/help_center/articles`
  );
  const data = await response.json();
  const articleId = data?.articles[0]?.id;

  if (!articleId) {
    throw new Error(
      "No article found. Please make sure the account has at least one article."
    );
  }

  return articleId;
};

const fetchTopicId = async (subdomain) => {
  const response = await fetch(
    `https://${subdomain}.zendesk.com/api/v2/community/topics`
  );
  const data = await response.json();
  const topicId = data?.topics[0]?.id;

  if (!topicId) {
    throw new Error(
      "No community topic found. Please make sure the account has at least one community topic."
    );
  }

  return topicId;
};

const fetchPostId = async (subdomain) => {
  const response = await fetch(
    `https://${subdomain}.zendesk.com/api/v2/community/posts`
  );
  const data = await response.json();
  const postId = data?.posts[0]?.id;

  if (!postId) {
    throw new Error(
      "No community post found. Please make sure the account has at least one community post."
    );
  }

  return postId;
};

const fetchUserId = async (subdomain, email, password) => {
  const response = await fetch(
    `https://${subdomain}.zendesk.com/api/v2/users/me`,
    {
      headers: {
        authorization: `Basic ${Buffer.from(
          `${email}:${password}`,
          "binary"
        ).toString("base64")}`,
      },
    }
  );
  const data = await response.json();
  const userId = data?.user?.id;

  if (!userId) {
    throw new Error(
      "Fetching the user id failed. Please make sure this account's API has password access enabled. [Learn more](https://developer.zendesk.com/api-reference/introduction/security-and-auth/#basic-authentication)"
    );
  }

  return userId;
};

const fetchRequestId = async (subdomain, email, password) => {
  const response = await fetch(
    `https://${subdomain}.zendesk.com/api/v2/requests`,
    {
      headers: {
        authorization: `Basic ${Buffer.from(
          `${email}:${password}`,
          "binary"
        ).toString("base64")}`,
      },
    }
  );
  const data = await response.json();
  const requestId = data?.requests?.[0]?.id;

  if (!requestId) {
    throw new Error(
      "No request id found. Please make sure the user has at least one request and this account's API has password access enabled. [Learn more](https://developer.zendesk.com/api-reference/introduction/security-and-auth/#basic-authentication)"
    );
  }

  return requestId;
};

const buildUrlsFromAPI = async ({ subdomain, email, password }) => {
  const [categoryId, sectionId, articleId, topicId, postId, userId, requestId] =
    await Promise.all([
      fetchCategoryId(subdomain),
      fetchSectionId(subdomain),
      fetchArticleId(subdomain),
      fetchTopicId(subdomain),
      fetchPostId(subdomain),
      fetchUserId(subdomain, email, password),
      fetchRequestId(subdomain, email, password),
    ]);

  return [
    `https://${subdomain}.zendesk.com/hc/en-us`,
    `https://${subdomain}.zendesk.com/hc/en-us/categories/${categoryId}`,
    `https://${subdomain}.zendesk.com/hc/en-us/sections/${sectionId}`,
    `https://${subdomain}.zendesk.com/hc/en-us/articles/${articleId}`,
    `https://${subdomain}.zendesk.com/hc/en-us/requests/new`,
    `https://${subdomain}.zendesk.com/hc/en-us/search?utf8=%E2%9C%93&query=Help+Center`,
    `https://${subdomain}.zendesk.com/hc/en-us/community/topics`,
    `https://${subdomain}.zendesk.com/hc/en-us/community/topics/${topicId}`,
    `https://${subdomain}.zendesk.com/hc/en-us/community/posts`,
    `https://${subdomain}.zendesk.com/hc/en-us/community/posts/${postId}`,
    `https://${subdomain}.zendesk.com/hc/en-us/profiles/${userId}`,
    `https://${subdomain}.zendesk.com/hc/contributions/posts?locale=en-us`,
    `https://${subdomain}.zendesk.com/hc/en-us/subscriptions`,
    `https://${subdomain}.zendesk.com/hc/en-us/requests`,
    `https://${subdomain}.zendesk.com/hc/en-us/requests/${requestId}`,
    `https://${subdomain}.zendesk.com/hc/en-us/community/posts/new`,
  ];
};

module.exports = buildUrlsFromAPI;
25  bin/theme-upload.js  Normal file
@@ -0,0 +1,25 @@
/* eslint-disable @typescript-eslint/no-var-requires */
/* eslint-env node */
const brandId = process.env.BRAND_ID;
const { execSync } = require("child_process");

function zcli(command) {
  try {
    const data = execSync(`yarn zcli ${command} --json`);
    return JSON.parse(data.toString());
  } catch (e) {
    console.error(e.message);
    console.error(e.stdout.toString());
    process.exit(1);
  }
}

const { themeId } = zcli(`themes:import --brandId=${brandId}`);

zcli(`themes:publish --themeId=${themeId}`);

const { themes } = zcli(`themes:list --brandId=${brandId}`);

for (const { live, id } of themes) {
  if (!live) zcli(`themes:delete --themeId=${id}`);
}
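The JSON shapes below are inferred from how the script destructures the zcli output, not taken from zcli documentation, and the ids are placeholders:

// yarn zcli themes:import --brandId=<brandId> --json   // assumed to return { "themeId": "1234567890" }
// yarn zcli themes:list --brandId=<brandId> --json     // assumed to return { "themes": [{ "id": "1234567890", "live": true }] }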
10  bin/update-manifest-version.sh  Normal file
@@ -0,0 +1,10 @@
#!/bin/sh

# EXIT ON ERROR
set -e

# UPDATE MANIFEST VERSION
NEW_VERSION=$1
mv manifest.json manifest.temp.json
jq -r '.version |= "'${NEW_VERSION}'"' manifest.temp.json > manifest.json
rm manifest.temp.json
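Usage is simply `sh bin/update-manifest-version.sh <new-version>` (for example `1.2.3`; the version string here is only illustrative): the script moves `manifest.json` aside to `manifest.temp.json`, rewrites its `version` field with jq, and removes the temporary file.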
81  bin/update-modules-translations.mjs  Normal file
@@ -0,0 +1,81 @@
/* eslint-env node */
/**
 * This script downloads the latest Zendesk official translation files for the modules in the `src/modules` folder.
 */
import { writeFile, readFile, mkdir } from "node:fs/promises";
import { resolve } from "node:path";
import { glob } from "glob";
import { load } from "js-yaml";
import { parseArgs } from "node:util";

const BASE_URL = `https://static.zdassets.com/translations`;

const { values: args } = parseArgs({
  options: {
    module: {
      type: "string",
    },
  },
});

async function writeLocaleFile(name, filePath, outputDir) {
  const response = await fetch(`${BASE_URL}${filePath}`);
  const { translations } = await response.json();
  const outputPath = resolve(outputDir, `${name.toLocaleLowerCase()}.json`);
  await writeFile(
    outputPath,
    JSON.stringify(translations, null, 2) + "\n",
    "utf-8"
  );
}

async function fetchModuleTranslations(moduleName, packageName) {
  try {
    const manifestResponse = await fetch(
      `${BASE_URL}/${packageName}/manifest.json`
    );
    console.log(`Downloading translations for ${moduleName}...`);

    const outputDir = resolve(
      process.cwd(),
      `src/modules/${moduleName}/translations/locales`
    );

    await mkdir(outputDir, { recursive: true });

    const { json } = await manifestResponse.json();

    await Promise.all(
      json.map(({ name, path }) => writeLocaleFile(name, path, outputDir))
    );

    console.log(`Downloaded ${json.length} files.`);
  } catch (e) {
    console.log(`Error downloading translations for ${moduleName}: ${e}`);
  }
}

// Search for `src/modules/**/translations/en-us.yml` files, read their contents, and return a map of module names to package names
async function getModules() {
  const result = {};
  const sourceFilesGlob = args.module
    ? `src/modules/${args.module}/translations/en-us.yml`
    : "src/modules/**/translations/en-us.yml";
  const files = await glob(sourceFilesGlob);

  for (const file of files) {
    const content = await readFile(file);
    const parsedContent = load(content);
    const moduleName = file.split("/")[2];
    result[moduleName] = parsedContent.packages[0];
  }

  return result;
}

const modules = await getModules();

for (const [moduleName, packageName] of Object.entries(modules)) {
  await fetchModuleTranslations(moduleName, packageName);
}
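A sketch of the translation manifest this script assumes, inferred from the destructuring above; the package name, locale names, and file paths are illustrative only:

// GET `${BASE_URL}/<package-name>/manifest.json` is expected to return roughly:
// {
//   "json": [
//     { "name": "de", "path": "/<package-name>/de.json" },
//     { "name": "fr", "path": "/<package-name>/fr.json" }
//   ]
// }
// and each referenced file is expected to contain { "translations": { "<key>": "<translated value>" } }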
77  bin/update-translations.js  Normal file
@@ -0,0 +1,77 @@
#!/usr/bin/env node

const fetch = require("node-fetch");
const path = require("path");
const fs = require("fs");

const ZSAP_BASE = `https://static.zdassets.com/translations`;
const SUPPORTED_LOCALES = [
  "ar",
  "bg",
  "cs",
  "da",
  "de",
  "el",
  "en-US",
  "en-ca",
  "en-gb",
  "es",
  "es-419",
  "es-es",
  "fi",
  "fr",
  "fr-ca",
  "he",
  "hi",
  "hu",
  "id",
  "it",
  "ja",
  "ko",
  "nl",
  "no",
  "pl",
  "pt",
  "pt-br",
  "ro",
  "ru",
  "sk",
  "sv",
  "th",
  "tr",
  "uk",
  "vi",
  "zh-cn",
  "zh-tw",
];

(async function () {
  const resp = await fetch(
    `${ZSAP_BASE}/help_center_copenhagen_theme/manifest.json`
  );
  const manifest_json = await resp.json();

  for (const targetLocale of SUPPORTED_LOCALES) {
    const locale = manifest_json.json.find(
      (entry) => entry.name === targetLocale
    );

    console.log(`Downloading ${locale.name}...`);

    const resp = await fetch(`${ZSAP_BASE}${locale.path}`);
    const translations = await resp.json();

    const formattedTranslations = Object.entries(
      translations.translations
    ).reduce((accumulator, [key, value]) => {
      accumulator[key.replace(/.+\./, "")] = value;
      return accumulator;
    }, {});

    fs.writeFileSync(
      path.join("translations", `${locale.name}.json`),
      JSON.stringify(formattedTranslations, null, 2) + "\n",
      "utf8"
    );
  }
})();
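For clarity, the `replace(/.+\./, "")` in the reduce above is greedy, so it strips everything up to and including the last dot of each key; the key below is hypothetical.

// "help_center_copenhagen_theme.actions.sign_in".replace(/.+\./, ""); // => "sign_in"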