mirror of
https://github.com/monkeytypegame/monkeytype.git
synced 2025-10-09 15:15:58 +08:00
Added JSON schema validation. Part of #2516
This commit is contained in:
parent
211f191340
commit
c85b644b88
3 changed files with 400 additions and 1 deletions
|
@ -11,10 +11,17 @@ const { webpack } = require("webpack");
|
|||
const webpackDevConfig = require("./webpack.config.js");
|
||||
const webpackProdConfig = require("./webpack-production.config.js");
|
||||
const ts = require("gulp-typescript");
|
||||
const fs = require("fs");
|
||||
const V = require("jsonschema").Validator;
|
||||
const JSONValidator = new V();
|
||||
|
||||
const eslintConfig = "../.eslintrc.json";
|
||||
const tsProject = ts.createProject("tsconfig.json");
|
||||
|
||||
/**
 * Escapes every regular-expression metacharacter in a string so the result
 * can be embedded in a RegExp pattern as a literal match.
 * @param {string} str - raw text to escape
 * @returns {string} the text with all regex special characters backslash-escaped
 */
function escapeRegExp(str) {
  const metaChars = /[.*+?^${}()|[\]\\]/g;
  return str.replace(metaChars, "\\$&");
}
|
||||
|
||||
// Deletes the compiled output directory before a fresh build.
// allowEmpty keeps the task from failing when ./public/ does not exist yet.
task("clean", function () {
  const outputDir = src(["./public/"], { allowEmpty: true });
  return outputDir.pipe(vinylPaths(del));
});
|
||||
|
@ -33,6 +40,379 @@ task("lint-json", function () {
|
|||
.pipe(eslint.failAfterError());
|
||||
});
|
||||
|
||||
/**
 * Reads a JSON file from disk and parses it.
 * @param {string} filepath - path of the JSON file to read
 * @returns {*} the parsed JSON value
 */
function readJsonFile(filepath) {
  return JSON.parse(fs.readFileSync(filepath, { encoding: "utf8", flag: "r" }));
}

/**
 * Validates data against a jsonschema definition and logs a colored result
 * in the "<label> JSON schema is valid/invalid" format.
 * @param {*} data - parsed JSON value to check
 * @param {object} schema - jsonschema schema definition
 * @param {string} label - human-readable name used in the log line
 * @returns {Array|null} the validator's errors on failure, null when valid
 */
function validateJson(data, schema, label) {
  const result = JSONValidator.validate(data, schema);
  if (result.valid) {
    console.log(`${label} JSON schema is \u001b[32mvalid\u001b[0m`);
    return null;
  }
  console.log(`${label} JSON schema is \u001b[31minvalid\u001b[0m`);
  return result.errors;
}

/**
 * Wraps jsonschema validation errors in an Error with a readable message.
 * Passing the raw errors array straight to `new Error()` stringifies each
 * entry as "[object Object]"; joining the per-error stacks keeps the build
 * failure output useful.
 * @param {Array} errors - jsonschema ValidationError objects
 * @returns {Error} an Error whose message lists every validation failure
 */
function schemaError(errors) {
  return new Error(
    errors.map((error) => error.stack || error.message).join("\n")
  );
}

// Validates all static JSON data files (fonts, funbox, themes, languages,
// language word lists, quotes, challenges) against their expected schemas.
// Rejects on the first failing category so the build stops with a clear error.
task("validate-json-schema", function () {
  return new Promise((resolve, reject) => {
    //fonts
    const fontsSchema = {
      type: "array",
      items: {
        type: "object",
        properties: {
          name: {
            type: "string",
          },
        },
        required: ["name"],
      },
    };
    const fontsErrors = validateJson(
      readJsonFile("./static/fonts/_list.json"),
      fontsSchema,
      "Fonts"
    );
    if (fontsErrors) return reject(schemaError(fontsErrors));

    //funbox
    const funboxSchema = {
      type: "array",
      items: {
        type: "object",
        properties: {
          name: { type: "string" },
          type: { type: "string" },
          info: { type: "string" },
          affectsWordGeneration: { type: "boolean" },
        },
        required: ["name", "type", "info"],
      },
    };
    const funboxErrors = validateJson(
      readJsonFile("./static/funbox/_list.json"),
      funboxSchema,
      "Funbox"
    );
    if (funboxErrors) return reject(schemaError(funboxErrors));

    //themes
    const themesSchema = {
      type: "array",
      items: {
        type: "object",
        properties: {
          name: { type: "string" },
          bgColor: { type: "string" },
          mainColor: { type: "string" },
        },
        required: ["name", "bgColor", "mainColor"],
      },
    };
    const themesErrors = validateJson(
      readJsonFile("./static/themes/_list.json"),
      themesSchema,
      "Themes"
    );
    if (themesErrors) return reject(schemaError(themesErrors));

    //languages
    const languagesData = readJsonFile("./static/languages/_list.json");
    const languagesSchema = {
      type: "array",
      items: {
        type: "string",
      },
    };
    const languagesErrors = validateJson(
      languagesData,
      languagesSchema,
      "Languages list"
    );
    if (languagesErrors) return reject(schemaError(languagesErrors));

    //languages group
    const languagesGroupSchema = {
      type: "array",
      items: {
        type: "object",
        properties: {
          name: { type: "string" },
          languages: {
            type: "array",
            items: {
              type: "string",
            },
          },
        },
        required: ["name", "languages"],
      },
    };
    const languagesGroupErrors = validateJson(
      readJsonFile("./static/languages/_groups.json"),
      languagesGroupSchema,
      "Languages groups"
    );
    if (languagesGroupErrors) return reject(schemaError(languagesGroupErrors));

    //language files: every language in _list.json must have a word list
    //whose "name" field matches the file name exactly
    const languageFileSchema = {
      type: "object",
      properties: {
        name: { type: "string" },
        leftToRight: { type: "boolean" },
        noLazyMode: { type: "boolean" },
        bcp47: { type: "string" },
        words: {
          type: "array",
          items: { type: "string", minLength: 1 },
        },
        accents: {
          type: "array",
          items: {
            type: "array",
            items: { type: "string", minLength: 1 },
            minItems: 2,
            maxItems: 2,
          },
        },
      },
      required: ["name", "leftToRight", "words"],
    };
    const languageFilesErrors = [];
    languagesData.forEach((language) => {
      const languageFileData = readJsonFile(
        `./static/languages/${language}.json`
      );
      // pin the "name" field to this exact language; escape the file name in
      // case it contains regex metacharacters
      languageFileSchema.properties.name.pattern =
        "^" + escapeRegExp(language) + "$";
      const languageFileValidator = JSONValidator.validate(
        languageFileData,
        languageFileSchema
      );
      if (!languageFileValidator.valid) {
        // collect every failing file (previously only the last failure was
        // kept, silently dropping earlier ones)
        languageFilesErrors.push(...languageFileValidator.errors);
      }
    });
    if (languageFilesErrors.length === 0) {
      console.log(
        `Language word list JSON schemas are \u001b[32mvalid\u001b[0m`
      );
    } else {
      console.log(
        `Language word list JSON schemas are \u001b[31minvalid\u001b[0m`
      );
      return reject(schemaError(languageFilesErrors));
    }

    //quotes
    const quoteSchema = {
      type: "object",
      properties: {
        language: { type: "string" },
        groups: {
          type: "array",
          items: {
            type: "array",
            items: {
              type: "number",
            },
            minItems: 2,
            maxItems: 2,
          },
        },
        quotes: {
          type: "array",
          items: {
            type: "object",
            properties: {
              text: { type: "string" },
              source: { type: "string" },
              length: { type: "number" },
              id: { type: "number" },
            },
            required: ["text", "source", "length", "id"],
          },
        },
      },
      required: ["language", "groups", "quotes"],
    };
    const quoteIdsSchema = {
      type: "array",
      items: {
        type: "number",
      },
      uniqueItems: true,
    };
    const quoteFilesErrors = [];
    const quoteIdsErrors = [];
    fs.readdirSync("./static/quotes/")
      // ignore stray non-JSON entries (editor backups, .DS_Store, ...)
      .filter((filename) => filename.endsWith(".json"))
      .forEach((filename) => {
        const quotefilename = filename.split(".")[0];
        const quoteData = readJsonFile(`./static/quotes/${quotefilename}.json`);
        // the "language" field must match the file name exactly
        quoteSchema.properties.language.pattern =
          "^" + escapeRegExp(quotefilename) + "$";
        const quoteValidator = JSONValidator.validate(quoteData, quoteSchema);
        if (!quoteValidator.valid) {
          console.log(
            `Quote ${quotefilename} JSON schema is \u001b[31minvalid\u001b[0m`
          );
          quoteFilesErrors.push(...quoteValidator.errors);
        }
        // guard against a missing "quotes" key so a schema failure above
        // does not turn into an opaque TypeError here
        const quoteIds = (quoteData.quotes || []).map((quote) => quote.id);
        const quoteIdsValidator = JSONValidator.validate(
          quoteIds,
          quoteIdsSchema
        );
        if (!quoteIdsValidator.valid) {
          console.log(
            `Quote ${quotefilename} IDs are \u001b[31mnot unique\u001b[0m`
          );
          quoteIdsErrors.push(...quoteIdsValidator.errors);
        }
      });
    if (quoteFilesErrors.length === 0) {
      console.log(`Quote file JSON schemas are \u001b[32mvalid\u001b[0m`);
    } else {
      console.log(`Quote file JSON schemas are \u001b[31minvalid\u001b[0m`);
      return reject(schemaError(quoteFilesErrors));
    }
    if (quoteIdsErrors.length === 0) {
      console.log(`Quote IDs are \u001b[32munique\u001b[0m`);
    } else {
      console.log(`Quote IDs are \u001b[31mnot unique\u001b[0m`);
      return reject(schemaError(quoteIdsErrors));
    }

    //challenges
    // shared sub-schema for the min/max/exact requirement bounds (was
    // duplicated five times for wpm/time/acc/raw/con)
    const minMaxExactSchema = {
      type: "object",
      properties: {
        min: { type: "number" },
        max: { type: "number" },
        exact: { type: "number" },
      },
    };
    const challengesSchema = {
      type: "array",
      items: {
        type: "object",
        properties: {
          name: { type: "string" },
          display: { type: "string" },
          autoRole: { type: "boolean" },
          type: { type: "string" },
          message: { type: "string" },
          parameters: {
            type: "array",
          },
          requirements: {
            type: "object",
            properties: {
              wpm: minMaxExactSchema,
              time: minMaxExactSchema,
              acc: minMaxExactSchema,
              raw: minMaxExactSchema,
              con: minMaxExactSchema,
              config: {
                type: "object",
              },
              funbox: { type: "string" },
            },
          },
        },
        required: ["name", "display", "type", "parameters"],
      },
    };
    const challengesErrors = validateJson(
      readJsonFile("./static/challenges/_list.json"),
      challengesSchema,
      "Challenges list"
    );
    if (challengesErrors) return reject(schemaError(challengesErrors));

    resolve();
  });
});
|
||||
|
||||
// Copies the raw script sources into the distribution directory unchanged.
task("copy-src-contents", function () {
  const scriptSources = src("./src/scripts/**");
  return scriptSources.pipe(dest("./dist/"));
});
|
||||
|
@ -123,6 +503,7 @@ task(
|
|||
series(
|
||||
"lint",
|
||||
"lint-json",
|
||||
"validate-json-schema",
|
||||
"webpack-production",
|
||||
"static",
|
||||
"sass",
|
||||
|
|
16
package-lock.json
generated
16
package-lock.json
generated
|
@ -22,6 +22,7 @@
|
|||
"eslint-plugin-promise": "5.1.0",
|
||||
"eslint-plugin-require-path-exists": "1.1.9",
|
||||
"husky": "4.3.0",
|
||||
"jsonschema": "^1.4.0",
|
||||
"prettier": "2.1.2",
|
||||
"pretty-quick": "3.1.0"
|
||||
},
|
||||
|
@ -3645,6 +3646,15 @@
|
|||
"integrity": "sha512-fQzRfAbIBnR0IQvftw9FJveWiHp72Fg20giDrHz6TdfB12UH/uue0D3hm57UB5KgAVuniLMCaS8P1IMj9NR7cA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/jsonschema": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonschema/-/jsonschema-1.4.0.tgz",
|
||||
"integrity": "sha512-/YgW6pRMr6M7C+4o8kS+B/2myEpHCrxO4PEWnqJNBFMjn7EWXqlQ4tGwL6xTHeRplwuZmcAncdvfOad1nT2yMw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/jsonwebtoken": {
|
||||
"version": "8.5.1",
|
||||
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz",
|
||||
|
@ -8611,6 +8621,12 @@
|
|||
"integrity": "sha512-fQzRfAbIBnR0IQvftw9FJveWiHp72Fg20giDrHz6TdfB12UH/uue0D3hm57UB5KgAVuniLMCaS8P1IMj9NR7cA==",
|
||||
"dev": true
|
||||
},
|
||||
"jsonschema": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonschema/-/jsonschema-1.4.0.tgz",
|
||||
"integrity": "sha512-/YgW6pRMr6M7C+4o8kS+B/2myEpHCrxO4PEWnqJNBFMjn7EWXqlQ4tGwL6xTHeRplwuZmcAncdvfOad1nT2yMw==",
|
||||
"dev": true
|
||||
},
|
||||
"jsonwebtoken": {
|
||||
"version": "8.5.1",
|
||||
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz",
|
||||
|
|
|
@ -11,7 +11,8 @@
|
|||
"install:all": "sh ./bin/install.sh",
|
||||
"lint": "./node_modules/.bin/eslint './backend/**/*.js' './frontend/src/scripts/**/*.js'",
|
||||
"build:live": "cd ./frontend && npm run build:live",
|
||||
"pretty": "prettier --check './backend/**/*.js' './frontend/src/**/*.{js,scss}' './frontend/static/**/*.{json,html}'"
|
||||
"pretty": "prettier --check './backend/**/*.js' './frontend/src/**/*.{js,scss}' './frontend/static/**/*.{json,html}'",
|
||||
"validate-json-schema": "cd frontend && npx gulp validate-json-schema"
|
||||
},
|
||||
"engines": {
|
||||
"npm": "8.1.2"
|
||||
|
@ -30,6 +31,7 @@
|
|||
"eslint-plugin-promise": "5.1.0",
|
||||
"eslint-plugin-require-path-exists": "1.1.9",
|
||||
"husky": "4.3.0",
|
||||
"jsonschema": "^1.4.0",
|
||||
"prettier": "2.1.2",
|
||||
"pretty-quick": "3.1.0"
|
||||
},
|
||||
|
|
Loading…
Add table
Reference in a new issue