Enable linting of the external/cmapscompress/
folder
Given that this is our "own" code, there's no good reason, as far as I'm concerned, not to lint it.
This commit is contained in:
parent
b9f4d89254
commit
98e658ddf0
@ -4,7 +4,6 @@ docs/
|
||||
node_modules/
|
||||
external/bcmaps/
|
||||
external/webL10n/
|
||||
external/cmapscompress/
|
||||
external/builder/fixtures/
|
||||
external/builder/fixtures_esprima/
|
||||
external/quickjs/
|
||||
|
10
external/cmapscompress/.eslintrc
vendored
Normal file
10
external/cmapscompress/.eslintrc
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
{
|
||||
"extends": [
|
||||
"../.eslintrc"
|
||||
],
|
||||
|
||||
"rules": {
|
||||
// ECMAScript 6
|
||||
"no-var": "error",
|
||||
},
|
||||
}
|
43
external/cmapscompress/compress.js
vendored
43
external/cmapscompress/compress.js
vendored
@ -37,7 +37,7 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
const first = item.items[0];
|
||||
const sequence = item.sequence === true;
|
||||
const flags = (item.type << 5) | (sequence ? 0x10 : 0);
|
||||
var nextStart, nextCode;
|
||||
let nextStart, nextCode;
|
||||
switch (item.type) {
|
||||
case 0:
|
||||
out +=
|
||||
@ -45,7 +45,7 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
writeNumber(subitems.length);
|
||||
out += first.start + writeNumber(subHex(first.end, first.start));
|
||||
nextStart = incHex(first.end);
|
||||
for (var j = 1; j < subitems.length; j++) {
|
||||
for (let j = 1; j < subitems.length; j++) {
|
||||
out +=
|
||||
writeNumber(subHex(subitems[j].start, nextStart)) +
|
||||
writeNumber(subHex(subitems[j].end, subitems[j].start));
|
||||
@ -61,7 +61,7 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
writeNumber(subHex(first.end, first.start)) +
|
||||
writeNumber(first.code);
|
||||
nextStart = incHex(first.end);
|
||||
for (var j = 1; j < subitems.length; j++) {
|
||||
for (let j = 1; j < subitems.length; j++) {
|
||||
out +=
|
||||
writeNumber(subHex(subitems[j].start, nextStart)) +
|
||||
writeNumber(subHex(subitems[j].end, subitems[j].start)) +
|
||||
@ -76,7 +76,7 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
out += first.char + writeNumber(first.code);
|
||||
nextStart = incHex(first.char);
|
||||
nextCode = first.code + 1;
|
||||
for (var j = 1; j < subitems.length; j++) {
|
||||
for (let j = 1; j < subitems.length; j++) {
|
||||
out +=
|
||||
(sequence ? "" : writeNumber(subHex(subitems[j].char, nextStart))) +
|
||||
writeSigned(subitems[j].code - nextCode);
|
||||
@ -93,7 +93,7 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
writeNumber(subHex(first.end, first.start)) +
|
||||
writeNumber(first.code);
|
||||
nextStart = incHex(first.end);
|
||||
for (var j = 1; j < subitems.length; j++) {
|
||||
for (let j = 1; j < subitems.length; j++) {
|
||||
out +=
|
||||
(sequence
|
||||
? ""
|
||||
@ -110,7 +110,7 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
out += first.char + first.code;
|
||||
nextStart = incHex(first.char);
|
||||
nextCode = incHex(first.code);
|
||||
for (var j = 1; j < subitems.length; j++) {
|
||||
for (let j = 1; j < subitems.length; j++) {
|
||||
out +=
|
||||
(sequence ? "" : writeNumber(subHex(subitems[j].char, nextStart))) +
|
||||
writeSigned(subHex(subitems[j].code, nextCode));
|
||||
@ -127,7 +127,7 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
writeNumber(subHex(first.end, first.start)) +
|
||||
first.code;
|
||||
nextStart = incHex(first.end);
|
||||
for (var j = 1; j < subitems.length; j++) {
|
||||
for (let j = 1; j < subitems.length; j++) {
|
||||
out +=
|
||||
(sequence
|
||||
? ""
|
||||
@ -140,11 +140,11 @@ function compressCmap(srcPath, destPath, verify) {
|
||||
}
|
||||
}
|
||||
|
||||
fs.writeFileSync(destPath, new Buffer(out, "hex"));
|
||||
fs.writeFileSync(destPath, Buffer.from(out, "hex"));
|
||||
|
||||
if (verify) {
|
||||
const result2 = parseCMap(out);
|
||||
const isGood = JSON.stringify(inputData) == JSON.stringify(result2);
|
||||
const isGood = JSON.stringify(inputData) === JSON.stringify(result2);
|
||||
if (!isGood) {
|
||||
throw new Error("Extracted data does not match the expected result");
|
||||
}
|
||||
@ -191,8 +191,9 @@ function parseCMap(binaryData) {
|
||||
return s;
|
||||
},
|
||||
readHexNumber(size) {
|
||||
const lengthInChars = (size + 1) << 1;
|
||||
const stack = [];
|
||||
const lengthInChars = (size + 1) << 1,
|
||||
stack = [];
|
||||
let last;
|
||||
do {
|
||||
const b = this.readByte();
|
||||
last = !(b & 0x80);
|
||||
@ -269,13 +270,13 @@ function parseCMap(binaryData) {
|
||||
}
|
||||
const ucs2DataSize = 1;
|
||||
const subitemsCount = reader.readNumber();
|
||||
var start, end, code, char;
|
||||
let start, end, code, char;
|
||||
switch (type) {
|
||||
case 0:
|
||||
start = reader.readHex(dataSize);
|
||||
end = addHex(reader.readHexNumber(dataSize), start);
|
||||
subitems.push({ start, end });
|
||||
for (var i = 1; i < subitemsCount; i++) {
|
||||
for (let i = 1; i < subitemsCount; i++) {
|
||||
start = addHex(reader.readHexNumber(dataSize), incHex(end));
|
||||
end = addHex(reader.readHexNumber(dataSize), start);
|
||||
subitems.push({ start, end });
|
||||
@ -286,7 +287,7 @@ function parseCMap(binaryData) {
|
||||
end = addHex(reader.readHexNumber(dataSize), start);
|
||||
code = reader.readNumber();
|
||||
subitems.push({ start, end, code });
|
||||
for (var i = 1; i < subitemsCount; i++) {
|
||||
for (let i = 1; i < subitemsCount; i++) {
|
||||
start = addHex(reader.readHexNumber(dataSize), incHex(end));
|
||||
end = addHex(reader.readHexNumber(dataSize), start);
|
||||
code = reader.readNumber();
|
||||
@ -297,7 +298,7 @@ function parseCMap(binaryData) {
|
||||
char = reader.readHex(dataSize);
|
||||
code = reader.readNumber();
|
||||
subitems.push({ char, code });
|
||||
for (var i = 1; i < subitemsCount; i++) {
|
||||
for (let i = 1; i < subitemsCount; i++) {
|
||||
char = sequence
|
||||
? incHex(char)
|
||||
: addHex(reader.readHexNumber(dataSize), incHex(char));
|
||||
@ -310,7 +311,7 @@ function parseCMap(binaryData) {
|
||||
end = addHex(reader.readHexNumber(dataSize), start);
|
||||
code = reader.readNumber();
|
||||
subitems.push({ start, end, code });
|
||||
for (var i = 1; i < subitemsCount; i++) {
|
||||
for (let i = 1; i < subitemsCount; i++) {
|
||||
start = sequence
|
||||
? incHex(end)
|
||||
: addHex(reader.readHexNumber(dataSize), incHex(end));
|
||||
@ -323,7 +324,7 @@ function parseCMap(binaryData) {
|
||||
char = reader.readHex(ucs2DataSize);
|
||||
code = reader.readHex(dataSize);
|
||||
subitems.push({ char, code });
|
||||
for (var i = 1; i < subitemsCount; i++) {
|
||||
for (let i = 1; i < subitemsCount; i++) {
|
||||
char = sequence
|
||||
? incHex(char)
|
||||
: addHex(reader.readHexNumber(ucs2DataSize), incHex(char));
|
||||
@ -336,7 +337,7 @@ function parseCMap(binaryData) {
|
||||
end = addHex(reader.readHexNumber(ucs2DataSize), start);
|
||||
code = reader.readHex(dataSize);
|
||||
subitems.push({ start, end, code });
|
||||
for (var i = 1; i < subitemsCount; i++) {
|
||||
for (let i = 1; i < subitemsCount; i++) {
|
||||
start = sequence
|
||||
? incHex(end)
|
||||
: addHex(reader.readHexNumber(ucs2DataSize), incHex(end));
|
||||
@ -368,7 +369,7 @@ function writeByte(b) {
|
||||
}
|
||||
function writeNumber(n) {
|
||||
if (typeof n === "string") {
|
||||
var s = "",
|
||||
let s = "",
|
||||
buffer = 0,
|
||||
bufferSize = 0;
|
||||
let i = n.length;
|
||||
@ -390,7 +391,7 @@ function writeNumber(n) {
|
||||
}
|
||||
return s;
|
||||
}
|
||||
var s = writeByte(n & 0x7f);
|
||||
let s = writeByte(n & 0x7f);
|
||||
n >>>= 7;
|
||||
while (n > 0) {
|
||||
s = writeByte((n & 0x7f) | 0x80) + s;
|
||||
@ -470,7 +471,7 @@ function incHex(a) {
|
||||
|
||||
exports.compressCmaps = function (src, dest, verify) {
|
||||
const files = fs.readdirSync(src).filter(function (fn) {
|
||||
return fn.indexOf(".") < 0; // skipping files with the extension
|
||||
return !fn.includes("."); // skipping files with the extension
|
||||
});
|
||||
files.forEach(function (fn) {
|
||||
const srcPath = path.join(src, fn);
|
||||
|
60
external/cmapscompress/optimize.js
vendored
60
external/cmapscompress/optimize.js
vendored
@ -14,7 +14,7 @@
|
||||
*/
|
||||
|
||||
exports.optimizeCMap = function (data) {
|
||||
var i = 1;
|
||||
let i = 1;
|
||||
while (i < data.body.length) {
|
||||
if (data.body[i - 1].type === data.body[i].type) {
|
||||
data.body[i - 1].items = data.body[i - 1].items.concat(
|
||||
@ -26,16 +26,16 @@ exports.optimizeCMap = function (data) {
|
||||
}
|
||||
}
|
||||
// split into groups with different lengths
|
||||
var i = 0;
|
||||
i = 0;
|
||||
while (i < data.body.length) {
|
||||
var item = data.body[i];
|
||||
const keys = Object.keys(item.items[0]).filter(function (i) {
|
||||
return typeof item.items[0][i] === "string";
|
||||
const item = data.body[i];
|
||||
const keys = Object.keys(item.items[0]).filter(function (val) {
|
||||
return typeof item.items[0][val] === "string";
|
||||
});
|
||||
var j = 1;
|
||||
let j = 1;
|
||||
while (j < item.items.length) {
|
||||
let different = false;
|
||||
for (var q = 0; q < keys.length && !different; q++) {
|
||||
for (let q = 0; q < keys.length && !different; q++) {
|
||||
different =
|
||||
item.items[j - 1][keys[q]].length !== item.items[j][keys[q]].length;
|
||||
}
|
||||
@ -53,13 +53,13 @@ exports.optimizeCMap = function (data) {
|
||||
i++;
|
||||
}
|
||||
// find sequences of single char ranges
|
||||
var i = 0;
|
||||
i = 0;
|
||||
while (i < data.body.length) {
|
||||
var item = data.body[i];
|
||||
const item = data.body[i];
|
||||
if (item.type === 3 || item.type === 5) {
|
||||
var j = 0;
|
||||
let j = 0;
|
||||
while (j < item.items.length) {
|
||||
var q = j;
|
||||
const q = j;
|
||||
while (
|
||||
j < item.items.length &&
|
||||
item.items[j].start === item.items[j].end
|
||||
@ -76,15 +76,15 @@ exports.optimizeCMap = function (data) {
|
||||
if (q > 0) {
|
||||
data.body.splice(i + 1, 0, {
|
||||
type: item.type - 1,
|
||||
items: item.items.splice(q, j - q).map(function (i) {
|
||||
return { char: i.start, code: i.code };
|
||||
items: item.items.splice(q, j - q).map(function (val) {
|
||||
return { char: val.start, code: val.code };
|
||||
}),
|
||||
});
|
||||
i++;
|
||||
} else {
|
||||
item.type -= 1;
|
||||
item.items = item.items.map(function (i) {
|
||||
return { char: i.start, code: i.code };
|
||||
item.items = item.items.map(function (val) {
|
||||
return { char: val.start, code: val.code };
|
||||
});
|
||||
}
|
||||
continue;
|
||||
@ -96,15 +96,15 @@ exports.optimizeCMap = function (data) {
|
||||
}
|
||||
|
||||
// find sequences of increasing code/ranges order
|
||||
var i = 0;
|
||||
i = 0;
|
||||
while (i < data.body.length) {
|
||||
var item = data.body[i];
|
||||
const item = data.body[i];
|
||||
if (item.type >= 2 && item.type <= 5) {
|
||||
var j = 1;
|
||||
let j = 1;
|
||||
const startProp = item.type === 2 || item.type === 4 ? "char" : "start";
|
||||
const endProp = item.type === 2 || item.type === 4 ? "char" : "end";
|
||||
while (j < item.items.length) {
|
||||
var q = j - 1;
|
||||
const q = j - 1;
|
||||
while (
|
||||
j < item.items.length &&
|
||||
incHex(item.items[j - 1][endProp]) === item.items[j][startProp]
|
||||
@ -137,18 +137,18 @@ exports.optimizeCMap = function (data) {
|
||||
}
|
||||
|
||||
// split non-sequences two groups where codes are close
|
||||
var i = 0;
|
||||
i = 0;
|
||||
while (i < data.body.length) {
|
||||
var item = data.body[i];
|
||||
const item = data.body[i];
|
||||
if (!item.sequence && (item.type === 2 || item.type === 3)) {
|
||||
const subitems = item.items;
|
||||
const codes = subitems.map(function (i) {
|
||||
return i.code;
|
||||
const codes = subitems.map(function (val) {
|
||||
return val.code;
|
||||
});
|
||||
codes.sort(function (a, b) {
|
||||
return a - b;
|
||||
});
|
||||
var maxDistance = 100,
|
||||
const maxDistance = 100,
|
||||
minItems = 10,
|
||||
itemsPerBucket = 50;
|
||||
if (
|
||||
@ -157,12 +157,12 @@ exports.optimizeCMap = function (data) {
|
||||
) {
|
||||
const gapsCount = Math.max(2, (subitems.length / itemsPerBucket) | 0);
|
||||
const gaps = [];
|
||||
for (var q = 0; q < gapsCount; q++) {
|
||||
for (let q = 0; q < gapsCount; q++) {
|
||||
gaps.push({ length: 0 });
|
||||
}
|
||||
for (var j = 1; j < codes.length; j++) {
|
||||
for (let j = 1; j < codes.length; j++) {
|
||||
const gapLength = codes[j] - codes[j - 1];
|
||||
var q = 0;
|
||||
let q = 0;
|
||||
while (q < gaps.length && gaps[q].length > gapLength) {
|
||||
q++;
|
||||
}
|
||||
@ -190,15 +190,15 @@ exports.optimizeCMap = function (data) {
|
||||
});
|
||||
if (groups.length > 1) {
|
||||
const buckets = [(item.items = [])];
|
||||
for (var j = 0; j < groups.length; j++) {
|
||||
for (let j = 0; j < groups.length; j++) {
|
||||
const newItem = { type: item.type, items: [] };
|
||||
buckets.push(newItem.items);
|
||||
i++;
|
||||
data.body.splice(i, 0, newItem);
|
||||
}
|
||||
for (var j = 0; j < subitems.length; j++) {
|
||||
for (let j = 0; j < subitems.length; j++) {
|
||||
const code = subitems[j].code;
|
||||
var q = 0;
|
||||
let q = 0;
|
||||
while (q < groups.length && groups[q] <= code) {
|
||||
q++;
|
||||
}
|
||||
|
28
external/cmapscompress/parse.js
vendored
28
external/cmapscompress/parse.js
vendored
@ -32,21 +32,21 @@ exports.parseAdobeCMap = function (content) {
|
||||
result.type = +m[1];
|
||||
m = /\/WMode\s+(\d+)+\s+def\b/.exec(body);
|
||||
result.wmode = +m[1];
|
||||
m = /\/([\w\-]+)\s+usecmap\b/.exec(body);
|
||||
m = /\/([\w-]+)\s+usecmap\b/.exec(body);
|
||||
if (m) {
|
||||
result.usecmap = m[1];
|
||||
}
|
||||
const re = /(\d+)\s+(begincodespacerange|beginnotdefrange|begincidchar|begincidrange|beginbfchar|beginbfrange)\n([\s\S]*?)\n(endcodespacerange|endnotdefrange|endcidchar|endcidrange|endbfchar|endbfrange)/g;
|
||||
while ((m = re.exec(body))) {
|
||||
const lines = m[3].toLowerCase().split("\n");
|
||||
var m2;
|
||||
|
||||
switch (m[2]) {
|
||||
case "begincodespacerange":
|
||||
result.body.push({
|
||||
type: 0,
|
||||
items: lines.map(function (line) {
|
||||
const m = /<(\w+)>\s+<(\w+)>/.exec(line);
|
||||
return { start: m[1], end: m[2] };
|
||||
const m2 = /<(\w+)>\s+<(\w+)>/.exec(line);
|
||||
return { start: m2[1], end: m2[2] };
|
||||
}),
|
||||
});
|
||||
break;
|
||||
@ -54,8 +54,8 @@ exports.parseAdobeCMap = function (content) {
|
||||
result.body.push({
|
||||
type: 1,
|
||||
items: lines.map(function (line) {
|
||||
const m = /<(\w+)>\s+<(\w+)>\s+(\d+)/.exec(line);
|
||||
return { start: m[1], end: m[2], code: +m[3] };
|
||||
const m2 = /<(\w+)>\s+<(\w+)>\s+(\d+)/.exec(line);
|
||||
return { start: m2[1], end: m2[2], code: +m2[3] };
|
||||
}),
|
||||
});
|
||||
break;
|
||||
@ -63,8 +63,8 @@ exports.parseAdobeCMap = function (content) {
|
||||
result.body.push({
|
||||
type: 2,
|
||||
items: lines.map(function (line) {
|
||||
const m = /<(\w+)>\s+(\d+)/.exec(line);
|
||||
return { char: m[1], code: +m[2] };
|
||||
const m2 = /<(\w+)>\s+(\d+)/.exec(line);
|
||||
return { char: m2[1], code: +m2[2] };
|
||||
}),
|
||||
});
|
||||
break;
|
||||
@ -72,8 +72,8 @@ exports.parseAdobeCMap = function (content) {
|
||||
result.body.push({
|
||||
type: 3,
|
||||
items: lines.map(function (line) {
|
||||
const m = /<(\w+)>\s+<(\w+)>\s+(\d+)/.exec(line);
|
||||
return { start: m[1], end: m[2], code: +m[3] };
|
||||
const m2 = /<(\w+)>\s+<(\w+)>\s+(\d+)/.exec(line);
|
||||
return { start: m2[1], end: m2[2], code: +m2[3] };
|
||||
}),
|
||||
});
|
||||
break;
|
||||
@ -81,8 +81,8 @@ exports.parseAdobeCMap = function (content) {
|
||||
result.body.push({
|
||||
type: 4,
|
||||
items: lines.map(function (line) {
|
||||
const m = /<(\w+)>\s+<(\w+)>/.exec(line);
|
||||
return { char: m[1], code: m[2] };
|
||||
const m2 = /<(\w+)>\s+<(\w+)>/.exec(line);
|
||||
return { char: m2[1], code: m2[2] };
|
||||
}),
|
||||
});
|
||||
break;
|
||||
@ -90,8 +90,8 @@ exports.parseAdobeCMap = function (content) {
|
||||
result.body.push({
|
||||
type: 5,
|
||||
items: lines.map(function (line) {
|
||||
const m = /<(\w+)>\s+<(\w+)>\s+<(\w+)>/.exec(line);
|
||||
return { start: m[1], end: m[2], code: m[3] };
|
||||
const m2 = /<(\w+)>\s+<(\w+)>\s+<(\w+)>/.exec(line);
|
||||
return { start: m2[1], end: m2[2], code: m2[3] };
|
||||
}),
|
||||
});
|
||||
break;
|
||||
|
Loading…
Reference in New Issue
Block a user