Merge pull request #15536 from Snuffleupagus/more-for-of-2
Use more `for...of` loops in the code-base
Commit: 8c59cc72a3
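Every hunk below applies the same mechanical transformation: a counting `for` loop whose index exists only to read `array[i]` becomes a `for...of` loop over the values directly, and loops over two-element pairs additionally destructure `[key, value]` in the loop head. A minimal sketch of the shapes involved (the sample data is made up for illustration):

```js
const kids = ["a", "b", "c"];

// Before: the index `i` is only used to look up `kids[i]`.
for (let i = 0, ii = kids.length; i < ii; i++) {
  const kid = kids[i];
  console.log(kid);
}

// After: iterate the values directly.
for (const kid of kids) {
  console.log(kid);
}

// Pairs can be destructured in the loop head,
// as in `createDict` and `setGState` below.
for (const [key, value] of [["LW", 1], ["CA", 0.5]]) {
  console.log(key, value);
}
```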
@@ -1343,8 +1343,7 @@ class Catalog {
       const kidPromises = [];
       let found = false;
-      for (let i = 0, ii = kids.length; i < ii; i++) {
-        const kid = kids[i];
+      for (const kid of kids) {
         if (!(kid instanceof Ref)) {
           throw new FormatError("Kid must be a reference.");
         }

@@ -476,10 +476,7 @@ class CFFParser {

   createDict(Type, dict, strings) {
     const cffDict = new Type(strings);
-    for (let i = 0, ii = dict.length; i < ii; ++i) {
-      const pair = dict[i];
-      const key = pair[0];
-      const value = pair[1];
+    for (const [key, value] of dict) {
       cffDict.setByKey(key, value);
     }
     return cffDict;

@@ -1110,15 +1107,14 @@ class CFFDict {
     if (!(key in this.keyToNameMap)) {
       return false;
     }
-    const valueLength = value.length;
     // ignore empty values
-    if (valueLength === 0) {
+    if (value.length === 0) {
       return true;
     }
     // Ignore invalid values (fixes bug1068432.pdf and bug1308536.pdf).
-    for (let i = 0; i < valueLength; i++) {
-      if (isNaN(value[i])) {
-        warn('Invalid CFFDict value: "' + value + '" for key "' + key + '".');
+    for (const val of value) {
+      if (isNaN(val)) {
+        warn(`Invalid CFFDict value: "${value}" for key "${key}".`);
         return true;
       }
     }

@@ -1166,8 +1162,7 @@ class CFFDict {
       opcodes: {},
       order: [],
     };
-    for (let i = 0, ii = layout.length; i < ii; ++i) {
-      const entry = layout[i];
+    for (const entry of layout) {
       const key = Array.isArray(entry[0])
         ? (entry[0][0] << 8) + entry[0][1]
         : entry[0];

@@ -1401,8 +1396,7 @@ class CFFCompiler {
     if (cff.topDict.hasName("FontMatrix")) {
       const base = cff.topDict.getByName("FontMatrix");
       cff.topDict.removeByName("FontMatrix");
-      for (let i = 0, ii = cff.fdArray.length; i < ii; i++) {
-        const subDict = cff.fdArray[i];
+      for (const subDict of cff.fdArray) {
         let matrix = base.slice(0);
         if (subDict.hasName("FontMatrix")) {
           matrix = Util.transform(matrix, subDict.getByName("FontMatrix"));

@@ -1564,8 +1558,7 @@ class CFFCompiler {

   compileNameIndex(names) {
     const nameIndex = new CFFIndex();
-    for (let i = 0, ii = names.length; i < ii; ++i) {
-      const name = names[i];
+    for (const name of names) {
       // OTS doesn't allow names to be over 127 characters.
       const length = Math.min(name.length, 127);
       let sanitizedName = new Array(length);

@@ -1604,8 +1597,7 @@ class CFFCompiler {
   compileTopDicts(dicts, length, removeCidKeys) {
     const fontDictTrackers = [];
     let fdArrayIndex = new CFFIndex();
-    for (let i = 0, ii = dicts.length; i < ii; ++i) {
-      const fontDict = dicts[i];
+    for (const fontDict of dicts) {
       if (removeCidKeys) {
         fontDict.removeByName("CIDFontVersion");
         fontDict.removeByName("CIDFontRevision");

@@ -1723,8 +1715,8 @@ class CFFCompiler {

   compileStringIndex(strings) {
     const stringIndex = new CFFIndex();
-    for (let i = 0, ii = strings.length; i < ii; ++i) {
-      stringIndex.add(stringToBytes(strings[i]));
+    for (const string of strings) {
+      stringIndex.add(stringToBytes(string));
     }
     return this.compileIndex(stringIndex);
   }

@@ -1908,9 +1900,7 @@ class CFFCompiler {
       if (trackers[i]) {
         trackers[i].offset(data.length);
       }
-      for (let j = 0, jj = objects[i].length; j < jj; j++) {
-        data.push(objects[i][j]);
-      }
+      data.push(...objects[i]);
     }
     return data;
   }

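In `compileIndex` the inner byte-copy loop collapses to a single `push` with spread, which is equivalent because `push` accepts any number of arguments. One caveat worth hedging: spreading a very large array into a call can hit an engine's argument-count limit, so this rewrite assumes the individual index entries stay reasonably small. A sketch of the equivalence:

```js
const data = [0x01, 0x02];
const entry = [0x03, 0x04, 0x05];

// Equivalent to: for (const b of entry) data.push(b);
data.push(...entry);
console.log(data); // [1, 2, 3, 4, 5]
```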
@@ -1407,8 +1407,8 @@ class PDFDocument {

 function hexString(hash) {
   const buf = [];
-  for (let i = 0, ii = hash.length; i < ii; i++) {
-    const hex = hash[i].toString(16);
+  for (const num of hash) {
+    const hex = num.toString(16);
     buf.push(hex.padStart(2, "0"));
   }
   return buf.join("");

@@ -112,8 +112,8 @@ const deferred = Promise.resolve();
 function normalizeBlendMode(value, parsingArray = false) {
   if (Array.isArray(value)) {
     // Use the first *supported* BM value in the Array (fixes issue11279.pdf).
-    for (let i = 0, ii = value.length; i < ii; i++) {
-      const maybeBM = normalizeBlendMode(value[i], /* parsingArray = */ true);
+    for (const val of value) {
+      const maybeBM = normalizeBlendMode(val, /* parsingArray = */ true);
       if (maybeBM) {
         return maybeBM;
       }

@@ -1056,10 +1056,8 @@ class PartialEvaluator {
     let isSimpleGState = true;
     // This array holds the converted/processed state data.
     const gStateObj = [];
-    const gStateKeys = gState.getKeys();
     let promise = Promise.resolve();
-    for (let i = 0, ii = gStateKeys.length; i < ii; i++) {
-      const key = gStateKeys[i];
+    for (const key of gState.getKeys()) {
       const value = gState.get(key);
       switch (key) {
         case "Type":

@@ -3419,8 +3417,8 @@ class PartialEvaluator {
     if (encoding.has("Differences")) {
       const diffEncoding = encoding.get("Differences");
       let index = 0;
-      for (let j = 0, jj = diffEncoding.length; j < jj; j++) {
-        const data = xref.fetchIfRef(diffEncoding[j]);
+      for (const entry of diffEncoding) {
+        const data = xref.fetchIfRef(entry);
         if (typeof data === "number") {
           index = data;
         } else if (data instanceof Name) {

@@ -4150,8 +4148,8 @@ class PartialEvaluator {
     if (widths) {
       const glyphWidths = [];
       let j = firstChar;
-      for (let i = 0, ii = widths.length; i < ii; i++) {
-        glyphWidths[j++] = this.xref.fetchIfRef(widths[i]);
+      for (const width of widths) {
+        glyphWidths[j++] = this.xref.fetchIfRef(width);
       }
       newProperties.widths = glyphWidths;
     } else {

@@ -2795,7 +2795,6 @@ class Font {
     const cmapPlatformId = cmapTable.platformId;
     const cmapEncodingId = cmapTable.encodingId;
     const cmapMappings = cmapTable.mappings;
-    const cmapMappingsLength = cmapMappings.length;
     let baseEncoding = [],
       forcePostTable = false;
     if (

@@ -2860,18 +2859,18 @@ class Font {
           }
         }

-        for (let i = 0; i < cmapMappingsLength; ++i) {
-          if (cmapMappings[i].charCode !== unicodeOrCharCode) {
+        for (const mapping of cmapMappings) {
+          if (mapping.charCode !== unicodeOrCharCode) {
             continue;
           }
-          charCodeToGlyphId[charCode] = cmapMappings[i].glyphId;
+          charCodeToGlyphId[charCode] = mapping.glyphId;
           break;
         }
       }
     } else if (cmapPlatformId === 0) {
       // Default Unicode semantics, use the charcodes as is.
-      for (let i = 0; i < cmapMappingsLength; ++i) {
-        charCodeToGlyphId[cmapMappings[i].charCode] = cmapMappings[i].glyphId;
+      for (const mapping of cmapMappings) {
+        charCodeToGlyphId[mapping.charCode] = mapping.glyphId;
       }
       // Always prefer the BaseEncoding/Differences arrays, when they exist
       // (fixes issue13433.pdf).

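Unlike array methods such as `forEach`, a `for...of` loop keeps `break` and `continue` working exactly as in the index-based original, which the early-exit search above relies on. A small self-contained sketch with made-up mappings:

```js
const cmapMappings = [
  { charCode: 0x41, glyphId: 1 },
  { charCode: 0x42, glyphId: 2 },
];
const target = 0x42;
let glyphId = -1;

for (const mapping of cmapMappings) {
  if (mapping.charCode !== target) {
    continue; // skip non-matching entries
  }
  glyphId = mapping.glyphId; // first match wins
  break;
}
console.log(glyphId); // 2
```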
@@ -2888,8 +2887,8 @@ class Font {
       // special range since some PDFs have char codes outside of this range
       // (e.g. 0x2013) which when masked would overwrite other values in the
       // cmap.
-      for (let i = 0; i < cmapMappingsLength; ++i) {
-        let charCode = cmapMappings[i].charCode;
+      for (const mapping of cmapMappings) {
+        let charCode = mapping.charCode;
         if (
           cmapPlatformId === 3 &&
           charCode >= 0xf000 &&

@@ -2897,7 +2896,7 @@ class Font {
         ) {
           charCode &= 0xff;
         }
-        charCodeToGlyphId[charCode] = cmapMappings[i].glyphId;
+        charCodeToGlyphId[charCode] = mapping.glyphId;
       }
     }

@@ -3093,8 +3092,7 @@ class Font {
         // to begin with.
         continue;
       }
-      for (let i = 0, ii = charCodes.length; i < ii; i++) {
-        const charCode = charCodes[i];
+      for (const charCode of charCodes) {
         // Find a fontCharCode that maps to the base and accent glyphs.
         // If one doesn't exists, create it.
         const charCodeToGlyphId = newMapping.charCodeToGlyphId;

@@ -3212,8 +3210,7 @@ class Font {
     // trying to estimate space character width
     const possibleSpaceReplacements = ["space", "minus", "one", "i", "I"];
     let width;
-    for (let i = 0, ii = possibleSpaceReplacements.length; i < ii; i++) {
-      const glyphName = possibleSpaceReplacements[i];
+    for (const glyphName of possibleSpaceReplacements) {
       // if possible, getting width by glyph name
       if (glyphName in this.widths) {
         width = this.widths[glyphName];

@@ -187,9 +187,9 @@ class PDFFunction {
     }

     const fnArray = [];
-    for (let j = 0, jj = fnObj.length; j < jj; j++) {
+    for (const fn of fnObj) {
       fnArray.push(
-        this.parse({ xref, isEvalSupported, fn: xref.fetchIfRef(fnObj[j]) })
+        this.parse({ xref, isEvalSupported, fn: xref.fetchIfRef(fn) })
       );
     }
     return function (src, srcOffset, dest, destOffset) {

@@ -364,12 +364,9 @@ class PDFFunction {
       throw new FormatError("Bad domain for stiched function");
     }

-    const fnRefs = dict.get("Functions");
     const fns = [];
-    for (let i = 0, ii = fnRefs.length; i < ii; ++i) {
-      fns.push(
-        this.parse({ xref, isEvalSupported, fn: xref.fetchIfRef(fnRefs[i]) })
-      );
+    for (const fn of dict.get("Functions")) {
+      fns.push(this.parse({ xref, isEvalSupported, fn: xref.fetchIfRef(fn) }));
     }

     const bounds = toNumberArray(dict.getArray("Bounds"));

@@ -774,8 +774,7 @@ class JpegImage {
 function prepareComponents(frame) {
   const mcusPerLine = Math.ceil(frame.samplesPerLine / 8 / frame.maxH);
   const mcusPerColumn = Math.ceil(frame.scanLines / 8 / frame.maxV);
-  for (let i = 0, ii = frame.components.length; i < ii; i++) {
-    const component = frame.components[i];
+  for (const component of frame.components) {
     const blocksPerLine = Math.ceil(
       (Math.ceil(frame.samplesPerLine / 8) * component.h) / frame.maxH
     );

@@ -1080,9 +1079,7 @@ class JpegImage {
     this.jfif = jfif;
     this.adobe = adobe;
     this.components = [];
-    for (let i = 0, ii = frame.components.length; i < ii; i++) {
-      const component = frame.components[i];
-
+    for (const component of frame.components) {
       // Prevent errors when DQT markers are placed after SOF{n} markers,
       // by assigning the `quantizationTable` entry after the entire image
       // has been parsed (fixes issue7406.pdf).

@@ -1391,11 +1388,9 @@ class JpegImage {
     const data = this._getLinearizedBlockData(width, height, isSourcePDF);

     if (this.numComponents === 1 && forceRGB) {
-      const dataLength = data.length;
-      const rgbData = new Uint8ClampedArray(dataLength * 3);
+      const rgbData = new Uint8ClampedArray(data.length * 3);
       let offset = 0;
-      for (let i = 0; i < dataLength; i++) {
-        const grayColor = data[i];
+      for (const grayColor of data) {
         rgbData[offset++] = grayColor;
         rgbData[offset++] = grayColor;
         rgbData[offset++] = grayColor;

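The gray-to-RGB expansion still works under `for...of` because the write position is tracked by the separate `offset` cursor rather than the read index. A tiny runnable sketch of the expansion (sample values made up):

```js
const gray = new Uint8ClampedArray([0, 128, 255]);
const rgbData = new Uint8ClampedArray(gray.length * 3);
let offset = 0;

for (const grayColor of gray) {
  rgbData[offset++] = grayColor; // R
  rgbData[offset++] = grayColor; // G
  rgbData[offset++] = grayColor; // B
}
console.log(rgbData); // 0,0,0, 128,128,128, 255,255,255
```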
@@ -71,8 +71,8 @@ class ObjectLoader {
     this.refSet = new RefSet();
     // Setup the initial nodes to visit.
     const nodesToVisit = [];
-    for (let i = 0, ii = keys.length; i < ii; i++) {
-      const rawValue = dict.getRaw(keys[i]);
+    for (const key of keys) {
+      const rawValue = dict.getRaw(key);
       // Skip nodes that are guaranteed to be empty.
       if (rawValue !== undefined) {
         nodesToVisit.push(rawValue);

@@ -37,8 +37,8 @@ function writeData(dest, offset, data) {
     }
   } else {
     // treating everything else as array
-    for (let i = 0, ii = data.length; i < ii; i++) {
-      dest[offset++] = data[i] & 0xff;
+    for (const num of data) {
+      dest[offset++] = num & 0xff;
     }
   }
 }

@@ -291,8 +291,8 @@ class Type1Font {

   getType2Charstrings(type1Charstrings) {
     const type2Charstrings = [];
-    for (let i = 0, ii = type1Charstrings.length; i < ii; i++) {
-      type2Charstrings.push(type1Charstrings[i].charstring);
+    for (const type1Charstring of type1Charstrings) {
+      type2Charstrings.push(type1Charstring.charstring);
     }
     return type2Charstrings;
   }

@@ -498,8 +498,8 @@ class SimpleXMLParser extends XMLParserBase {
     if (!lastElement) {
       return null;
     }
-    for (let i = 0, ii = lastElement.childNodes.length; i < ii; i++) {
-      lastElement.childNodes[i].parentNode = lastElement;
+    for (const childNode of lastElement.childNodes) {
+      childNode.parentNode = lastElement;
     }
     return lastElement;
   }

@@ -551,14 +551,14 @@ class XRef {
       }
     }
     // reading XRef streams
-    for (let i = 0, ii = xrefStms.length; i < ii; ++i) {
-      this.startXRefQueue.push(xrefStms[i]);
+    for (const xrefStm of xrefStms) {
+      this.startXRefQueue.push(xrefStm);
       this.readXRef(/* recoveryMode */ true);
     }
     // finding main trailer
     let trailerDict;
-    for (let i = 0, ii = trailers.length; i < ii; ++i) {
-      stream.pos = trailers[i];
+    for (const trailer of trailers) {
+      stream.pos = trailer;
       const parser = new Parser({
         lexer: new Lexer(stream),
         xref: this,

@@ -844,8 +844,7 @@ function copyCtxState(sourceCtx, destCtx) {
     "globalCompositeOperation",
     "font",
   ];
-  for (let i = 0, ii = properties.length; i < ii; i++) {
-    const property = properties[i];
+  for (const property of properties) {
     if (sourceCtx[property] !== undefined) {
       destCtx[property] = sourceCtx[property];
     }

@@ -1538,11 +1537,7 @@ class CanvasGraphics {
   }

   setGState(states) {
-    for (let i = 0, ii = states.length; i < ii; i++) {
-      const state = states[i];
-      const key = state[0];
-      const value = state[1];
-
+    for (const [key, value] of states) {
       switch (key) {
         case "LW":
           this.setLineWidth(value);

@@ -1569,11 +1564,11 @@ class CanvasGraphics {
           this.setFont(value[0], value[1]);
           break;
         case "CA":
-          this.current.strokeAlpha = state[1];
+          this.current.strokeAlpha = value;
           break;
         case "ca":
-          this.current.fillAlpha = state[1];
-          this.ctx.globalAlpha = state[1];
+          this.current.fillAlpha = value;
+          this.ctx.globalAlpha = value;
           break;
         case "BM":
           this.ctx.globalCompositeOperation = value;

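Destructuring in the loop head removes the intermediate `state` variable altogether, which is what forces the `CA`/`ca` cases above to read the already-bound `value` instead of `state[1]`. A reduced sketch of the shape (the state entries are made up for illustration):

```js
function setGState(states) {
  for (const [key, value] of states) {
    switch (key) {
      case "CA":
        console.log("stroke alpha:", value);
        break;
      case "ca":
        console.log("fill alpha:", value);
        break;
    }
  }
}

setGState([["CA", 0.75], ["ca", 0.5]]);
```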
@@ -299,8 +299,7 @@ class FreeTextEditor extends AnnotationEditor {
       return this.editorDiv.innerText;
     }
     const buffer = [];
-    for (let i = 0, ii = divs.length; i < ii; i++) {
-      const div = divs[i];
+    for (const div of divs) {
       const first = div.firstChild;
       if (first?.nodeName === "#text") {
         buffer.push(first.data);

@@ -214,8 +214,8 @@ function render(task) {
   }

   if (!task._textContentStream) {
     for (let i = 0; i < textDivsLength; i++) {
       task._layoutText(textDivs[i]);
     }
   }

@@ -214,8 +214,8 @@ function render(task) {
   }

   if (!task._textContentStream) {
-    for (let i = 0; i < textDivsLength; i++) {
-      task._layoutText(textDivs[i]);
+    for (const textDiv of textDivs) {
+      task._layoutText(textDiv);
     }
   }

@@ -304,26 +304,26 @@ class TextLayerRenderTask {
    * @private
    */
   _processItems(items, styleCache) {
-    for (let i = 0, len = items.length; i < len; i++) {
-      if (items[i].str === undefined) {
+    for (const item of items) {
+      if (item.str === undefined) {
         if (
-          items[i].type === "beginMarkedContentProps" ||
-          items[i].type === "beginMarkedContent"
+          item.type === "beginMarkedContentProps" ||
+          item.type === "beginMarkedContent"
         ) {
           const parent = this._container;
           this._container = document.createElement("span");
           this._container.classList.add("markedContent");
-          if (items[i].id !== null) {
-            this._container.setAttribute("id", `${items[i].id}`);
+          if (item.id !== null) {
+            this._container.setAttribute("id", `${item.id}`);
           }
           parent.append(this._container);
-        } else if (items[i].type === "endMarkedContent") {
+        } else if (item.type === "endMarkedContent") {
           this._container = this._container.parentNode;
         }
         continue;
       }
-      this._textContentItemsStr.push(items[i].str);
-      appendText(this, items[i], styleCache, this._layoutTextCtx);
+      this._textContentItemsStr.push(item.str);
+      appendText(this, item, styleCache, this._layoutTextCtx);
     }
   }

@@ -273,8 +273,7 @@ class TextHighlighter {
     let clearedUntilDivIdx = -1;

     // Clear all current matches.
-    for (let i = 0, ii = matches.length; i < ii; i++) {
-      const match = matches[i];
+    for (const match of matches) {
       const begin = Math.max(clearedUntilDivIdx, match.begin.divIdx);
       for (let n = begin, end = match.end.divIdx; n <= end; n++) {
         const div = textDivs[n];