Use more for...of loops in the code-base
Note that these cases, which are all in older code, were found using the [`unicorn/no-for-loop`](https://github.com/sindresorhus/eslint-plugin-unicorn/blob/main/docs/rules/no-for-loop.md) ESLint plugin rule. However, I've opted not to enable this rule by default, since there are still *some* cases where I do think it makes sense to allow "regular" for-loops.
parent 5bfba89b0a
commit 37ebc28756
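For context, a rough illustration (not code from the repository) of the pattern `unicorn/no-for-loop` flags: index-based loops whose index is only ever used to read the current element, with a `for...of` loop, optionally destructuring, as the fix. A "regular" for-loop remains the right tool when the index itself is needed:

```js
// Hypothetical example, not from the pdf.js code-base.
const tabs = [
  { id: 1, title: "a" },
  { id: 2, title: "b" },
];

// Flagged by `unicorn/no-for-loop`: `i` is only ever used as `tabs[i]`.
for (let i = 0; i < tabs.length; i++) {
  console.log(tabs[i].id);
}

// Preferred: iterate the values directly, destructuring what's needed.
for (const { id } of tabs) {
  console.log(id);
}

// Still fine as a regular for-loop: the index itself is used.
for (let i = 0; i < tabs.length; i++) {
  console.log(`${i}: ${tabs[i].title}`);
}
```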
```diff
@@ -95,8 +95,8 @@ limitations under the License.
       url: CRX_BASE_URL + "*:*",
     },
     function (tabsFromLastSession) {
-      for (var i = 0; i < tabsFromLastSession.length; ++i) {
-        chrome.tabs.reload(tabsFromLastSession[i].id);
+      for (const { id } of tabsFromLastSession) {
+        chrome.tabs.reload(id);
       }
     }
   );
```
```diff
@@ -54,8 +54,7 @@ function isPdfDownloadable(details) {
  * @returns {undefined|{name: string, value: string}} The header, if found.
  */
 function getHeaderFromHeaders(headers, headerName) {
-  for (var i = 0; i < headers.length; ++i) {
-    var header = headers[i];
+  for (const header of headers) {
     if (header.name.toLowerCase() === headerName) {
       return header;
     }
```
```diff
@@ -110,8 +110,7 @@ limitations under the License.
     id = "";
     var buf = new Uint8Array(5);
     crypto.getRandomValues(buf);
-    for (var i = 0; i < buf.length; ++i) {
-      var c = buf[i];
+    for (const c of buf) {
       id += (c >>> 4).toString(16) + (c & 0xf).toString(16);
     }
     localStorage.telemetryDeduplicationId = id;
```
external/cmapscompress/compress.js (vendored): 4 changed lines
```diff
@@ -218,8 +218,8 @@ function parseCMap(binaryData) {
     const sign = fromHexDigit(num[num.length - 1]) & 1 ? 15 : 0;
     let c = 0;
     let result = "";
-    for (let i = 0; i < num.length; i++) {
-      c = (c << 4) | fromHexDigit(num[i]);
+    for (const digit of num) {
+      c = (c << 4) | fromHexDigit(digit);
       result += toHexDigit(sign ? (c >> 1) ^ sign : c >> 1);
       c &= 1;
     }
```
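A detail worth calling out in the `compress.js` hunk: judging from its use with `fromHexDigit`, `num` appears to be a string of hex digits (an assumption here, not verified against the file), and strings are themselves iterable, so `for...of` visits each character directly:

```js
// Illustrative only; "1a2b" is a made-up value.
const num = "1a2b";
for (const digit of num) {
  // Strings are iterable: `digit` takes the values "1", "a", "2", "b".
  console.log(digit);
}
```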
external/cmapscompress/optimize.js (vendored): 6 changed lines
```diff
@@ -196,13 +196,13 @@ exports.optimizeCMap = function (data) {
         i++;
         data.body.splice(i, 0, newItem);
       }
-      for (let j = 0; j < subitems.length; j++) {
-        const code = subitems[j].code;
+      for (const subitem of subitems) {
+        const { code } = subitem;
         let q = 0;
         while (q < groups.length && groups[q] <= code) {
           q++;
         }
-        buckets[q].push(subitems[j]);
+        buckets[q].push(subitem);
       }
     }
   }
```
```diff
@@ -782,8 +782,7 @@ gulp.task("locale", function () {
   subfolders.sort();
   let viewerOutput = "";
   const locales = [];
-  for (let i = 0; i < subfolders.length; i++) {
-    const locale = subfolders[i];
+  for (const locale of subfolders) {
     const dirPath = L10N_DIR + locale;
     if (!checkDir(dirPath)) {
       continue;
```
```diff
@@ -1682,9 +1682,7 @@ class CFFCompiler {
   compileDict(dict, offsetTracker) {
     const out = [];
     // The dictionary keys must be in a certain order.
-    const order = dict.order;
-    for (let i = 0; i < order.length; ++i) {
-      const key = order[i];
+    for (const key of dict.order) {
       if (!(key in dict.values)) {
         continue;
       }
```
```diff
@@ -219,9 +219,8 @@ class Type1Font {
 
   getCharset() {
     const charset = [".notdef"];
-    const charstrings = this.charstrings;
-    for (let glyphId = 0; glyphId < charstrings.length; glyphId++) {
-      charset.push(charstrings[glyphId].glyphName);
+    for (const { glyphName } of this.charstrings) {
+      charset.push(glyphName);
     }
     return charset;
   }
```
```diff
@@ -576,7 +576,7 @@ const Type1Parser = (function Type1ParserClosure() {
         privateData,
       },
     };
-    let token, length, data, lenIV, encoded;
+    let token, length, data, lenIV;
     while ((token = this.getToken()) !== null) {
       if (token !== "/") {
         continue;
```
```diff
@@ -604,7 +604,7 @@ const Type1Parser = (function Type1ParserClosure() {
           this.getToken(); // read in 'RD' or '-|'
           data = length > 0 ? stream.getBytes(length) : new Uint8Array(0);
           lenIV = program.properties.privateData.lenIV;
-          encoded = this.readCharStrings(data, lenIV);
+          const encoded = this.readCharStrings(data, lenIV);
           this.nextChar();
           token = this.getToken(); // read in 'ND' or '|-'
           if (token === "noaccess") {
```
```diff
@@ -629,7 +629,7 @@ const Type1Parser = (function Type1ParserClosure() {
           this.getToken(); // read in 'RD' or '-|'
           data = length > 0 ? stream.getBytes(length) : new Uint8Array(0);
           lenIV = program.properties.privateData.lenIV;
-          encoded = this.readCharStrings(data, lenIV);
+          const encoded = this.readCharStrings(data, lenIV);
           this.nextChar();
           token = this.getToken(); // read in 'NP' or '|'
           if (token === "noaccess") {
```
```diff
@@ -675,9 +675,7 @@ const Type1Parser = (function Type1ParserClosure() {
       }
     }
 
-    for (let i = 0; i < charstrings.length; i++) {
-      const glyph = charstrings[i].glyph;
-      encoded = charstrings[i].encoded;
+    for (const { encoded, glyph } of charstrings) {
       const charString = new Type1CharString();
       const error = charString.convert(
         encoded,
```
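Note that the Type1Parser hunks do slightly more than change the loop style: `encoded` is removed from the shared `let` declaration, and each assignment becomes a block-scoped `const`, which is safe because no branch reads a value assigned elsewhere. A minimal sketch of the same move, with hypothetical names:

```js
// Sketch: because each branch assigns `encoded` independently, a shared
// outer `let encoded` can become a per-block `const`.
function handleToken(token, data) {
  if (token === "RD") {
    const encoded = data.slice(1); // block-scoped; was `encoded = ...` on an outer `let`
    return encoded.length;
  }
  const encoded = data.slice(2); // an independent binding, not a reassignment
  return encoded.length;
}
console.log(handleToken("RD", [0, 1, 2, 3])); // 3
```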
```diff
@@ -263,8 +263,8 @@ class WorkerMessageHandler {
           // There may be a chance that `newPdfManager` is not initialized for
           // the first few runs of `readchunk` block of code. Be sure to send
           // all cached chunks, if any, to chunked_stream via pdf_manager.
-          for (let i = 0; i < cachedChunks.length; i++) {
-            newPdfManager.sendProgressiveData(cachedChunks[i]);
+          for (const chunk of cachedChunks) {
+            newPdfManager.sendProgressiveData(chunk);
           }
 
           cachedChunks = [];
```
```diff
@@ -2211,8 +2211,7 @@ class CanvasGraphics {
 
     ctx.save();
     ctx.beginPath();
-    for (let i = 0; i < paths.length; i++) {
-      const path = paths[i];
+    for (const path of paths) {
       ctx.setTransform.apply(ctx, path.transform);
       ctx.translate(path.x, path.y);
       path.addToPath(ctx, path.fontSize);
```
```diff
@@ -301,8 +301,8 @@ window.onload = function () {
 
   // Bind an event handler to each image link
   const images = document.getElementsByClassName("image");
-  for (let i = 0; i < images.length; i++) {
-    images[i].addEventListener(
+  for (const image of images) {
+    image.addEventListener(
       "click",
       function (e) {
         showImages(e.target.id);
```
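One subtlety in the hunk above: `getElementsByClassName` returns a live `HTMLCollection` rather than an array. The rewrite still works because `HTMLCollection` implements the iterator protocol in modern browsers; a sketch of the distinction:

```js
// `showImages` stands in for the page's own handler (hypothetical stub here).
function showImages(id) {
  console.log("clicked:", id);
}

// HTMLCollection has no .forEach method, but it is iterable,
// so for...of works without first converting to an array.
const images = document.getElementsByClassName("image");
for (const image of images) {
  image.addEventListener("click", e => showImages(e.target.id));
}
// Equivalent, if array methods were needed:
// for (const image of Array.from(images)) { /* ... */ }
```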
```diff
@@ -407,9 +407,9 @@ window.onload = function () {
   function flashPixels(on) {
     const stroke = on ? "#FF0000" : "#CCC";
     const strokeWidth = on ? "2px" : "1px";
-    for (let i = 0; i < gFlashingPixels.length; i++) {
-      gFlashingPixels[i].setAttribute("stroke", stroke);
-      gFlashingPixels[i].setAttribute("stroke-width", strokeWidth);
+    for (const pixel of gFlashingPixels) {
+      pixel.setAttribute("stroke", stroke);
+      pixel.setAttribute("stroke-width", strokeWidth);
     }
   }
 
```
```diff
@@ -30,17 +30,13 @@ function parseOptions() {
 
 function group(stats, groupBy) {
   const vals = [];
-  for (let i = 0; i < stats.length; i++) {
-    const curStat = stats[i];
+  for (const curStat of stats) {
     const keyArr = [];
-    for (let j = 0; j < groupBy.length; j++) {
-      keyArr.push(curStat[groupBy[j]]);
+    for (const entry of groupBy) {
+      keyArr.push(curStat[entry]);
     }
     const key = keyArr.join(",");
-    if (vals[key] === undefined) {
-      vals[key] = [];
-    }
-    vals[key].push(curStat.time);
+    (vals[key] ||= []).push(curStat.time);
   }
   return vals;
 }
```
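The last change in `group` goes beyond the loop rewrite: the explicit `undefined` check is collapsed into a logical OR assignment. `x ||= y` assigns only when `x` is falsy, which matches the original behavior here since `vals[key]` is either `undefined` or an array, and arrays are always truthy. A standalone sketch:

```js
// Sketch of the `||=` idiom used above, with made-up data.
const vals = {};
for (const { key, time } of [
  { key: "a", time: 1 },
  { key: "a", time: 2 },
  { key: "b", time: 3 },
]) {
  // Create the bucket on first sight, then push; `||=` only assigns
  // when `vals[key]` is falsy (i.e. still undefined).
  (vals[key] ||= []).push(time);
}
console.log(vals); // { a: [1, 2], b: [3] }
```

(`??=`, which assigns only on `null`/`undefined`, would be the even closer equivalent of the original `=== undefined` test, but with array values the two operators behave identically.)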
```diff
@@ -134,8 +130,7 @@ function stat(baseline, current) {
     return s.length;
   });
   rows.push(labels);
-  for (let k = 0; k < keys.length; k++) {
-    const key = keys[k];
+  for (const key of keys) {
     const baselineMean = mean(baselineGroup[key]);
     const currentMean = mean(currentGroup[key]);
     const row = key.split(",");
```
```diff
@@ -172,8 +167,7 @@ function stat(baseline, current) {
   // print output
   console.log("-- Grouped By " + options.groupBy.join(", ") + " --");
   const groupCount = options.groupBy.length;
-  for (let r = 0; r < rows.length; r++) {
-    const row = rows[r];
+  for (const row of rows) {
     for (let i = 0; i < row.length; i++) {
       row[i] = pad(row[i], width[i], i < groupCount ? "right" : "left");
     }
```
```diff
@@ -416,10 +416,10 @@ describe("function", function () {
         "destOffset",
         compiledCode
       );
-      for (let i = 0; i < samples.length; i++) {
-        const out = new Float32Array(samples[i].output.length);
-        fn(samples[i].input, 0, out, 0);
-        expect(Array.prototype.slice.call(out, 0)).toEqual(samples[i].output);
+      for (const { input, output } of samples) {
+        const out = new Float32Array(output.length);
+        fn(input, 0, out, 0);
+        expect(Array.prototype.slice.call(out, 0)).toEqual(output);
       }
     }
   }
```