Use more for...of loops in the code-base
Note that these cases, which are all in older code, were found using the [`unicorn/no-for-loop`](https://github.com/sindresorhus/eslint-plugin-unicorn/blob/main/docs/rules/no-for-loop.md) ESLint plugin rule. However, I've opted not to enable this rule by default, since there are still *some* cases where it makes sense to allow "regular" for-loops.
parent 5bfba89b0a
commit 37ebc28756
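For reference, a minimal sketch of what enabling the rule would look like, assuming a legacy `.eslintrc.js`-style configuration; this commit deliberately does *not* add this:

```js
// Hypothetical sketch only: how the unicorn/no-for-loop rule could be enabled
// project-wide via a legacy .eslintrc.js file. This commit does NOT enable it.
module.exports = {
  plugins: ["unicorn"],
  rules: {
    // Flags index-based for-loops that could be rewritten as for...of.
    "unicorn/no-for-loop": "error",
  },
};
```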
```diff
@@ -95,8 +95,8 @@ limitations under the License.
       url: CRX_BASE_URL + "*:*",
     },
     function (tabsFromLastSession) {
-      for (var i = 0; i < tabsFromLastSession.length; ++i) {
-        chrome.tabs.reload(tabsFromLastSession[i].id);
+      for (const { id } of tabsFromLastSession) {
+        chrome.tabs.reload(id);
       }
     }
   );
```
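The hunk above already shows the recurring shape of these conversions: the index variable disappears, and where only one property of each element is needed, it is destructured directly in the loop head. A stand-alone sketch of the same pattern, using hypothetical data rather than code from the diff:

```js
// Hypothetical example illustrating the conversion applied throughout this commit.
const tabs = [{ id: 1 }, { id: 2 }, { id: 3 }];

// Old style: manual index bookkeeping.
for (let i = 0; i < tabs.length; i++) {
  console.log(tabs[i].id);
}

// New style: for...of with destructuring in the loop head.
for (const { id } of tabs) {
  console.log(id);
}
```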
```diff
@@ -54,8 +54,7 @@ function isPdfDownloadable(details) {
  * @returns {undefined|{name: string, value: string}} The header, if found.
  */
 function getHeaderFromHeaders(headers, headerName) {
-  for (var i = 0; i < headers.length; ++i) {
-    var header = headers[i];
+  for (const header of headers) {
     if (header.name.toLowerCase() === headerName) {
       return header;
     }
```
```diff
@@ -110,8 +110,7 @@ limitations under the License.
     id = "";
     var buf = new Uint8Array(5);
     crypto.getRandomValues(buf);
-    for (var i = 0; i < buf.length; ++i) {
-      var c = buf[i];
+    for (const c of buf) {
       id += (c >>> 4).toString(16) + (c & 0xf).toString(16);
     }
     localStorage.telemetryDeduplicationId = id;
```
external/cmapscompress/compress.js (vendored, 4 changed lines)
```diff
@@ -218,8 +218,8 @@ function parseCMap(binaryData) {
       const sign = fromHexDigit(num[num.length - 1]) & 1 ? 15 : 0;
       let c = 0;
       let result = "";
-      for (let i = 0; i < num.length; i++) {
-        c = (c << 4) | fromHexDigit(num[i]);
+      for (const digit of num) {
+        c = (c << 4) | fromHexDigit(digit);
         result += toHexDigit(sign ? (c >> 1) ^ sign : c >> 1);
         c &= 1;
       }
```
external/cmapscompress/optimize.js (vendored, 6 changed lines)
```diff
@@ -196,13 +196,13 @@ exports.optimizeCMap = function (data) {
           i++;
           data.body.splice(i, 0, newItem);
         }
-        for (let j = 0; j < subitems.length; j++) {
-          const code = subitems[j].code;
+        for (const subitem of subitems) {
+          const { code } = subitem;
           let q = 0;
           while (q < groups.length && groups[q] <= code) {
             q++;
           }
-          buckets[q].push(subitems[j]);
+          buckets[q].push(subitem);
         }
       }
     }
```
```diff
@@ -782,8 +782,7 @@ gulp.task("locale", function () {
   subfolders.sort();
   let viewerOutput = "";
   const locales = [];
-  for (let i = 0; i < subfolders.length; i++) {
-    const locale = subfolders[i];
+  for (const locale of subfolders) {
     const dirPath = L10N_DIR + locale;
     if (!checkDir(dirPath)) {
       continue;
```
```diff
@@ -1682,9 +1682,7 @@ class CFFCompiler {
   compileDict(dict, offsetTracker) {
     const out = [];
     // The dictionary keys must be in a certain order.
-    const order = dict.order;
-    for (let i = 0; i < order.length; ++i) {
-      const key = order[i];
+    for (const key of dict.order) {
       if (!(key in dict.values)) {
         continue;
       }
```
```diff
@@ -219,9 +219,8 @@ class Type1Font {
 
   getCharset() {
     const charset = [".notdef"];
-    const charstrings = this.charstrings;
-    for (let glyphId = 0; glyphId < charstrings.length; glyphId++) {
-      charset.push(charstrings[glyphId].glyphName);
+    for (const { glyphName } of this.charstrings) {
+      charset.push(glyphName);
     }
     return charset;
   }
```
```diff
@@ -576,7 +576,7 @@ const Type1Parser = (function Type1ParserClosure() {
         privateData,
       },
     };
-    let token, length, data, lenIV, encoded;
+    let token, length, data, lenIV;
     while ((token = this.getToken()) !== null) {
       if (token !== "/") {
         continue;
```
```diff
@@ -604,7 +604,7 @@ const Type1Parser = (function Type1ParserClosure() {
             this.getToken(); // read in 'RD' or '-|'
             data = length > 0 ? stream.getBytes(length) : new Uint8Array(0);
             lenIV = program.properties.privateData.lenIV;
-            encoded = this.readCharStrings(data, lenIV);
+            const encoded = this.readCharStrings(data, lenIV);
             this.nextChar();
             token = this.getToken(); // read in 'ND' or '|-'
             if (token === "noaccess") {
```
```diff
@@ -629,7 +629,7 @@ const Type1Parser = (function Type1ParserClosure() {
             this.getToken(); // read in 'RD' or '-|'
             data = length > 0 ? stream.getBytes(length) : new Uint8Array(0);
             lenIV = program.properties.privateData.lenIV;
-            encoded = this.readCharStrings(data, lenIV);
+            const encoded = this.readCharStrings(data, lenIV);
             this.nextChar();
             token = this.getToken(); // read in 'NP' or '|'
             if (token === "noaccess") {
```
```diff
@@ -675,9 +675,7 @@ const Type1Parser = (function Type1ParserClosure() {
       }
     }
 
-    for (let i = 0; i < charstrings.length; i++) {
-      const glyph = charstrings[i].glyph;
-      encoded = charstrings[i].encoded;
+    for (const { encoded, glyph } of charstrings) {
       const charString = new Type1CharString();
       const error = charString.convert(
         encoded,
```
```diff
@@ -263,8 +263,8 @@ class WorkerMessageHandler {
         // There may be a chance that `newPdfManager` is not initialized for
         // the first few runs of `readchunk` block of code. Be sure to send
         // all cached chunks, if any, to chunked_stream via pdf_manager.
-        for (let i = 0; i < cachedChunks.length; i++) {
-          newPdfManager.sendProgressiveData(cachedChunks[i]);
+        for (const chunk of cachedChunks) {
+          newPdfManager.sendProgressiveData(chunk);
         }
 
         cachedChunks = [];
```
```diff
@@ -2211,8 +2211,7 @@ class CanvasGraphics {
 
     ctx.save();
     ctx.beginPath();
-    for (let i = 0; i < paths.length; i++) {
-      const path = paths[i];
+    for (const path of paths) {
       ctx.setTransform.apply(ctx, path.transform);
       ctx.translate(path.x, path.y);
       path.addToPath(ctx, path.fontSize);
```
```diff
@@ -301,8 +301,8 @@ window.onload = function () {
 
   // Bind an event handler to each image link
   const images = document.getElementsByClassName("image");
-  for (let i = 0; i < images.length; i++) {
-    images[i].addEventListener(
+  for (const image of images) {
+    image.addEventListener(
       "click",
       function (e) {
         showImages(e.target.id);
```
```diff
@@ -407,9 +407,9 @@ window.onload = function () {
   function flashPixels(on) {
     const stroke = on ? "#FF0000" : "#CCC";
     const strokeWidth = on ? "2px" : "1px";
-    for (let i = 0; i < gFlashingPixels.length; i++) {
-      gFlashingPixels[i].setAttribute("stroke", stroke);
-      gFlashingPixels[i].setAttribute("stroke-width", strokeWidth);
+    for (const pixel of gFlashingPixels) {
+      pixel.setAttribute("stroke", stroke);
+      pixel.setAttribute("stroke-width", strokeWidth);
     }
   }
 
```
```diff
@@ -30,17 +30,13 @@ function parseOptions() {
 
 function group(stats, groupBy) {
   const vals = [];
-  for (let i = 0; i < stats.length; i++) {
-    const curStat = stats[i];
+  for (const curStat of stats) {
     const keyArr = [];
-    for (let j = 0; j < groupBy.length; j++) {
-      keyArr.push(curStat[groupBy[j]]);
+    for (const entry of groupBy) {
+      keyArr.push(curStat[entry]);
     }
     const key = keyArr.join(",");
-    if (vals[key] === undefined) {
-      vals[key] = [];
-    }
-    vals[key].push(curStat.time);
+    (vals[key] ||= []).push(curStat.time);
   }
   return vals;
 }
```
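The `group()` hunk above is the one place where the change goes slightly beyond a mechanical loop conversion: the explicit `undefined` check is collapsed into a logical OR assignment. A small stand-alone sketch of that pattern, with hypothetical data rather than code from the diff:

```js
const buckets = {};

// Old shape: create the array only if the key is missing, then push.
if (buckets.a === undefined) {
  buckets.a = [];
}
buckets.a.push(42);

// New shape: `||=` assigns the empty array only when the property is falsy,
// and the parenthesized expression evaluates to that array, so we push directly.
(buckets.b ||= []).push(42);
```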
```diff
@@ -134,8 +130,7 @@ function stat(baseline, current) {
     return s.length;
   });
   rows.push(labels);
-  for (let k = 0; k < keys.length; k++) {
-    const key = keys[k];
+  for (const key of keys) {
     const baselineMean = mean(baselineGroup[key]);
     const currentMean = mean(currentGroup[key]);
     const row = key.split(",");
```
```diff
@@ -172,8 +167,7 @@ function stat(baseline, current) {
   // print output
   console.log("-- Grouped By " + options.groupBy.join(", ") + " --");
   const groupCount = options.groupBy.length;
-  for (let r = 0; r < rows.length; r++) {
-    const row = rows[r];
+  for (const row of rows) {
     for (let i = 0; i < row.length; i++) {
       row[i] = pad(row[i], width[i], i < groupCount ? "right" : "left");
     }
```
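Note that the inner loop in the hunk above is left as an index-based for-loop: it writes back into `row[i]`, which a plain `for...of` cannot do. This is presumably one of the cases the commit message alludes to where a "regular" for-loop still makes sense. A hypothetical sketch of the distinction:

```js
// Hypothetical example: padding every cell of a row in place.
const row = ["a", "bb", "ccc"];

// The index is genuinely needed, because we assign back into the array.
for (let i = 0; i < row.length; i++) {
  row[i] = row[i].padStart(4);
}

// A for...of variant would have to recover the index via .entries().
for (const [i, cell] of row.entries()) {
  row[i] = cell.padStart(4);
}
```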
```diff
@@ -416,10 +416,10 @@ describe("function", function () {
         "destOffset",
         compiledCode
       );
-      for (let i = 0; i < samples.length; i++) {
-        const out = new Float32Array(samples[i].output.length);
-        fn(samples[i].input, 0, out, 0);
-        expect(Array.prototype.slice.call(out, 0)).toEqual(samples[i].output);
+      for (const { input, output } of samples) {
+        const out = new Float32Array(output.length);
+        fn(input, 0, out, 0);
+        expect(Array.prototype.slice.call(out, 0)).toEqual(output);
       }
     }
   }
```