From 9e262ae7fa4e8614d7950850b27020ca2f50bef8 Mon Sep 17 00:00:00 2001
From: Jonas Jenwald
Date: Fri, 24 Jan 2020 09:48:21 +0100
Subject: [PATCH] Enable the ESLint `prefer-const` rule globally (PR 11450 follow-up)

Please find additional details about the ESLint rule at
https://eslint.org/docs/rules/prefer-const

With the recent introduction of Prettier, this sort of mass enabling of
ESLint rules becomes a lot easier, since the code will be automatically
reformatted as necessary to account for e.g. changed line lengths.

Note that this patch is generated automatically by using the ESLint
`--fix` argument, and will thus require some additional clean-up (which
is done separately). A short before/after example of the rewrite is
included at the end of this patch.
---
 .eslintrc                          |   2 +-
 src/core/annotation.js             |   4 +-
 src/core/ccitt.js                  |  18 +--
 src/core/ccitt_stream.js           |   2 +-
 src/core/cff_parser.js             |  18 +--
 src/core/chunked_stream.js         |   2 +-
 src/core/cmap.js                   |   8 +-
 src/core/colorspace.js             | 232 ++++++++++++++---------------
 src/core/crypto.js                 |  44 +++---
 src/core/evaluator.js              |  62 ++++----
 src/core/font_renderer.js          |   6 +-
 src/core/fonts.js                  |  42 +++---
 src/core/function.js               |   6 +-
 src/core/image.js                  |   2 +-
 src/core/jbig2.js                  |  96 ++++++------
 src/core/jbig2_stream.js           |  14 +-
 src/core/jpeg_stream.js            |  18 +--
 src/core/jpg.js                    |   6 +-
 src/core/jpx.js                    |   2 +-
 src/core/jpx_stream.js             |  32 ++--
 src/core/obj.js                    |  42 +++---
 src/core/parser.js                 |   4 +-
 src/core/pattern.js                |  12 +-
 src/core/primitives.js             |   6 +-
 src/core/stream.js                 |   8 +-
 src/display/api.js                 |   4 +-
 src/display/api_compatibility.js   |   2 +-
 src/display/canvas.js              |   8 +-
 src/display/content_disposition.js |  22 +--
 src/display/font_loader.js         |  24 +--
 src/display/metadata.js            |  16 +-
 src/display/network_utils.js       |   6 +-
 src/display/node_stream.js         |  36 ++---
 src/display/pattern_helper.js      |   2 +-
 src/display/text_layer.js          |  10 +-
 src/pdf.js                         |  12 +-
 src/shared/compatibility.js        |   7 +-
 src/shared/util.js                 |   2 +-
 test/add_test.js                   |  12 +-
 test/driver.js                     |  16 +-
 test/unit/annotation_spec.js       |  12 +-
 test/unit/api_spec.js              |  45 +++---
 test/unit/cmap_spec.js             |   4 +-
 test/unit/colorspace_spec.js       | 188 +++++++++++------------
 test/unit/custom_spec.js           |   6 +-
 test/unit/fetch_stream_spec.js     |   2 +-
 test/unit/message_handler_spec.js  |  68 ++++-----
 test/unit/node_stream_spec.js      |  78 +++++-----
 test/unit/pdf_history_spec.js      |  14 +-
 test/unit/stream_spec.js           |   2 +-
 test/unit/test_utils.js            |   8 +-
 test/unit/ui_utils_spec.js         |  18 +--
 test/unit/util_spec.js             |  24 +--
 web/.eslintrc                      |   1 -
 54 files changed, 676 insertions(+), 661 deletions(-)

diff --git a/.eslintrc b/.eslintrc
index fe0e560eb..187da5ad6 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -161,7 +161,7 @@
   "object-shorthand": ["error", "always", {
     "avoidQuotes": true,
   }],
-  "prefer-const": "off",
+  "prefer-const": "error",
   "sort-imports": ["error", {
     "ignoreCase": true,
   }],
diff --git a/src/core/annotation.js b/src/core/annotation.js
index 2ef5ca64c..f1dd0146e 100644
--- a/src/core/annotation.js
+++ b/src/core/annotation.js
@@ -1058,8 +1058,8 @@ class ChoiceWidgetAnnotation extends WidgetAnnotation {
     if (Array.isArray(options)) {
       const xref = params.xref;
       for (let i = 0, ii = options.length; i < ii; i++) {
-        let option = xref.fetchIfRef(options[i]);
-        let isOptionArray = Array.isArray(option);
+        const option = xref.fetchIfRef(options[i]);
+        const isOptionArray = Array.isArray(option);
         this.data.options[i] = {
           exportValue: isOptionArray ?
xref.fetchIfRef(option[0]) : option, diff --git a/src/core/ccitt.js b/src/core/ccitt.js index eaeb2f4ec..7ac491089 100644 --- a/src/core/ccitt.js +++ b/src/core/ccitt.js @@ -27,7 +27,7 @@ import { info } from "../shared/util.js"; -let CCITTFaxDecoder = (function CCITTFaxDecoder() { +const CCITTFaxDecoder = (function CCITTFaxDecoder() { const ccittEOL = -2; const ccittEOF = -1; const twoDimPass = 0; @@ -515,9 +515,9 @@ let CCITTFaxDecoder = (function CCITTFaxDecoder() { if (this.eof) { return -1; } - let refLine = this.refLine; - let codingLine = this.codingLine; - let columns = this.columns; + const refLine = this.refLine; + const codingLine = this.codingLine; + const columns = this.columns; let refPos, blackPixels, bits, i; @@ -850,7 +850,7 @@ let CCITTFaxDecoder = (function CCITTFaxDecoder() { * @private */ _addPixels(a1, blackPixels) { - let codingLine = this.codingLine; + const codingLine = this.codingLine; let codingPos = this.codingPos; if (a1 > codingLine[codingPos]) { @@ -872,7 +872,7 @@ let CCITTFaxDecoder = (function CCITTFaxDecoder() { * @private */ _addPixelsNeg(a1, blackPixels) { - let codingLine = this.codingLine; + const codingLine = this.codingLine; let codingPos = this.codingPos; if (a1 > codingLine[codingPos]) { @@ -911,7 +911,7 @@ let CCITTFaxDecoder = (function CCITTFaxDecoder() { * @private */ _findTableCode(start, end, table, limit) { - let limitValue = limit || 0; + const limitValue = limit || 0; for (let i = start; i <= end; ++i) { let code = this._lookBits(i); if (code === ccittEOF) { @@ -921,7 +921,7 @@ let CCITTFaxDecoder = (function CCITTFaxDecoder() { code <<= end - i; } if (!limitValue || code >= limitValue) { - let p = table[code - limitValue]; + const p = table[code - limitValue]; if (p[0] === i) { this._eatBits(i); return [true, p[1], true]; @@ -945,7 +945,7 @@ let CCITTFaxDecoder = (function CCITTFaxDecoder() { return p[1]; } } else { - let result = this._findTableCode(1, 7, twoDimTable); + const result = this._findTableCode(1, 7, twoDimTable); if (result[0] && result[2]) { return result[1]; } diff --git a/src/core/ccitt_stream.js b/src/core/ccitt_stream.js index 7b6b04a56..a82102502 100644 --- a/src/core/ccitt_stream.js +++ b/src/core/ccitt_stream.js @@ -48,7 +48,7 @@ var CCITTFaxStream = (function CCITTFaxStreamClosure() { CCITTFaxStream.prototype.readBlock = function() { while (!this.eof) { - let c = this.ccittFaxDecoder.readNextChar(); + const c = this.ccittFaxDecoder.readNextChar(); if (c === -1) { this.eof = true; return; diff --git a/src/core/cff_parser.js b/src/core/cff_parser.js index 2877d3e92..e61ff1b91 100644 --- a/src/core/cff_parser.js +++ b/src/core/cff_parser.js @@ -1730,7 +1730,7 @@ var CFFCompiler = (function CFFCompilerClosure() { // Freetype requires the number of charset strings be correct and MacOS // requires a valid mapping for printing. let out; - let numGlyphsLessNotDef = numGlyphs - 1; + const numGlyphsLessNotDef = numGlyphs - 1; if (isCIDFont) { // In a CID font, the charset is a mapping of CIDs not SIDs so just // create an identity mapping. 
@@ -1742,16 +1742,16 @@ var CFFCompiler = (function CFFCompilerClosure() { numGlyphsLessNotDef & 0xff, ]); } else { - let length = 1 + numGlyphsLessNotDef * 2; + const length = 1 + numGlyphsLessNotDef * 2; out = new Uint8Array(length); out[0] = 0; // format 0 let charsetIndex = 0; - let numCharsets = charset.charset.length; + const numCharsets = charset.charset.length; let warned = false; for (let i = 1; i < out.length; i += 2) { let sid = 0; if (charsetIndex < numCharsets) { - let name = charset.charset[charsetIndex++]; + const name = charset.charset[charsetIndex++]; sid = strings.getSID(name); if (sid === -1) { sid = 0; @@ -1771,7 +1771,7 @@ var CFFCompiler = (function CFFCompilerClosure() { return this.compileTypedArray(encoding.raw); }, compileFDSelect: function CFFCompiler_compileFDSelect(fdSelect) { - let format = fdSelect.format; + const format = fdSelect.format; let out, i; switch (format) { case 0: @@ -1782,9 +1782,9 @@ var CFFCompiler = (function CFFCompilerClosure() { } break; case 3: - let start = 0; + const start = 0; let lastFD = fdSelect.fdSelect[0]; - let ranges = [ + const ranges = [ format, 0, // nRanges place holder 0, // nRanges place holder @@ -1793,14 +1793,14 @@ var CFFCompiler = (function CFFCompilerClosure() { lastFD, ]; for (i = 1; i < fdSelect.fdSelect.length; i++) { - let currentFD = fdSelect.fdSelect[i]; + const currentFD = fdSelect.fdSelect[i]; if (currentFD !== lastFD) { ranges.push((i >> 8) & 0xff, i & 0xff, currentFD); lastFD = currentFD; } } // 3 bytes are pushed for every range and there are 3 header bytes. - let numRanges = (ranges.length - 3) / 3; + const numRanges = (ranges.length - 3) / 3; ranges[1] = (numRanges >> 8) & 0xff; ranges[2] = numRanges & 0xff; // sentinel diff --git a/src/core/chunked_stream.js b/src/core/chunked_stream.js index 9da75f2f2..4cc41bf20 100644 --- a/src/core/chunked_stream.js +++ b/src/core/chunked_stream.js @@ -496,7 +496,7 @@ class ChunkedStreamManager { } onReceiveData(args) { - let chunk = args.chunk; + const chunk = args.chunk; const isProgressive = args.begin === undefined; const begin = isProgressive ? this.progressiveDataLength : args.begin; const end = begin + chunk.byteLength; diff --git a/src/core/cmap.js b/src/core/cmap.js index 859e10dda..be729f7f5 100644 --- a/src/core/cmap.js +++ b/src/core/cmap.js @@ -267,8 +267,8 @@ class CMap { // indices in the *billions*. For such tables we use for..in, which isn't // ideal because it stringifies the indices for all present elements, but // it does avoid iterating over every undefined entry. - let map = this._map; - let length = map.length; + const map = this._map; + const length = map.length; if (length <= 0x10000) { for (let i = 0; i < length; i++) { if (map[i] !== undefined) { @@ -276,7 +276,7 @@ class CMap { } } } else { - for (let i in map) { + for (const i in map) { callback(i, map[i]); } } @@ -289,7 +289,7 @@ class CMap { if (map.length <= 0x10000) { return map.indexOf(value); } - for (let charCode in map) { + for (const charCode in map) { if (map[charCode] === value) { return charCode | 0; } diff --git a/src/core/colorspace.js b/src/core/colorspace.js index 4d5ebea8c..8b4c567a5 100644 --- a/src/core/colorspace.js +++ b/src/core/colorspace.js @@ -37,12 +37,12 @@ import { isDict, isName, isStream } from "./primitives.js"; function resizeRgbImage(src, dest, w1, h1, w2, h2, alpha01) { const COMPONENTS = 3; alpha01 = alpha01 !== 1 ? 
0 : alpha01; - let xRatio = w1 / w2; - let yRatio = h1 / h2; + const xRatio = w1 / w2; + const yRatio = h1 / h2; let newIndex = 0, oldIndex; - let xScaled = new Uint16Array(w2); - let w1Scanline = w1 * COMPONENTS; + const xScaled = new Uint16Array(w2); + const w1Scanline = w1 * COMPONENTS; for (let i = 0; i < w2; i++) { xScaled[i] = Math.floor(i * xRatio) * COMPONENTS; @@ -74,7 +74,7 @@ class ColorSpace { * of the rgb components, each value ranging from [0,255]. */ getRgb(src, srcOffset) { - let rgb = new Uint8ClampedArray(3); + const rgb = new Uint8ClampedArray(3); this.getRgbItem(src, srcOffset, rgb, 0); return rgb; } @@ -148,10 +148,10 @@ class ColorSpace { 'ColorSpace.fillRgb: Unsupported "dest" type.' ); } - let count = originalWidth * originalHeight; + const count = originalWidth * originalHeight; let rgbBuf = null; - let numComponentColors = 1 << bpc; - let needsResizing = originalHeight !== height || originalWidth !== width; + const numComponentColors = 1 << bpc; + const needsResizing = originalHeight !== height || originalWidth !== width; if (this.isPassthrough(bpc)) { rgbBuf = comps; @@ -170,14 +170,14 @@ class ColorSpace { // TODO it may be worth while to cache the color map. While running // testing I never hit a cache so I will leave that out for now (perhaps // we are reparsing colorspaces too much?). - let allColors = + const allColors = bpc <= 8 ? new Uint8Array(numComponentColors) : new Uint16Array(numComponentColors); for (let i = 0; i < numComponentColors; i++) { allColors[i] = i; } - let colorMap = new Uint8ClampedArray(numComponentColors * 3); + const colorMap = new Uint8ClampedArray(numComponentColors * 3); this.getRgbBuffer( allColors, 0, @@ -260,12 +260,12 @@ class ColorSpace { } static parse(cs, xref, res, pdfFunctionFactory) { - let IR = this.parseToIR(cs, xref, res, pdfFunctionFactory); + const IR = this.parseToIR(cs, xref, res, pdfFunctionFactory); return this.fromIR(IR); } static fromIR(IR) { - let name = Array.isArray(IR) ? IR[0] : IR; + const name = Array.isArray(IR) ? 
IR[0] : IR; let whitePoint, blackPoint, gamma; switch (name) { @@ -284,7 +284,7 @@ class ColorSpace { whitePoint = IR[1]; blackPoint = IR[2]; gamma = IR[3]; - let matrix = IR[4]; + const matrix = IR[4]; return new CalRGBCS(whitePoint, blackPoint, gamma, matrix); case "PatternCS": let basePatternCS = IR[1]; @@ -293,19 +293,19 @@ class ColorSpace { } return new PatternCS(basePatternCS); case "IndexedCS": - let baseIndexedCS = IR[1]; - let hiVal = IR[2]; - let lookup = IR[3]; + const baseIndexedCS = IR[1]; + const hiVal = IR[2]; + const lookup = IR[3]; return new IndexedCS(this.fromIR(baseIndexedCS), hiVal, lookup); case "AlternateCS": - let numComps = IR[1]; - let alt = IR[2]; - let tintFn = IR[3]; + const numComps = IR[1]; + const alt = IR[2]; + const tintFn = IR[3]; return new AlternateCS(numComps, this.fromIR(alt), tintFn); case "LabCS": whitePoint = IR[1]; blackPoint = IR[2]; - let range = IR[3]; + const range = IR[3]; return new LabCS(whitePoint, blackPoint, range); default: throw new FormatError(`Unknown colorspace name: ${name}`); @@ -329,9 +329,9 @@ class ColorSpace { return ["PatternCS", null]; default: if (isDict(res)) { - let colorSpaces = res.get("ColorSpace"); + const colorSpaces = res.get("ColorSpace"); if (isDict(colorSpaces)) { - let resCS = colorSpaces.get(cs.name); + const resCS = colorSpaces.get(cs.name); if (resCS) { if (isName(resCS)) { return this.parseToIR(resCS, xref, res, pdfFunctionFactory); @@ -345,7 +345,7 @@ class ColorSpace { } } if (Array.isArray(cs)) { - let mode = xref.fetchIfRef(cs[0]).name; + const mode = xref.fetchIfRef(cs[0]).name; let numComps, params, alt, whitePoint, blackPoint, gamma; switch (mode) { @@ -369,18 +369,18 @@ class ColorSpace { whitePoint = params.getArray("WhitePoint"); blackPoint = params.getArray("BlackPoint"); gamma = params.getArray("Gamma"); - let matrix = params.getArray("Matrix"); + const matrix = params.getArray("Matrix"); return ["CalRGBCS", whitePoint, blackPoint, gamma, matrix]; case "ICCBased": - let stream = xref.fetchIfRef(cs[1]); - let dict = stream.dict; + const stream = xref.fetchIfRef(cs[1]); + const dict = stream.dict; numComps = dict.get("N"); alt = dict.get("Alternate"); if (alt) { - let altIR = this.parseToIR(alt, xref, res, pdfFunctionFactory); + const altIR = this.parseToIR(alt, xref, res, pdfFunctionFactory); // Parse the /Alternate CS to ensure that the number of components // are correct, and also (indirectly) that it is not a PatternCS. - let altCS = this.fromIR(altIR, pdfFunctionFactory); + const altCS = this.fromIR(altIR, pdfFunctionFactory); if (altCS.numComps === numComps) { return altIR; } @@ -407,13 +407,13 @@ class ColorSpace { return ["PatternCS", basePatternCS]; case "Indexed": case "I": - let baseIndexedCS = this.parseToIR( + const baseIndexedCS = this.parseToIR( cs[1], xref, res, pdfFunctionFactory ); - let hiVal = xref.fetchIfRef(cs[2]) + 1; + const hiVal = xref.fetchIfRef(cs[2]) + 1; let lookup = xref.fetchIfRef(cs[3]); if (isStream(lookup)) { lookup = lookup.getBytes(); @@ -421,16 +421,16 @@ class ColorSpace { return ["IndexedCS", baseIndexedCS, hiVal, lookup]; case "Separation": case "DeviceN": - let name = xref.fetchIfRef(cs[1]); + const name = xref.fetchIfRef(cs[1]); numComps = Array.isArray(name) ? 
name.length : 1; alt = this.parseToIR(cs[2], xref, res, pdfFunctionFactory); - let tintFn = pdfFunctionFactory.create(xref.fetchIfRef(cs[3])); + const tintFn = pdfFunctionFactory.create(xref.fetchIfRef(cs[3])); return ["AlternateCS", numComps, alt, tintFn]; case "Lab": params = xref.fetchIfRef(cs[1]); whitePoint = params.getArray("WhitePoint"); blackPoint = params.getArray("BlackPoint"); - let range = params.getArray("Range"); + const range = params.getArray("Range"); return ["LabCS", whitePoint, blackPoint, range]; default: throw new FormatError(`unimplemented color space object "${mode}"`); @@ -505,7 +505,7 @@ class AlternateCS extends ColorSpace { 'AlternateCS.getRgbItem: Unsupported "dest" type.' ); } - let tmpBuf = this.tmpBuf; + const tmpBuf = this.tmpBuf; this.tintFn(src, srcOffset, tmpBuf, 0); this.base.getRgbItem(tmpBuf, 0, dest, destOffset); } @@ -520,21 +520,21 @@ class AlternateCS extends ColorSpace { 'AlternateCS.getRgbBuffer: Unsupported "dest" type.' ); } - let tintFn = this.tintFn; - let base = this.base; - let scale = 1 / ((1 << bits) - 1); - let baseNumComps = base.numComps; - let usesZeroToOneRange = base.usesZeroToOneRange; - let isPassthrough = + const tintFn = this.tintFn; + const base = this.base; + const scale = 1 / ((1 << bits) - 1); + const baseNumComps = base.numComps; + const usesZeroToOneRange = base.usesZeroToOneRange; + const isPassthrough = (base.isPassthrough(8) || !usesZeroToOneRange) && alpha01 === 0; let pos = isPassthrough ? destOffset : 0; - let baseBuf = isPassthrough + const baseBuf = isPassthrough ? dest : new Uint8ClampedArray(baseNumComps * count); - let numComps = this.numComps; + const numComps = this.numComps; - let scaled = new Float32Array(numComps); - let tinted = new Float32Array(baseNumComps); + const scaled = new Float32Array(numComps); + const tinted = new Float32Array(baseNumComps); let i, j; for (i = 0; i < count; i++) { @@ -585,12 +585,12 @@ class IndexedCS extends ColorSpace { this.base = base; this.highVal = highVal; - let baseNumComps = base.numComps; - let length = baseNumComps * highVal; + const baseNumComps = base.numComps; + const length = baseNumComps * highVal; if (isStream(lookup)) { this.lookup = new Uint8Array(length); - let bytes = lookup.getBytes(length); + const bytes = lookup.getBytes(length); this.lookup.set(bytes); } else if (isString(lookup)) { this.lookup = new Uint8Array(length); @@ -614,8 +614,8 @@ class IndexedCS extends ColorSpace { 'IndexedCS.getRgbItem: Unsupported "dest" type.' ); } - let numComps = this.base.numComps; - let start = src[srcOffset] * numComps; + const numComps = this.base.numComps; + const start = src[srcOffset] * numComps; this.base.getRgbBuffer(this.lookup, start, 1, dest, destOffset, 8, 0); } @@ -629,13 +629,13 @@ class IndexedCS extends ColorSpace { 'IndexedCS.getRgbBuffer: Unsupported "dest" type.' ); } - let base = this.base; - let numComps = base.numComps; - let outputDelta = base.getOutputLength(numComps, alpha01); - let lookup = this.lookup; + const base = this.base; + const numComps = base.numComps; + const outputDelta = base.getOutputLength(numComps, alpha01); + const lookup = this.lookup; for (let i = 0; i < count; ++i) { - let lookupPos = src[srcOffset++] * numComps; + const lookupPos = src[srcOffset++] * numComps; base.getRgbBuffer(lookup, lookupPos, 1, dest, destOffset, 8, alpha01); destOffset += outputDelta; } @@ -679,7 +679,7 @@ class DeviceGrayCS extends ColorSpace { 'DeviceGrayCS.getRgbItem: Unsupported "dest" type.' 
); } - let c = src[srcOffset] * 255; + const c = src[srcOffset] * 255; dest[destOffset] = dest[destOffset + 1] = dest[destOffset + 2] = c; } @@ -693,11 +693,11 @@ class DeviceGrayCS extends ColorSpace { 'DeviceGrayCS.getRgbBuffer: Unsupported "dest" type.' ); } - let scale = 255 / ((1 << bits) - 1); + const scale = 255 / ((1 << bits) - 1); let j = srcOffset, q = destOffset; for (let i = 0; i < count; ++i) { - let c = scale * src[j++]; + const c = scale * src[j++]; dest[q++] = c; dest[q++] = c; dest[q++] = c; @@ -747,7 +747,7 @@ class DeviceRgbCS extends ColorSpace { dest.set(src.subarray(srcOffset, srcOffset + count * 3), destOffset); return; } - let scale = 255 / ((1 << bits) - 1); + const scale = 255 / ((1 << bits) - 1); let j = srcOffset, q = destOffset; for (let i = 0; i < count; ++i) { @@ -778,10 +778,10 @@ const DeviceCmykCS = (function DeviceCmykCSClosure() { // CMYK color conversion using the estimation below: // f(A, B,.. N) = Acc+Bcm+Ccy+Dck+c+Fmm+Gmy+Hmk+Im+Jyy+Kyk+Ly+Mkk+Nk+255 function convertToRgb(src, srcOffset, srcScale, dest, destOffset) { - let c = src[srcOffset] * srcScale; - let m = src[srcOffset + 1] * srcScale; - let y = src[srcOffset + 2] * srcScale; - let k = src[srcOffset + 3] * srcScale; + const c = src[srcOffset] * srcScale; + const m = src[srcOffset + 1] * srcScale; + const y = src[srcOffset + 2] * srcScale; + const k = src[srcOffset + 3] * srcScale; dest[destOffset] = 255 + @@ -864,7 +864,7 @@ const DeviceCmykCS = (function DeviceCmykCSClosure() { 'DeviceCmykCS.getRgbBuffer: Unsupported "dest" type.' ); } - let scale = 1 / ((1 << bits) - 1); + const scale = 1 / ((1 << bits) - 1); for (let i = 0; i < count; i++) { convertToRgb(src, srcOffset, scale, dest, destOffset); srcOffset += 4; @@ -888,15 +888,15 @@ const CalGrayCS = (function CalGrayCSClosure() { function convertToRgb(cs, src, srcOffset, dest, destOffset, scale) { // A represents a gray component of a calibrated gray space. // A <---> AG in the spec - let A = src[srcOffset] * scale; - let AG = Math.pow(A, cs.G); + const A = src[srcOffset] * scale; + const AG = Math.pow(A, cs.G); // Computes L as per spec. ( = cs.YW * AG ) // Except if other than default BlackPoint values are used. - let L = cs.YW * AG; + const L = cs.YW * AG; // http://www.poynton.com/notes/colour_and_gamma/ColorFAQ.html, Ch 4. // Convert values to rgb range [0, 255]. - let val = Math.max(295.8 * Math.pow(L, 0.333333333333333333) - 40.8, 0); + const val = Math.max(295.8 * Math.pow(L, 0.333333333333333333) - 40.8, 0); dest[destOffset] = val; dest[destOffset + 1] = val; dest[destOffset + 2] = val; @@ -977,7 +977,7 @@ const CalGrayCS = (function CalGrayCSClosure() { 'CalGrayCS.getRgbBuffer: Unsupported "dest" type.' ); } - let scale = 1 / ((1 << bits) - 1); + const scale = 1 / ((1 << bits) - 1); for (let i = 0; i < count; ++i) { convertToRgb(this, src, srcOffset, dest, destOffset, scale); @@ -1022,9 +1022,9 @@ const CalRGBCS = (function CalRGBCSClosure() { const FLAT_WHITEPOINT_MATRIX = new Float32Array([1, 1, 1]); - let tempNormalizeMatrix = new Float32Array(3); - let tempConvertMatrix1 = new Float32Array(3); - let tempConvertMatrix2 = new Float32Array(3); + const tempNormalizeMatrix = new Float32Array(3); + const tempConvertMatrix1 = new Float32Array(3); + const tempConvertMatrix2 = new Float32Array(3); const DECODE_L_CONSTANT = Math.pow((8 + 16) / 116, 3) / 8.0; @@ -1090,25 +1090,25 @@ const CalRGBCS = (function CalRGBCSClosure() { // http://www.adobe.com/content/dam/Adobe/en/devnet/photoshop/sdk/ // AdobeBPC.pdf. 
// The destination blackPoint is the default blackPoint [0, 0, 0]. - let zeroDecodeL = decodeL(0); + const zeroDecodeL = decodeL(0); - let X_DST = zeroDecodeL; - let X_SRC = decodeL(sourceBlackPoint[0]); + const X_DST = zeroDecodeL; + const X_SRC = decodeL(sourceBlackPoint[0]); - let Y_DST = zeroDecodeL; - let Y_SRC = decodeL(sourceBlackPoint[1]); + const Y_DST = zeroDecodeL; + const Y_SRC = decodeL(sourceBlackPoint[1]); - let Z_DST = zeroDecodeL; - let Z_SRC = decodeL(sourceBlackPoint[2]); + const Z_DST = zeroDecodeL; + const Z_SRC = decodeL(sourceBlackPoint[2]); - let X_Scale = (1 - X_DST) / (1 - X_SRC); - let X_Offset = 1 - X_Scale; + const X_Scale = (1 - X_DST) / (1 - X_SRC); + const X_Offset = 1 - X_Scale; - let Y_Scale = (1 - Y_DST) / (1 - Y_SRC); - let Y_Offset = 1 - Y_Scale; + const Y_Scale = (1 - Y_DST) / (1 - Y_SRC); + const Y_Offset = 1 - Y_Scale; - let Z_Scale = (1 - Z_DST) / (1 - Z_SRC); - let Z_Offset = 1 - Z_Scale; + const Z_Scale = (1 - Z_DST) / (1 - Z_SRC); + const Z_Offset = 1 - Z_Scale; result[0] = XYZ_Flat[0] * X_Scale + X_Offset; result[1] = XYZ_Flat[1] * Y_Scale + Y_Offset; @@ -1125,20 +1125,20 @@ const CalRGBCS = (function CalRGBCSClosure() { return; } - let LMS = result; + const LMS = result; matrixProduct(BRADFORD_SCALE_MATRIX, XYZ_In, LMS); - let LMS_Flat = tempNormalizeMatrix; + const LMS_Flat = tempNormalizeMatrix; convertToFlat(sourceWhitePoint, LMS, LMS_Flat); matrixProduct(BRADFORD_SCALE_INVERSE_MATRIX, LMS_Flat, result); } function normalizeWhitePointToD65(sourceWhitePoint, XYZ_In, result) { - let LMS = result; + const LMS = result; matrixProduct(BRADFORD_SCALE_MATRIX, XYZ_In, LMS); - let LMS_D65 = tempNormalizeMatrix; + const LMS_D65 = tempNormalizeMatrix; convertToD65(sourceWhitePoint, LMS, LMS_D65); matrixProduct(BRADFORD_SCALE_INVERSE_MATRIX, LMS_D65, result); @@ -1147,41 +1147,41 @@ const CalRGBCS = (function CalRGBCSClosure() { function convertToRgb(cs, src, srcOffset, dest, destOffset, scale) { // A, B and C represent a red, green and blue components of a calibrated // rgb space. - let A = adjustToRange(0, 1, src[srcOffset] * scale); - let B = adjustToRange(0, 1, src[srcOffset + 1] * scale); - let C = adjustToRange(0, 1, src[srcOffset + 2] * scale); + const A = adjustToRange(0, 1, src[srcOffset] * scale); + const B = adjustToRange(0, 1, src[srcOffset + 1] * scale); + const C = adjustToRange(0, 1, src[srcOffset + 2] * scale); // A <---> AGR in the spec // B <---> BGG in the spec // C <---> CGB in the spec - let AGR = Math.pow(A, cs.GR); - let BGG = Math.pow(B, cs.GG); - let CGB = Math.pow(C, cs.GB); + const AGR = Math.pow(A, cs.GR); + const BGG = Math.pow(B, cs.GG); + const CGB = Math.pow(C, cs.GB); // Computes intermediate variables L, M, N as per spec. // To decode X, Y, Z values map L, M, N directly to them. - let X = cs.MXA * AGR + cs.MXB * BGG + cs.MXC * CGB; - let Y = cs.MYA * AGR + cs.MYB * BGG + cs.MYC * CGB; - let Z = cs.MZA * AGR + cs.MZB * BGG + cs.MZC * CGB; + const X = cs.MXA * AGR + cs.MXB * BGG + cs.MXC * CGB; + const Y = cs.MYA * AGR + cs.MYB * BGG + cs.MYC * CGB; + const Z = cs.MZA * AGR + cs.MZB * BGG + cs.MZC * CGB; // The following calculations are based on this document: // http://www.adobe.com/content/dam/Adobe/en/devnet/photoshop/sdk/ // AdobeBPC.pdf. 
- let XYZ = tempConvertMatrix1; + const XYZ = tempConvertMatrix1; XYZ[0] = X; XYZ[1] = Y; XYZ[2] = Z; - let XYZ_Flat = tempConvertMatrix2; + const XYZ_Flat = tempConvertMatrix2; normalizeWhitePointToFlat(cs.whitePoint, XYZ, XYZ_Flat); - let XYZ_Black = tempConvertMatrix1; + const XYZ_Black = tempConvertMatrix1; compensateBlackPoint(cs.blackPoint, XYZ_Flat, XYZ_Black); - let XYZ_D65 = tempConvertMatrix2; + const XYZ_D65 = tempConvertMatrix2; normalizeWhitePointToD65(FLAT_WHITEPOINT_MATRIX, XYZ_Black, XYZ_D65); - let SRGB = tempConvertMatrix1; + const SRGB = tempConvertMatrix1; matrixProduct(SRGB_D65_XYZ_TO_RGB_MATRIX, XYZ_D65, SRGB); // Convert the values to rgb range [0, 255]. @@ -1204,14 +1204,14 @@ const CalRGBCS = (function CalRGBCSClosure() { matrix = matrix || new Float32Array([1, 0, 0, 0, 1, 0, 0, 0, 1]); // Translate arguments to spec variables. - let XW = whitePoint[0]; - let YW = whitePoint[1]; - let ZW = whitePoint[2]; + const XW = whitePoint[0]; + const YW = whitePoint[1]; + const ZW = whitePoint[2]; this.whitePoint = whitePoint; - let XB = blackPoint[0]; - let YB = blackPoint[1]; - let ZB = blackPoint[2]; + const XB = blackPoint[0]; + const YB = blackPoint[1]; + const ZB = blackPoint[2]; this.blackPoint = blackPoint; this.GR = gamma[0]; @@ -1276,7 +1276,7 @@ const CalRGBCS = (function CalRGBCSClosure() { 'CalRGBCS.getRgbBuffer: Unsupported "dest" type.' ); } - let scale = 1 / ((1 << bits) - 1); + const scale = 1 / ((1 << bits) - 1); for (let i = 0; i < count; ++i) { convertToRgb(this, src, srcOffset, dest, destOffset, scale); @@ -1343,13 +1343,13 @@ const LabCS = (function LabCSClosure() { } // Computes intermediate variables X,Y,Z as per spec - let M = (Ls + 16) / 116; - let L = M + as / 500; - let N = M - bs / 200; + const M = (Ls + 16) / 116; + const L = M + as / 500; + const N = M - bs / 200; - let X = cs.XW * fn_g(L); - let Y = cs.YW * fn_g(M); - let Z = cs.ZW * fn_g(N); + const X = cs.XW * fn_g(L); + const Y = cs.YW * fn_g(M); + const Z = cs.ZW * fn_g(N); let r, g, b; // Using different conversions for D50 and D65 white points, @@ -1442,7 +1442,7 @@ const LabCS = (function LabCSClosure() { 'LabCS.getRgbBuffer: Unsupported "dest" type.' 
); } - let maxVal = (1 << bits) - 1; + const maxVal = (1 << bits) - 1; for (let i = 0; i < count; i++) { convertToRgb(this, src, srcOffset, maxVal, dest, destOffset); srcOffset += 3; diff --git a/src/core/crypto.js b/src/core/crypto.js index 629c514f3..0846260a8 100644 --- a/src/core/crypto.js +++ b/src/core/crypto.js @@ -825,7 +825,7 @@ class AESBaseCipher { _decrypt(input, key) { let t, u, v; - let state = new Uint8Array(16); + const state = new Uint8Array(16); state.set(input); // AddRoundKey @@ -862,10 +862,10 @@ class AESBaseCipher { } // InvMixColumns for (let j = 0; j < 16; j += 4) { - let s0 = this._mix[state[j]]; - let s1 = this._mix[state[j + 1]]; - let s2 = this._mix[state[j + 2]]; - let s3 = this._mix[state[j + 3]]; + const s0 = this._mix[state[j]]; + const s1 = this._mix[state[j + 1]]; + const s2 = this._mix[state[j + 2]]; + const s3 = this._mix[state[j + 3]]; t = s0 ^ (s1 >>> 8) ^ @@ -912,7 +912,7 @@ class AESBaseCipher { const s = this._s; let t, u, v; - let state = new Uint8Array(16); + const state = new Uint8Array(16); state.set(input); for (let j = 0; j < 16; ++j) { @@ -946,10 +946,10 @@ class AESBaseCipher { state[15] = t; // MixColumns for (let j = 0; j < 16; j += 4) { - let s0 = state[j + 0]; - let s1 = state[j + 1]; - let s2 = state[j + 2]; - let s3 = state[j + 3]; + const s0 = state[j + 0]; + const s1 = state[j + 1]; + const s2 = state[j + 2]; + const s3 = state[j + 3]; t = s0 ^ s1 ^ s2 ^ s3; state[j + 0] ^= t ^ this._mixCol[s0 ^ s1]; state[j + 1] ^= t ^ this._mixCol[s1 ^ s2]; @@ -993,7 +993,7 @@ class AESBaseCipher { } _decryptBlock2(data, finalize) { - let sourceLength = data.length; + const sourceLength = data.length; let buffer = this.buffer, bufferLength = this.bufferPosition; let result = [], @@ -1006,7 +1006,7 @@ class AESBaseCipher { continue; } // buffer is full, decrypting - let plain = this._decrypt(buffer, this._key); + const plain = this._decrypt(buffer, this._key); // xor-ing the IV vector to get plain text for (let j = 0; j < 16; ++j) { plain[j] ^= iv[j]; @@ -1027,7 +1027,7 @@ class AESBaseCipher { let outputLength = 16 * result.length; if (finalize) { // undo a padding that is described in RFC 2898 - let lastBlock = result[result.length - 1]; + const lastBlock = result[result.length - 1]; let psLen = lastBlock[15]; if (psLen <= 16) { for (let i = 15, ii = 16 - psLen; i >= ii; --i) { @@ -1041,7 +1041,7 @@ class AESBaseCipher { result[result.length - 1] = lastBlock.subarray(0, 16 - psLen); } } - let output = new Uint8Array(outputLength); + const output = new Uint8Array(outputLength); for (let i = 0, j = 0, ii = result.length; i < ii; ++i, j += 16) { output.set(result[i], j); } @@ -1049,7 +1049,7 @@ class AESBaseCipher { } decryptBlock(data, finalize, iv = null) { - let sourceLength = data.length; + const sourceLength = data.length; let buffer = this.buffer, bufferLength = this.bufferPosition; // If an IV is not supplied, wait for IV values. 
They are at the start @@ -1080,10 +1080,10 @@ class AESBaseCipher { } encrypt(data, iv) { - let sourceLength = data.length; + const sourceLength = data.length; let buffer = this.buffer, bufferLength = this.bufferPosition; - let result = []; + const result = []; if (!iv) { iv = new Uint8Array(16); @@ -1099,7 +1099,7 @@ class AESBaseCipher { } // buffer is full, encrypting - let cipher = this._encrypt(buffer, this._key); + const cipher = this._encrypt(buffer, this._key); iv = cipher; result.push(cipher); buffer = new Uint8Array(16); @@ -1113,8 +1113,8 @@ class AESBaseCipher { return new Uint8Array(0); } // combining plain text blocks into one - let outputLength = 16 * result.length; - let output = new Uint8Array(outputLength); + const outputLength = 16 * result.length; + const output = new Uint8Array(outputLength); for (let i = 0, j = 0, ii = result.length; i < ii; ++i, j += 16) { output.set(result[i], j); } @@ -1163,7 +1163,7 @@ class AES128Cipher extends AESBaseCipher { const s = this._s; const rcon = this._rcon; - let result = new Uint8Array(b); + const result = new Uint8Array(b); result.set(cipherKey); for (let j = 16, i = 1; j < b; ++i) { @@ -1208,7 +1208,7 @@ class AES256Cipher extends AESBaseCipher { const b = 240; const s = this._s; - let result = new Uint8Array(b); + const result = new Uint8Array(b); result.set(cipherKey); let r = 1; diff --git a/src/core/evaluator.js b/src/core/evaluator.js index 34147e921..799344be0 100644 --- a/src/core/evaluator.js +++ b/src/core/evaluator.js @@ -472,7 +472,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { !(image instanceof JpegStream) && w + h < SMALL_IMAGE_DIMENSIONS ) { - let imageObj = new PDFImage({ + const imageObj = new PDFImage({ xref: this.xref, res: resources, image, @@ -652,7 +652,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { // we will build a map of integer values in range 0..255 to be fast. var transferObj = smask.get("TR"); if (isPDFFunction(transferObj)) { - let transferFn = this.pdfFunctionFactory.create(transferObj); + const transferFn = this.pdfFunctionFactory.create(transferObj); var transferMap = new Uint8Array(256); var tmp = new Float32Array(1); for (var i = 0; i < 256; i++) { @@ -683,11 +683,11 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { task ) { // Create an IR of the pattern code. - let tilingOpList = new OperatorList(); + const tilingOpList = new OperatorList(); // Merge the available resources, to prevent issues when the patternDict // is missing some /Resources entries (fixes issue6541.pdf). 
- let resourcesArray = [patternDict.get("Resources"), resources]; - let patternResources = Dict.merge(this.xref, resourcesArray); + const resourcesArray = [patternDict.get("Resources"), resources]; + const patternResources = Dict.merge(this.xref, resourcesArray); return this.getOperatorList({ stream: pattern, @@ -807,8 +807,8 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { var gStateKeys = gState.getKeys(); var promise = Promise.resolve(); for (var i = 0, ii = gStateKeys.length; i < ii; i++) { - let key = gStateKeys[i]; - let value = gState.get(key); + const key = gStateKeys[i]; + const value = gState.get(key); switch (key) { case "Type": break; @@ -1206,7 +1206,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { } return new Promise(function promiseBody(resolve, reject) { - let next = function(promise) { + const next = function(promise) { Promise.all([promise, operatorList.ready]).then(function() { try { promiseBody(resolve, reject); @@ -1252,7 +1252,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { ); } - let xobj = xobjs.get(name); + const xobj = xobjs.get(name); if (!xobj) { operatorList.addOp(fn, args); resolveXObject(); @@ -1262,7 +1262,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { throw new FormatError("XObject should be a stream"); } - let type = xobj.dict.get("Subtype"); + const type = xobj.dict.get("Subtype"); if (!isName(type)) { throw new FormatError("XObject should have a Name subtype"); } @@ -1887,7 +1887,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { } function enqueueChunk() { - let length = textContent.items.length; + const length = textContent.items.length; if (length > 0) { sink.enqueue(textContent, length); textContent.items = []; @@ -1898,7 +1898,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { var timeSlotManager = new TimeSlotManager(); return new Promise(function promiseBody(resolve, reject) { - let next = function(promise) { + const next = function(promise) { enqueueChunk(); Promise.all([promise, sink.ready]).then(function() { try { @@ -2142,7 +2142,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { ); } - let xobj = xobjs.get(name); + const xobj = xobjs.get(name); if (!xobj) { resolveXObject(); return; @@ -2151,7 +2151,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { throw new FormatError("XObject should be a stream"); } - let type = xobj.dict.get("Subtype"); + const type = xobj.dict.get("Subtype"); if (!isName(type)) { throw new FormatError("XObject should have a Name subtype"); } @@ -2167,10 +2167,10 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { // data can otherwise prevent `restore` operators from // executing. // NOTE: Only an issue when `options.ignoreErrors === true`. - let currentState = stateManager.state.clone(); - let xObjStateManager = new StateManager(currentState); + const currentState = stateManager.state.clone(); + const xObjStateManager = new StateManager(currentState); - let matrix = xobj.dict.getArray("Matrix"); + const matrix = xobj.dict.getArray("Matrix"); if (Array.isArray(matrix) && matrix.length === 6) { xObjStateManager.transform(matrix); } @@ -2178,7 +2178,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { // Enqueue the `textContent` chunk before parsing the /Form // XObject. 
enqueueChunk(); - let sinkWrapper = { + const sinkWrapper = { enqueueInvoked: false, enqueue(chunk, size) { @@ -2416,10 +2416,10 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { let toUnicode = [], charcode, glyphName; - let encoding = properties.defaultEncoding.slice(); - let baseEncodingName = properties.baseEncodingName; + const encoding = properties.defaultEncoding.slice(); + const baseEncodingName = properties.baseEncodingName; // Merge in the differences array. - let differences = properties.differences; + const differences = properties.differences; for (charcode in differences) { glyphName = differences[charcode]; if (glyphName === ".notdef") { @@ -2429,7 +2429,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { } encoding[charcode] = glyphName; } - let glyphsUnicodeMap = getGlyphsUnicode(); + const glyphsUnicodeMap = getGlyphsUnicode(); for (charcode in encoding) { // a) Map the character code to a character name. glyphName = encoding[charcode]; @@ -2482,7 +2482,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { break; default: // 'uniXXXX'/'uXXXX{XX}' glyphs - let unicode = getUnicodeForGlyph(glyphName, glyphsUnicodeMap); + const unicode = getUnicodeForGlyph(glyphName, glyphsUnicodeMap); if (unicode !== -1) { code = unicode; } @@ -2492,7 +2492,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { // equals `charcode`, using the glyph defined in the baseEncoding // seems to yield a better `toUnicode` mapping (fixes issue 5070). if (baseEncodingName && code === +charcode) { - let baseEncoding = getEncoding(baseEncodingName); + const baseEncoding = getEncoding(baseEncodingName); if (baseEncoding && (glyphName = baseEncoding[charcode])) { toUnicode[charcode] = String.fromCharCode( glyphsUnicodeMap[glyphName] @@ -2562,12 +2562,12 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { // b) Obtain the registry and ordering of the character collection used // by the font’s CMap (for example, Adobe and Japan1) from its // CIDSystemInfo dictionary. - let registry = properties.cidSystemInfo.registry; - let ordering = properties.cidSystemInfo.ordering; + const registry = properties.cidSystemInfo.registry; + const ordering = properties.cidSystemInfo.ordering; // c) Construct a second CMap name by concatenating the registry and // ordering obtained in step (b) in the format registry–ordering–UCS2 // (for example, Adobe–Japan1–UCS2). - let ucs2CMapName = Name.get(registry + "-" + ordering + "-UCS2"); + const ucs2CMapName = Name.get(registry + "-" + ordering + "-UCS2"); // d) Obtain the CMap with the name constructed in step (c) (available // from the ASN Web site; see the Bibliography). return CMapFactory.create({ @@ -2575,15 +2575,15 @@ var PartialEvaluator = (function PartialEvaluatorClosure() { fetchBuiltInCMap: this.fetchBuiltInCMap, useCMap: null, }).then(function(ucs2CMap) { - let cMap = properties.cMap; - let toUnicode = []; + const cMap = properties.cMap; + const toUnicode = []; cMap.forEach(function(charcode, cid) { if (cid > 0xffff) { throw new FormatError("Max size of CID is 65,535"); } // e) Map the CID obtained in step (a) according to the CMap // obtained in step (d), producing a Unicode value. 
- let ucs2 = ucs2CMap.lookup(cid); + const ucs2 = ucs2CMap.lookup(cid); if (ucs2) { toUnicode[charcode] = String.fromCharCode( (ucs2.charCodeAt(0) << 8) + ucs2.charCodeAt(1) @@ -3204,7 +3204,7 @@ var TranslatedFont = (function TranslatedFontClosure() { var charProcOperatorList = Object.create(null); for (var i = 0, n = charProcKeys.length; i < n; ++i) { - let key = charProcKeys[i]; + const key = charProcKeys[i]; loadCharProcsPromise = loadCharProcsPromise.then(function() { var glyphStream = charProcs.get(key); var operatorList = new OperatorList(); diff --git a/src/core/font_renderer.js b/src/core/font_renderer.js index acb733c1c..672bc620f 100644 --- a/src/core/font_renderer.js +++ b/src/core/font_renderer.js @@ -425,7 +425,7 @@ var FontRendererFactory = (function FontRendererFactoryClosure() { n = stack.pop(); subrCode = null; if (font.isCFFCIDFont) { - let fdIndex = font.fdSelect.getFDIndex(glyphId); + const fdIndex = font.fdSelect.getFDIndex(glyphId); if (fdIndex >= 0 && fdIndex < font.fdArray.length) { let fontDict = font.fdArray[fdIndex], subrs; @@ -757,9 +757,9 @@ var FontRendererFactory = (function FontRendererFactoryClosure() { if (this.isCFFCIDFont) { // Top DICT's FontMatrix can be ignored because CFFCompiler always // removes it and copies to FDArray DICTs. - let fdIndex = this.fdSelect.getFDIndex(glyphId); + const fdIndex = this.fdSelect.getFDIndex(glyphId); if (fdIndex >= 0 && fdIndex < this.fdArray.length) { - let fontDict = this.fdArray[fdIndex]; + const fontDict = this.fdArray[fdIndex]; fontMatrix = fontDict.getByName("FontMatrix") || FONT_IDENTITY_MATRIX; } else { warn("Invalid fd index for glyph index."); diff --git a/src/core/fonts.js b/src/core/fonts.js index f75b8e364..6cb730728 100644 --- a/src/core/fonts.js +++ b/src/core/fonts.js @@ -294,11 +294,11 @@ var ToUnicodeMap = (function ToUnicodeMapClosure() { charCodeOf(value) { // `Array.prototype.indexOf` is *extremely* inefficient for arrays which // are both very sparse and very large (see issue8372.pdf). 
- let map = this._map; + const map = this._map; if (map.length <= 0x10000) { return map.indexOf(value); } - for (let charCode in map) { + for (const charCode in map) { if (map[charCode] === value) { return charCode | 0; } @@ -697,7 +697,7 @@ var Font = (function FontClosure() { } function isTrueTypeCollectionFile(file) { - let header = file.peekBytes(4); + const header = file.peekBytes(4); return bytesToString(header) === "ttcf"; } @@ -1311,7 +1311,7 @@ var Font = (function FontClosure() { map[+charCode] = SupplementalGlyphMapForArialBlack[charCode]; } } else if (/Calibri/i.test(name)) { - let SupplementalGlyphMapForCalibri = getSupplementalGlyphMapForCalibri(); + const SupplementalGlyphMapForCalibri = getSupplementalGlyphMapForCalibri(); for (charCode in SupplementalGlyphMapForCalibri) { map[+charCode] = SupplementalGlyphMapForCalibri[charCode]; } @@ -1397,7 +1397,7 @@ var Font = (function FontClosure() { ]; function readTables(file, numTables) { - let tables = Object.create(null); + const tables = Object.create(null); tables["OS/2"] = null; tables["cmap"] = null; tables["head"] = null; @@ -1408,7 +1408,7 @@ var Font = (function FontClosure() { tables["post"] = null; for (let i = 0; i < numTables; i++) { - let table = readTableEntry(font); + const table = readTableEntry(font); if (!VALID_TABLES.includes(table.tag)) { continue; // skipping table if it's not a required or optional table } @@ -1460,18 +1460,18 @@ var Font = (function FontClosure() { } function readTrueTypeCollectionHeader(ttc) { - let ttcTag = bytesToString(ttc.getBytes(4)); + const ttcTag = bytesToString(ttc.getBytes(4)); assert(ttcTag === "ttcf", "Must be a TrueType Collection font."); - let majorVersion = ttc.getUint16(); - let minorVersion = ttc.getUint16(); - let numFonts = ttc.getInt32() >>> 0; - let offsetTable = []; + const majorVersion = ttc.getUint16(); + const minorVersion = ttc.getUint16(); + const numFonts = ttc.getInt32() >>> 0; + const offsetTable = []; for (let i = 0; i < numFonts; i++) { offsetTable.push(ttc.getInt32() >>> 0); } - let header = { + const header = { ttcTag, majorVersion, minorVersion, @@ -1493,23 +1493,23 @@ var Font = (function FontClosure() { } function readTrueTypeCollectionData(ttc, fontName) { - let { numFonts, offsetTable } = readTrueTypeCollectionHeader(ttc); + const { numFonts, offsetTable } = readTrueTypeCollectionHeader(ttc); for (let i = 0; i < numFonts; i++) { ttc.pos = (ttc.start || 0) + offsetTable[i]; - let potentialHeader = readOpenTypeHeader(ttc); - let potentialTables = readTables(ttc, potentialHeader.numTables); + const potentialHeader = readOpenTypeHeader(ttc); + const potentialTables = readTables(ttc, potentialHeader.numTables); if (!potentialTables["name"]) { throw new FormatError( 'TrueType Collection font must contain a "name" table.' 
); } - let nameTable = readNameTable(potentialTables["name"]); + const nameTable = readNameTable(potentialTables["name"]); for (let j = 0, jj = nameTable.length; j < jj; j++) { for (let k = 0, kk = nameTable[j].length; k < kk; k++) { - let nameEntry = nameTable[j][k]; + const nameEntry = nameTable[j][k]; if (nameEntry && nameEntry.replace(/\s/g, "") === fontName) { return { header: potentialHeader, @@ -2327,7 +2327,7 @@ var Font = (function FontClosure() { } else { ttContext.functionsUsed[funcId] = true; if (funcId in ttContext.functionsStackDeltas) { - let newStackLength = + const newStackLength = stack.length + ttContext.functionsStackDeltas[funcId]; if (newStackLength < 0) { warn("TT: CALL invalid functions stack delta."); @@ -2525,7 +2525,7 @@ var Font = (function FontClosure() { let header, tables; if (isTrueTypeCollectionFile(font)) { - let ttcData = readTrueTypeCollectionData(font, this.name); + const ttcData = readTrueTypeCollectionData(font, this.name); header = ttcData.header; tables = ttcData.tables; } else { @@ -3709,8 +3709,8 @@ var Type1Font = (function Type1FontClosure() { var charsetArray = [".notdef"]; var i, ii; for (i = 0; i < count; i++) { - let glyphName = charstrings[i].glyphName; - let index = CFFStandardStrings.indexOf(glyphName); + const glyphName = charstrings[i].glyphName; + const index = CFFStandardStrings.indexOf(glyphName); if (index === -1) { strings.add(glyphName); } diff --git a/src/core/function.js b/src/core/function.js index c4aba96d9..5cc4247d9 100644 --- a/src/core/function.js +++ b/src/core/function.js @@ -24,7 +24,7 @@ import { import { isDict, isStream } from "./primitives.js"; import { PostScriptLexer, PostScriptParser } from "./ps_parser.js"; -let IsEvalSupportedCached = { +const IsEvalSupportedCached = { get value() { return shadow(this, "value", isEvalSupported()); }, @@ -150,7 +150,7 @@ var PDFFunction = (function PDFFunctionClosure() { }, parse({ xref, isEvalSupported, fn }) { - let IR = this.getIR({ xref, isEvalSupported, fn }); + const IR = this.getIR({ xref, isEvalSupported, fn }); return this.fromIR({ xref, isEvalSupported, IR }); }, @@ -480,7 +480,7 @@ var PDFFunction = (function PDFFunctionClosure() { var code = IR[3]; if (isEvalSupported && IsEvalSupportedCached.value) { - let compiled = new PostScriptCompiler().compile(code, domain, range); + const compiled = new PostScriptCompiler().compile(code, domain, range); if (compiled) { // Compiled function consists of simple expressions such as addition, // subtraction, Math.max, and also contains 'var' and 'return' diff --git a/src/core/image.js b/src/core/image.js index c6083574a..d8e3398f4 100644 --- a/src/core/image.js +++ b/src/core/image.js @@ -195,7 +195,7 @@ var PDFImage = (function PDFImageClosure() { ); } } - let resources = isInline ? res : null; + const resources = isInline ? res : null; this.colorSpace = ColorSpace.parse( colorSpace, xref, diff --git a/src/core/jbig2.js b/src/core/jbig2.js index 18127a3b3..331afcb8f 100644 --- a/src/core/jbig2.js +++ b/src/core/jbig2.js @@ -345,7 +345,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { decodingContext ) { if (mmr) { - let input = new Reader( + const input = new Reader( decodingContext.data, decodingContext.start, decodingContext.end @@ -625,7 +625,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { currentHeight += deltaHeight; let currentWidth = 0, totalWidth = 0; - let firstSymbol = huffman ? symbolWidths.length : 0; + const firstSymbol = huffman ? symbolWidths.length : 0; while (true) { var deltaWidth = huffman ? 
huffmanTables.tableDeltaWidth.decode(huffmanInput) @@ -703,7 +703,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { } if (huffman && !refinement) { // 6.5.9 Height class collective bitmap - let bitmapSize = huffmanTables.tableBitmapSize.decode(huffmanInput); + const bitmapSize = huffmanTables.tableBitmapSize.decode(huffmanInput); huffmanInput.byteAlign(); let collectiveBitmap; if (bitmapSize === 0) { @@ -715,8 +715,8 @@ var Jbig2Image = (function Jbig2ImageClosure() { ); } else { // MMR collective bitmap - let originalEnd = huffmanInput.end; - let bitmapEnd = huffmanInput.position + bitmapSize; + const originalEnd = huffmanInput.end; + const bitmapEnd = huffmanInput.position + bitmapSize; huffmanInput.end = bitmapEnd; collectiveBitmap = decodeMMRBitmap( huffmanInput, @@ -727,7 +727,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { huffmanInput.end = originalEnd; huffmanInput.position = bitmapEnd; } - let numberOfSymbolsDecoded = symbolWidths.length; + const numberOfSymbolsDecoded = symbolWidths.length; if (firstSymbol === numberOfSymbolsDecoded - 1) { // collectiveBitmap is a single symbol. newSymbols.push(collectiveBitmap); @@ -954,7 +954,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { template, decodingContext ) { - let at = []; + const at = []; if (!mmr) { at.push({ x: -patternWidth, @@ -975,8 +975,8 @@ var Jbig2Image = (function Jbig2ImageClosure() { }); } } - let collectiveWidth = (maxPatternIndex + 1) * patternWidth; - let collectiveBitmap = decodeBitmap( + const collectiveWidth = (maxPatternIndex + 1) * patternWidth; + const collectiveBitmap = decodeBitmap( mmr, collectiveWidth, patternHeight, @@ -1023,7 +1023,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { gridVectorY, decodingContext ) { - let skip = null; + const skip = null; if (enableSkip) { throw new Jbig2Error("skip is not supported"); } @@ -1036,7 +1036,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { } // Prepare bitmap. - let regionBitmap = []; + const regionBitmap = []; let i, j, row; for (i = 0; i < regionHeight; i++) { row = new Uint8Array(regionWidth); @@ -1048,12 +1048,12 @@ var Jbig2Image = (function Jbig2ImageClosure() { regionBitmap.push(row); } - let numberOfPatterns = patterns.length; - let pattern0 = patterns[0]; - let patternWidth = pattern0[0].length, + const numberOfPatterns = patterns.length; + const pattern0 = patterns[0]; + const patternWidth = pattern0[0].length, patternHeight = pattern0.length; - let bitsPerValue = log2(numberOfPatterns); - let at = []; + const bitsPerValue = log2(numberOfPatterns); + const at = []; if (!mmr) { at.push({ x: template <= 1 ? 3 : 2, @@ -1403,8 +1403,8 @@ var Jbig2Image = (function Jbig2ImageClosure() { break; case 16: // PatternDictionary // 7.4.4. 
Pattern dictionary segment syntax - let patternDictionary = {}; - let patternDictionaryFlags = data[position++]; + const patternDictionary = {}; + const patternDictionaryFlags = data[position++]; patternDictionary.mmr = !!(patternDictionaryFlags & 1); patternDictionary.template = (patternDictionaryFlags >> 1) & 3; patternDictionary.patternWidth = data[position++]; @@ -1416,10 +1416,10 @@ var Jbig2Image = (function Jbig2ImageClosure() { case 22: // ImmediateHalftoneRegion case 23: // ImmediateLosslessHalftoneRegion // 7.4.5 Halftone region segment syntax - let halftoneRegion = {}; + const halftoneRegion = {}; halftoneRegion.info = readRegionSegmentInformation(data, position); position += RegionSegmentInformationFieldLength; - let halftoneRegionFlags = data[position++]; + const halftoneRegionFlags = data[position++]; halftoneRegion.mmr = !!(halftoneRegionFlags & 1); halftoneRegion.template = (halftoneRegionFlags >> 1) & 3; halftoneRegion.enableSkip = !!(halftoneRegionFlags & 8); @@ -1539,7 +1539,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { throw new Jbig2Error("parseJbig2 - invalid header."); } - let header = Object.create(null); + const header = Object.create(null); position += 8; const flags = data[position++]; header.randomAccess = !(flags & 1); @@ -1548,13 +1548,13 @@ var Jbig2Image = (function Jbig2ImageClosure() { position += 4; } - let segments = readSegments(header, data, position, end); - let visitor = new SimpleSegmentVisitor(); + const segments = readSegments(header, data, position, end); + const visitor = new SimpleSegmentVisitor(); processSegments(segments, visitor); const { width, height } = visitor.currentPageInfo; const bitPacked = visitor.buffer; - let imgData = new Uint8ClampedArray(width * height); + const imgData = new Uint8ClampedArray(width * height); let q = 0, k = 0; for (let i = 0; i < height; i++) { @@ -1691,7 +1691,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { var inputSymbols = []; for (var i = 0, ii = referredSegments.length; i < ii; i++) { - let referredSymbols = symbols[referredSegments[i]]; + const referredSymbols = symbols[referredSegments[i]]; // referredSymbols is undefined when we have a reference to a Tables // segment instead of a SymbolDictionary. if (referredSymbols) { @@ -1729,7 +1729,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { var symbols = this.symbols; var inputSymbols = []; for (var i = 0, ii = referredSegments.length; i < ii; i++) { - let referredSymbols = symbols[referredSegments[i]]; + const referredSymbols = symbols[referredSegments[i]]; // referredSymbols is undefined when we have a reference to a Tables // segment instead of a SymbolDictionary. if (referredSymbols) { @@ -1780,7 +1780,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { if (!patterns) { this.patterns = patterns = {}; } - let decodingContext = new DecodingContext(data, start, end); + const decodingContext = new DecodingContext(data, start, end); patterns[currentSegment] = decodePatternDictionary( dictionary.mmr, dictionary.patternWidth, @@ -1792,10 +1792,10 @@ var Jbig2Image = (function Jbig2ImageClosure() { }, onImmediateHalftoneRegion(region, referredSegments, data, start, end) { // HalftoneRegion refers to exactly one PatternDictionary. 
- let patterns = this.patterns[referredSegments[0]]; - let regionInfo = region.info; - let decodingContext = new DecodingContext(data, start, end); - let bitmap = decodeHalftoneRegion( + const patterns = this.patterns[referredSegments[0]]; + const regionInfo = region.info; + const decodingContext = new DecodingContext(data, start, end); + const bitmap = decodeHalftoneRegion( region.mmr, patterns, region.template, @@ -1864,7 +1864,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { HuffmanTreeNode.prototype = { buildTree(line, shift) { - let bit = (line.prefixCode >> shift) & 1; + const bit = (line.prefixCode >> shift) & 1; if (shift <= 0) { // Create a leaf node. this.children[bit] = new HuffmanTreeNode(line); @@ -1882,10 +1882,10 @@ var Jbig2Image = (function Jbig2ImageClosure() { if (this.isOOB) { return null; } - let htOffset = reader.readBits(this.rangeLength); + const htOffset = reader.readBits(this.rangeLength); return this.rangeLow + (this.isLowerRange ? -htOffset : htOffset); } - let node = this.children[reader.readBit()]; + const node = this.children[reader.readBit()]; if (!node) { throw new Jbig2Error("invalid Huffman data"); } @@ -1923,7 +1923,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { prefixLengthMax = Math.max(prefixLengthMax, lines[i].prefixLength); } - let histogram = new Uint32Array(prefixLengthMax + 1); + const histogram = new Uint32Array(prefixLengthMax + 1); for (i = 0; i < linesLength; i++) { histogram[lines[i].prefixLength]++; } @@ -1954,14 +1954,14 @@ var Jbig2Image = (function Jbig2ImageClosure() { function decodeTablesSegment(data, start, end) { // Decodes a Tables segment, i.e., a custom Huffman table. // Annex B.2 Code table structure. - let flags = data[start]; - let lowestValue = readUint32(data, start + 1) & 0xffffffff; - let highestValue = readUint32(data, start + 5) & 0xffffffff; - let reader = new Reader(data, start + 9, end); + const flags = data[start]; + const lowestValue = readUint32(data, start + 1) & 0xffffffff; + const highestValue = readUint32(data, start + 5) & 0xffffffff; + const reader = new Reader(data, start + 9, end); - let prefixSizeBits = ((flags >> 1) & 7) + 1; - let rangeSizeBits = ((flags >> 4) & 7) + 1; - let lines = []; + const prefixSizeBits = ((flags >> 1) & 7) + 1; + const rangeSizeBits = ((flags >> 4) & 7) + 1; + const lines = []; let prefixLength, rangeLength, currentRangeLow = lowestValue; @@ -1995,7 +1995,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { return new HuffmanTable(lines, false); } - let standardTablesCache = {}; + const standardTablesCache = {}; function getStandardTable(number) { // Annex B.5 Standard Huffman tables. @@ -2281,7 +2281,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { this.currentByte = this.data[this.position++]; this.shift = 7; } - let bit = (this.currentByte >> this.shift) & 1; + const bit = (this.currentByte >> this.shift) & 1; this.shift--; return bit; }, @@ -2344,7 +2344,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { codes.push(new HuffmanLine([i, codeLength, 0, 0])); } // Assign Huffman codes for RUNCODEs. - let runCodesTable = new HuffmanTable(codes, false); + const runCodesTable = new HuffmanTable(codes, false); // Read a Huffman code using the assignment above. // Interpret the RUNCODE codes and the additional bits (if any). 
@@ -2382,7 +2382,7 @@ var Jbig2Image = (function Jbig2ImageClosure() { } } reader.byteAlign(); - let symbolIDTable = new HuffmanTable(codes, false); + const symbolIDTable = new HuffmanTable(codes, false); // 7.4.3.1.6 Text region segment Huffman table selection @@ -2550,14 +2550,14 @@ var Jbig2Image = (function Jbig2ImageClosure() { function decodeMMRBitmap(input, width, height, endOfBlock) { // MMR is the same compression algorithm as the PDF filter // CCITTFaxDecode with /K -1. - let params = { + const params = { K: -1, Columns: width, Rows: height, BlackIs1: true, EndOfBlock: endOfBlock, }; - let decoder = new CCITTFaxDecoder(input, params); + const decoder = new CCITTFaxDecoder(input, params); let bitmap = [], x, y, diff --git a/src/core/jbig2_stream.js b/src/core/jbig2_stream.js index 7572c28d3..a53ced2ec 100644 --- a/src/core/jbig2_stream.js +++ b/src/core/jbig2_stream.js @@ -22,7 +22,7 @@ import { shadow } from "../shared/util.js"; * For JBIG2's we use a library to decode these images and * the stream behaves like all the other DecodeStreams. */ -let Jbig2Stream = (function Jbig2StreamClosure() { +const Jbig2Stream = (function Jbig2StreamClosure() { function Jbig2Stream(stream, maybeLength, dict, params) { this.stream = stream; this.maybeLength = maybeLength; @@ -51,19 +51,19 @@ let Jbig2Stream = (function Jbig2StreamClosure() { if (this.eof) { return; } - let jbig2Image = new Jbig2Image(); + const jbig2Image = new Jbig2Image(); - let chunks = []; + const chunks = []; if (isDict(this.params)) { - let globalsStream = this.params.get("JBIG2Globals"); + const globalsStream = this.params.get("JBIG2Globals"); if (isStream(globalsStream)) { - let globals = globalsStream.getBytes(); + const globals = globalsStream.getBytes(); chunks.push({ data: globals, start: 0, end: globals.length }); } } chunks.push({ data: this.bytes, start: 0, end: this.bytes.length }); - let data = jbig2Image.parseChunks(chunks); - let dataLength = data.length; + const data = jbig2Image.parseChunks(chunks); + const dataLength = data.length; // JBIG2 had black as 1 and white as 0, inverting the colors for (let i = 0; i < dataLength; i++) { diff --git a/src/core/jpeg_stream.js b/src/core/jpeg_stream.js index a7b10310c..582e194f3 100644 --- a/src/core/jpeg_stream.js +++ b/src/core/jpeg_stream.js @@ -25,7 +25,7 @@ import { JpegImage } from "./jpg.js"; * a library to decode these images and the stream behaves like all the other * DecodeStreams. */ -let JpegStream = (function JpegStreamClosure() { +const JpegStream = (function JpegStreamClosure() { function JpegStream(stream, maybeLength, dict, params) { // Some images may contain 'junk' before the SOI (start-of-image) marker. // Note: this seems to mainly affect inline images. @@ -64,19 +64,19 @@ let JpegStream = (function JpegStreamClosure() { if (this.eof) { return; } - let jpegOptions = { + const jpegOptions = { decodeTransform: undefined, colorTransform: undefined, }; // Checking if values need to be transformed before conversion. 
- let decodeArr = this.dict.getArray("Decode", "D"); + const decodeArr = this.dict.getArray("Decode", "D"); if (this.forceRGB && Array.isArray(decodeArr)) { - let bitsPerComponent = this.dict.get("BitsPerComponent") || 8; - let decodeArrLength = decodeArr.length; - let transform = new Int32Array(decodeArrLength); + const bitsPerComponent = this.dict.get("BitsPerComponent") || 8; + const decodeArrLength = decodeArr.length; + const transform = new Int32Array(decodeArrLength); let transformNeeded = false; - let maxValue = (1 << bitsPerComponent) - 1; + const maxValue = (1 << bitsPerComponent) - 1; for (let i = 0; i < decodeArrLength; i += 2) { transform[i] = ((decodeArr[i + 1] - decodeArr[i]) * 256) | 0; transform[i + 1] = (decodeArr[i] * maxValue) | 0; @@ -90,7 +90,7 @@ let JpegStream = (function JpegStreamClosure() { } // Fetching the 'ColorTransform' entry, if it exists. if (isDict(this.params)) { - let colorTransform = this.params.get("ColorTransform"); + const colorTransform = this.params.get("ColorTransform"); if (Number.isInteger(colorTransform)) { jpegOptions.colorTransform = colorTransform; } @@ -98,7 +98,7 @@ let JpegStream = (function JpegStreamClosure() { const jpegImage = new JpegImage(jpegOptions); jpegImage.parse(this.bytes); - let data = jpegImage.getData({ + const data = jpegImage.getData({ width: this.drawWidth, height: this.drawHeight, forceRGB: this.forceRGB, diff --git a/src/core/jpg.js b/src/core/jpg.js index 30f4c8d80..a17948e89 100644 --- a/src/core/jpg.js +++ b/src/core/jpg.js @@ -274,8 +274,8 @@ var JpegImage = (function JpegImageClosure() { var s; var rs; while (k <= e) { - let offsetZ = offset + dctZigZag[k]; - let sign = component.blockData[offsetZ] < 0 ? -1 : 1; + const offsetZ = offset + dctZigZag[k]; + const sign = component.blockData[offsetZ] < 0 ? -1 : 1; switch (successiveACState) { case 0: // initial state rs = decodeHuffman(component.huffmanTableAC); @@ -1035,7 +1035,7 @@ var JpegImage = (function JpegImageClosure() { offset -= 3; break; } - let nextFileMarker = findNextFileMarker(data, offset - 2); + const nextFileMarker = findNextFileMarker(data, offset - 2); if (nextFileMarker && nextFileMarker.invalid) { warn( "JpegImage.parse - unexpected data, current marker is: " + diff --git a/src/core/jpx.js b/src/core/jpx.js index 0d0eaed1c..513d36b31 100644 --- a/src/core/jpx.js +++ b/src/core/jpx.js @@ -1541,7 +1541,7 @@ var JpxImage = (function JpxImageClosure() { y0 = y0items[j] + offset; y1 = y1items[j]; y2 = y2items[j]; - let g = y0 - ((y2 + y1) >> 2); + const g = y0 - ((y2 + y1) >> 2); out[pos++] = (g + y2) >> shift; out[pos++] = g >> shift; diff --git a/src/core/jpx_stream.js b/src/core/jpx_stream.js index ee41c8d37..04ec03ac8 100644 --- a/src/core/jpx_stream.js +++ b/src/core/jpx_stream.js @@ -21,7 +21,7 @@ import { shadow } from "../shared/util.js"; * For JPEG 2000's we use a library to decode these images and * the stream behaves like all the other DecodeStreams. 
*/ -let JpxStream = (function JpxStreamClosure() { +const JpxStream = (function JpxStreamClosure() { function JpxStream(stream, maybeLength, dict, params) { this.stream = stream; this.maybeLength = maybeLength; @@ -50,33 +50,33 @@ let JpxStream = (function JpxStreamClosure() { if (this.eof) { return; } - let jpxImage = new JpxImage(); + const jpxImage = new JpxImage(); jpxImage.parse(this.bytes); - let width = jpxImage.width; - let height = jpxImage.height; - let componentsCount = jpxImage.componentsCount; - let tileCount = jpxImage.tiles.length; + const width = jpxImage.width; + const height = jpxImage.height; + const componentsCount = jpxImage.componentsCount; + const tileCount = jpxImage.tiles.length; if (tileCount === 1) { this.buffer = jpxImage.tiles[0].items; } else { - let data = new Uint8ClampedArray(width * height * componentsCount); + const data = new Uint8ClampedArray(width * height * componentsCount); for (let k = 0; k < tileCount; k++) { - let tileComponents = jpxImage.tiles[k]; - let tileWidth = tileComponents.width; - let tileHeight = tileComponents.height; - let tileLeft = tileComponents.left; - let tileTop = tileComponents.top; + const tileComponents = jpxImage.tiles[k]; + const tileWidth = tileComponents.width; + const tileHeight = tileComponents.height; + const tileLeft = tileComponents.left; + const tileTop = tileComponents.top; - let src = tileComponents.items; + const src = tileComponents.items; let srcPosition = 0; let dataPosition = (width * tileTop + tileLeft) * componentsCount; - let imgRowSize = width * componentsCount; - let tileRowSize = tileWidth * componentsCount; + const imgRowSize = width * componentsCount; + const tileRowSize = tileWidth * componentsCount; for (let j = 0; j < tileHeight; j++) { - let rowBytes = src.subarray(srcPosition, srcPosition + tileRowSize); + const rowBytes = src.subarray(srcPosition, srcPosition + tileRowSize); data.set(rowBytes, dataPosition); srcPosition += tileRowSize; dataPosition += imgRowSize; diff --git a/src/core/obj.js b/src/core/obj.js index 51b957fd1..79bd820ce 100644 --- a/src/core/obj.js +++ b/src/core/obj.js @@ -267,7 +267,7 @@ class Catalog { dests = Object.create(null); if (obj instanceof NameTree) { const names = obj.getAll(); - for (let name in names) { + for (const name in names) { dests[name] = fetchDestination(names[name]); } } else if (obj instanceof Dict) { @@ -1527,11 +1527,11 @@ var XRef = (function XRefClosure() { // we won't skip over a new 'obj' operator in corrupt files where // 'endobj' operators are missing (fixes issue9105_reduced.pdf). while (startPos < buffer.length) { - let endPos = startPos + skipUntil(buffer, startPos, objBytes) + 4; + const endPos = startPos + skipUntil(buffer, startPos, objBytes) + 4; contentLength = endPos - position; - let checkPos = Math.max(endPos - CHECK_CONTENT_LENGTH, startPos); - let tokenStr = bytesToString(buffer.subarray(checkPos, endPos)); + const checkPos = Math.max(endPos - CHECK_CONTENT_LENGTH, startPos); + const tokenStr = bytesToString(buffer.subarray(checkPos, endPos)); // Check if the current object ends with an 'endobj' operator. if (endobjRegExp.test(tokenStr)) { @@ -1539,7 +1539,7 @@ var XRef = (function XRefClosure() { } else { // Check if an "obj" occurrence is actually a new object, // i.e. the current object is missing the 'endobj' operator. 
- let objToken = nestedObjRegExp.exec(tokenStr); + const objToken = nestedObjRegExp.exec(tokenStr); if (objToken && objToken[1]) { warn( @@ -1552,7 +1552,7 @@ var XRef = (function XRefClosure() { } startPos = endPos; } - let content = buffer.subarray(position, position + contentLength); + const content = buffer.subarray(position, position + contentLength); // checking XRef stream suspect // (it shall have '/XRef' and next char is not a letter) @@ -1597,7 +1597,7 @@ var XRef = (function XRefClosure() { continue; } // read the trailer dictionary - let dict = parser.getObj(); + const dict = parser.getObj(); if (!isDict(dict)) { continue; } @@ -1634,7 +1634,7 @@ var XRef = (function XRefClosure() { // Keep track of already parsed XRef tables, to prevent an infinite loop // when parsing corrupt PDF files where e.g. the /Prev entries create a // circular dependency between tables (fixes bug1393476.pdf). - let startXRefParsedCache = Object.create(null); + const startXRefParsedCache = Object.create(null); try { while (this.startXRefQueue.length) { @@ -2178,7 +2178,7 @@ var FileSpec = (function FileSpecClosure() { * that have references to the catalog or other pages since that will cause the * entire PDF document object graph to be traversed. */ -let ObjectLoader = (function() { +const ObjectLoader = (function() { function mayHaveChildren(value) { return ( value instanceof Ref || @@ -2190,17 +2190,17 @@ let ObjectLoader = (function() { function addChildren(node, nodesToVisit) { if (node instanceof Dict || isStream(node)) { - let dict = node instanceof Dict ? node : node.dict; - let dictKeys = dict.getKeys(); + const dict = node instanceof Dict ? node : node.dict; + const dictKeys = dict.getKeys(); for (let i = 0, ii = dictKeys.length; i < ii; i++) { - let rawValue = dict.getRaw(dictKeys[i]); + const rawValue = dict.getRaw(dictKeys[i]); if (mayHaveChildren(rawValue)) { nodesToVisit.push(rawValue); } } } else if (Array.isArray(node)) { for (let i = 0, ii = node.length; i < ii; i++) { - let value = node[i]; + const value = node[i]; if (mayHaveChildren(value)) { nodesToVisit.push(value); } @@ -2226,12 +2226,12 @@ let ObjectLoader = (function() { return undefined; } - let { keys, dict } = this; + const { keys, dict } = this; this.refSet = new RefSet(); // Setup the initial nodes to visit. - let nodesToVisit = []; + const nodesToVisit = []; for (let i = 0, ii = keys.length; i < ii; i++) { - let rawValue = dict.getRaw(keys[i]); + const rawValue = dict.getRaw(keys[i]); // Skip nodes that are guaranteed to be empty. if (rawValue !== undefined) { nodesToVisit.push(rawValue); @@ -2241,8 +2241,8 @@ let ObjectLoader = (function() { }, async _walk(nodesToVisit) { - let nodesToRevisit = []; - let pendingRequests = []; + const nodesToRevisit = []; + const pendingRequests = []; // DFS walk of the object graph. 
while (nodesToVisit.length) { let currentNode = nodesToVisit.pop(); @@ -2265,10 +2265,10 @@ let ObjectLoader = (function() { } } if (currentNode && currentNode.getBaseStreams) { - let baseStreams = currentNode.getBaseStreams(); + const baseStreams = currentNode.getBaseStreams(); let foundMissingData = false; for (let i = 0, ii = baseStreams.length; i < ii; i++) { - let stream = baseStreams[i]; + const stream = baseStreams[i]; if (stream.allChunksLoaded && !stream.allChunksLoaded()) { foundMissingData = true; pendingRequests.push({ begin: stream.start, end: stream.end }); @@ -2286,7 +2286,7 @@ let ObjectLoader = (function() { await this.xref.stream.manager.requestRanges(pendingRequests); for (let i = 0, ii = nodesToRevisit.length; i < ii; i++) { - let node = nodesToRevisit[i]; + const node = nodesToRevisit[i]; // Remove any reference nodes from the current `RefSet` so they // aren't skipped when we revist them. if (node instanceof Ref) { diff --git a/src/core/parser.js b/src/core/parser.js index 2d42802e9..b36f4f74b 100644 --- a/src/core/parser.js +++ b/src/core/parser.js @@ -693,8 +693,8 @@ class Parser { let maybeLength = length; if (Array.isArray(filter)) { - let filterArray = filter; - let paramsArray = params; + const filterArray = filter; + const paramsArray = params; for (let i = 0, ii = filterArray.length; i < ii; ++i) { filter = this.xref.fetchIfRef(filterArray[i]); if (!isName(filter)) { diff --git a/src/core/pattern.js b/src/core/pattern.js index cf5e5cdc1..4e0907837 100644 --- a/src/core/pattern.js +++ b/src/core/pattern.js @@ -937,12 +937,12 @@ Shadings.Dummy = (function DummyClosure() { })(); function getTilingPatternIR(operatorList, dict, args) { - let matrix = dict.getArray("Matrix"); - let bbox = Util.normalizeRect(dict.getArray("BBox")); - let xstep = dict.get("XStep"); - let ystep = dict.get("YStep"); - let paintType = dict.get("PaintType"); - let tilingType = dict.get("TilingType"); + const matrix = dict.getArray("Matrix"); + const bbox = Util.normalizeRect(dict.getArray("BBox")); + const xstep = dict.get("XStep"); + const ystep = dict.get("YStep"); + const paintType = dict.get("PaintType"); + const tilingType = dict.get("TilingType"); // Ensure that the pattern has a non-zero width and height, to prevent errors // in `pattern_helper.js` (fixes issue8330.pdf). diff --git a/src/core/primitives.js b/src/core/primitives.js index f1cdd7317..1501d199e 100644 --- a/src/core/primitives.js +++ b/src/core/primitives.js @@ -153,14 +153,14 @@ var Dict = (function DictClosure() { Dict.empty = new Dict(null); Dict.merge = function(xref, dictArray) { - let mergedDict = new Dict(xref); + const mergedDict = new Dict(xref); for (let i = 0, ii = dictArray.length; i < ii; i++) { - let dict = dictArray[i]; + const dict = dictArray[i]; if (!isDict(dict)) { continue; } - for (let keyName in dict._map) { + for (const keyName in dict._map) { if (mergedDict._map[keyName] !== undefined) { continue; } diff --git a/src/core/stream.js b/src/core/stream.js index 1d1839c01..253b261dd 100644 --- a/src/core/stream.js +++ b/src/core/stream.js @@ -76,7 +76,7 @@ var Stream = (function StreamClosure() { var strEnd = this.end; if (!length) { - let subarray = bytes.subarray(pos, strEnd); + const subarray = bytes.subarray(pos, strEnd); // `this.bytes` is always a `Uint8Array` here. return forceClamped ? 
new Uint8ClampedArray(subarray) : subarray; } @@ -85,7 +85,7 @@ var Stream = (function StreamClosure() { end = strEnd; } this.pos = end; - let subarray = bytes.subarray(pos, end); + const subarray = bytes.subarray(pos, end); // `this.bytes` is always a `Uint8Array` here. return forceClamped ? new Uint8ClampedArray(subarray) : subarray; }, @@ -134,7 +134,7 @@ var Stream = (function StreamClosure() { var StringStream = (function StringStreamClosure() { function StringStream(str) { - let bytes = stringToBytes(str); + const bytes = stringToBytes(str); Stream.call(this, bytes); } @@ -235,7 +235,7 @@ var DecodeStream = (function DecodeStreamClosure() { } this.pos = end; - let subarray = this.buffer.subarray(pos, end); + const subarray = this.buffer.subarray(pos, end); // `this.buffer` is either a `Uint8Array` or `Uint8ClampedArray` here. return forceClamped && !(subarray instanceof Uint8ClampedArray) ? new Uint8ClampedArray(subarray) diff --git a/src/display/api.js b/src/display/api.js index 44cb2d871..f0d777326 100644 --- a/src/display/api.js +++ b/src/display/api.js @@ -1828,7 +1828,9 @@ const PDFWorker = (function PDFWorkerClosure() { }); const sendTest = () => { - let testObj = new Uint8Array([this.postMessageTransfers ? 255 : 0]); + const testObj = new Uint8Array([ + this.postMessageTransfers ? 255 : 0, + ]); // Some versions of Opera throw a DATA_CLONE_ERR on serializing the // typed array. Also, checking if we can use transfers. try { diff --git a/src/display/api_compatibility.js b/src/display/api_compatibility.js index 293fc0fbc..810cf90b6 100644 --- a/src/display/api_compatibility.js +++ b/src/display/api_compatibility.js @@ -13,7 +13,7 @@ * limitations under the License. */ -let compatibilityParams = Object.create(null); +const compatibilityParams = Object.create(null); if (typeof PDFJSDev === "undefined" || PDFJSDev.test("GENERIC")) { const { isNodeJS } = require("../shared/is_node.js"); diff --git a/src/display/canvas.js b/src/display/canvas.js index 3b981326e..eaec1a1d5 100644 --- a/src/display/canvas.js +++ b/src/display/canvas.js @@ -769,7 +769,7 @@ var CanvasGraphics = (function CanvasGraphicsClosure() { var backdrop = smask.backdrop || null; if (!smask.transferMap && webGLContext.isEnabled) { - let composed = webGLContext.composeSMask({ + const composed = webGLContext.composeSMask({ layer: layerCtx.canvas, mask, properties: { @@ -1274,7 +1274,7 @@ var CanvasGraphics = (function CanvasGraphicsClosure() { // but the line width needs to be adjusted by the current transform, so // we must scale it. To properly fix this we should be using a pattern // transform instead (see #10955). - let transform = ctx.mozCurrentTransform; + const transform = ctx.mozCurrentTransform; const scale = Util.singularValueDecompose2dScale(transform)[0]; ctx.strokeStyle = strokeColor.getPattern(ctx, this); ctx.lineWidth = Math.max( @@ -1501,7 +1501,7 @@ var CanvasGraphics = (function CanvasGraphicsClosure() { var isAddToPathSet = !!( textRenderingMode & TextRenderingMode.ADD_TO_PATH_FLAG ); - let patternFill = current.patternFill && font.data; + const patternFill = current.patternFill && font.data; var addToPath; if (font.disableFontFace || isAddToPathSet || patternFill) { @@ -1612,7 +1612,7 @@ var CanvasGraphics = (function CanvasGraphicsClosure() { // TODO: Patterns are not applied correctly to text if a non-embedded // font is used. E.g. issue 8111 and ShowText-ShadingPattern.pdf. 
ctx.save(); - let pattern = current.fillColor.getPattern(ctx, this); + const pattern = current.fillColor.getPattern(ctx, this); patternTransform = ctx.mozCurrentTransform; ctx.restore(); ctx.fillStyle = pattern; diff --git a/src/display/content_disposition.js b/src/display/content_disposition.js index 19c66c670..db920ac44 100644 --- a/src/display/content_disposition.js +++ b/src/display/content_disposition.js @@ -47,7 +47,7 @@ function getFilenameFromContentDispositionHeader(contentDisposition) { tmp = rfc2231getparam(contentDisposition); if (tmp) { // RFC 2047, section - let filename = rfc2047decode(tmp); + const filename = rfc2047decode(tmp); return fixupEncoding(filename); } @@ -84,8 +84,8 @@ function getFilenameFromContentDispositionHeader(contentDisposition) { return value; } try { - let decoder = new TextDecoder(encoding, { fatal: true }); - let bytes = Array.from(value, function(ch) { + const decoder = new TextDecoder(encoding, { fatal: true }); + const bytes = Array.from(value, function(ch) { return ch.charCodeAt(0) & 0xff; }); value = decoder.decode(new Uint8Array(bytes)); @@ -120,7 +120,7 @@ function getFilenameFromContentDispositionHeader(contentDisposition) { match; // Iterate over all filename*n= and filename*n*= with n being an integer // of at least zero. Any non-zero number must not start with '0'. - let iter = toParamRegExp("filename\\*((?!0\\d)\\d+)(\\*?)", "ig"); + const iter = toParamRegExp("filename\\*((?!0\\d)\\d+)(\\*?)", "ig"); while ((match = iter.exec(contentDisposition)) !== null) { let [, n, quot, part] = match; n = parseInt(n, 10); @@ -133,7 +133,7 @@ function getFilenameFromContentDispositionHeader(contentDisposition) { } matches[n] = [quot, part]; } - let parts = []; + const parts = []; for (let n = 0; n < matches.length; ++n) { if (!(n in matches)) { // Numbers must be consecutive. Truncate when there is a hole. @@ -153,10 +153,10 @@ function getFilenameFromContentDispositionHeader(contentDisposition) { } function rfc2616unquote(value) { if (value.startsWith('"')) { - let parts = value.slice(1).split('\\"'); + const parts = value.slice(1).split('\\"'); // Find the first unescaped " and terminate there. for (let i = 0; i < parts.length; ++i) { - let quotindex = parts[i].indexOf('"'); + const quotindex = parts[i].indexOf('"'); if (quotindex !== -1) { parts[i] = parts[i].slice(0, quotindex); parts.length = i + 1; // Truncates and stop the iteration. @@ -169,17 +169,17 @@ function getFilenameFromContentDispositionHeader(contentDisposition) { } function rfc5987decode(extvalue) { // Decodes "ext-value" from RFC 5987. - let encodingend = extvalue.indexOf("'"); + const encodingend = extvalue.indexOf("'"); if (encodingend === -1) { // Some servers send "filename*=" without encoding 'language' prefix, // e.g. in https://github.com/Rob--W/open-in-browser/issues/26 // Let's accept the value like Firefox (57) (Chrome 62 rejects it). return extvalue; } - let encoding = extvalue.slice(0, encodingend); - let langvalue = extvalue.slice(encodingend + 1); + const encoding = extvalue.slice(0, encodingend); + const langvalue = extvalue.slice(encodingend + 1); // Ignore language (RFC 5987 section 3.2.1, and RFC 6266 section 4.1 ). 
- let value = langvalue.replace(/^[^']*'/, ""); + const value = langvalue.replace(/^[^']*'/, ""); return textdecode(encoding, value); } function rfc2047decode(value) { diff --git a/src/display/font_loader.js b/src/display/font_loader.js index c10a0ba5a..ba9421aa8 100644 --- a/src/display/font_loader.js +++ b/src/display/font_loader.js @@ -271,17 +271,17 @@ if (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) { ); } function spliceString(s, offset, remove, insert) { - let chunk1 = s.substring(0, offset); - let chunk2 = s.substring(offset + remove); + const chunk1 = s.substring(0, offset); + const chunk2 = s.substring(offset + remove); return chunk1 + insert + chunk2; } let i, ii; // The temporary canvas is used to determine if fonts are loaded. - let canvas = document.createElement("canvas"); + const canvas = document.createElement("canvas"); canvas.width = 1; canvas.height = 1; - let ctx = canvas.getContext("2d"); + const ctx = canvas.getContext("2d"); let called = 0; function isFontReady(name, callback) { @@ -294,7 +294,7 @@ if (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) { } ctx.font = "30px " + name; ctx.fillText(".", 0, 20); - let imageData = ctx.getImageData(0, 0, 1, 1); + const imageData = ctx.getImageData(0, 0, 1, 1); if (imageData.data[3] > 0) { callback(); return; @@ -309,7 +309,7 @@ if (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) { // TODO: This could maybe be made faster by avoiding the btoa of the full // font by splitting it in chunks before hand and padding the font id. let data = this._loadTestFont; - let COMMENT_OFFSET = 976; // has to be on 4 byte boundary (for checksum) + const COMMENT_OFFSET = 976; // has to be on 4 byte boundary (for checksum) data = spliceString( data, COMMENT_OFFSET, @@ -317,8 +317,8 @@ if (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) { loadTestFontId ); // CFF checksum is important for IE, adjusting it - let CFF_CHECKSUM_OFFSET = 16; - let XXXX_VALUE = 0x58585858; // the "comment" filled with 'X' + const CFF_CHECKSUM_OFFSET = 16; + const XXXX_VALUE = 0x58585858; // the "comment" filled with 'X' let checksum = int32(data, CFF_CHECKSUM_OFFSET); for (i = 0, ii = loadTestFontId.length - 3; i < ii; i += 4) { checksum = (checksum - XXXX_VALUE + int32(loadTestFontId, i)) | 0; @@ -334,13 +334,13 @@ if (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) { const rule = `@font-face {font-family:"${loadTestFontId}";src:${url}}`; this.insertRule(rule); - let names = []; + const names = []; for (i = 0, ii = fonts.length; i < ii; i++) { names.push(fonts[i].loadedName); } names.push(loadTestFontId); - let div = document.createElement("div"); + const div = document.createElement("div"); div.setAttribute( "style", "visibility: hidden;" + @@ -348,7 +348,7 @@ if (typeof PDFJSDev !== "undefined" && PDFJSDev.test("MOZCENTRAL")) { "position: absolute; top: 0px; left: 0px;" ); for (i = 0, ii = names.length; i < ii; ++i) { - let span = document.createElement("span"); + const span = document.createElement("span"); span.textContent = "Hi"; span.style.fontFamily = names[i]; div.appendChild(span); @@ -383,7 +383,7 @@ class FontFaceObject { ) { this.compiledGlyphs = Object.create(null); // importing translated data - for (let i in translatedData) { + for (const i in translatedData) { this[i] = translatedData[i]; } this.isEvalSupported = isEvalSupported !== false; diff --git a/src/display/metadata.js b/src/display/metadata.js index 36745c4b6..fc09b13e6 100644 --- a/src/display/metadata.js +++ 
b/src/display/metadata.js @@ -24,7 +24,7 @@ class Metadata { data = this._repair(data); // Convert the string to an XML document. - let parser = new SimpleXMLParser(); + const parser = new SimpleXMLParser(); const xmlDocument = parser.parseFromString(data); this._metadata = Object.create(null); @@ -39,7 +39,7 @@ class Metadata { return data .replace(/^([^<]+)/, "") .replace(/>\\376\\377([^<]+)/g, function(all, codes) { - let bytes = codes + const bytes = codes .replace(/\\([0-3])([0-7])([0-7])/g, function(code, d1, d2, d3) { return String.fromCharCode(d1 * 64 + d2 * 8 + d3 * 1); }) @@ -61,7 +61,7 @@ class Metadata { let chars = ""; for (let i = 0, ii = bytes.length; i < ii; i += 2) { - let code = bytes.charCodeAt(i) * 256 + bytes.charCodeAt(i + 1); + const code = bytes.charCodeAt(i) * 256 + bytes.charCodeAt(i + 1); if ( code >= 32 && code < 127 && @@ -90,22 +90,22 @@ class Metadata { } } - let nodeName = rdf ? rdf.nodeName.toLowerCase() : null; + const nodeName = rdf ? rdf.nodeName.toLowerCase() : null; if (!rdf || nodeName !== "rdf:rdf" || !rdf.hasChildNodes()) { return; } - let children = rdf.childNodes; + const children = rdf.childNodes; for (let i = 0, ii = children.length; i < ii; i++) { - let desc = children[i]; + const desc = children[i]; if (desc.nodeName.toLowerCase() !== "rdf:description") { continue; } for (let j = 0, jj = desc.childNodes.length; j < jj; j++) { if (desc.childNodes[j].nodeName.toLowerCase() !== "#text") { - let entry = desc.childNodes[j]; - let name = entry.nodeName.toLowerCase(); + const entry = desc.childNodes[j]; + const name = entry.nodeName.toLowerCase(); this._metadata[name] = entry.textContent.trim(); } diff --git a/src/display/network_utils.js b/src/display/network_utils.js index 9086580b6..762a9f0c4 100644 --- a/src/display/network_utils.js +++ b/src/display/network_utils.js @@ -27,12 +27,12 @@ function validateRangeRequestCapabilities({ disableRange, }) { assert(rangeChunkSize > 0, "Range chunk size must be larger than zero"); - let returnValues = { + const returnValues = { allowRangeRequests: false, suggestedLength: undefined, }; - let length = parseInt(getResponseHeader("Content-Length"), 10); + const length = parseInt(getResponseHeader("Content-Length"), 10); if (!Number.isInteger(length)) { return returnValues; } @@ -52,7 +52,7 @@ function validateRangeRequestCapabilities({ return returnValues; } - let contentEncoding = getResponseHeader("Content-Encoding") || "identity"; + const contentEncoding = getResponseHeader("Content-Encoding") || "identity"; if (contentEncoding !== "identity") { return returnValues; } diff --git a/src/display/node_stream.js b/src/display/node_stream.js index 79e7a5067..67cff4f8a 100644 --- a/src/display/node_stream.js +++ b/src/display/node_stream.js @@ -14,10 +14,10 @@ */ /* globals __non_webpack_require__ */ -let fs = __non_webpack_require__("fs"); -let http = __non_webpack_require__("http"); -let https = __non_webpack_require__("https"); -let url = __non_webpack_require__("url"); +const fs = __non_webpack_require__("fs"); +const http = __non_webpack_require__("http"); +const https = __non_webpack_require__("https"); +const url = __non_webpack_require__("url"); import { AbortException, @@ -33,7 +33,7 @@ import { const fileUriRegex = /^file:\/\/\/[a-zA-Z]:\//; function parseUrl(sourceUrl) { - let parsedUrl = url.parse(sourceUrl); + const parsedUrl = url.parse(sourceUrl); if (parsedUrl.protocol === "file:" || parsedUrl.host) { return parsedUrl; } @@ -78,7 +78,7 @@ class PDFNodeStream { if (end <= this._progressiveDataLength) 
{ return null; } - let rangeReader = this.isFsUrl + const rangeReader = this.isFsUrl ? new PDFNodeStreamFsRangeReader(this, start, end) : new PDFNodeStreamRangeReader(this, start, end); this._rangeRequestReaders.push(rangeReader); @@ -90,7 +90,7 @@ class PDFNodeStream { this._fullRequestReader.cancel(reason); } - let readers = this._rangeRequestReaders.slice(0); + const readers = this._rangeRequestReaders.slice(0); readers.forEach(function(reader) { reader.cancel(reason); }); @@ -103,7 +103,7 @@ class BaseFullReader { this._done = false; this._storedError = null; this.onProgress = null; - let source = stream.source; + const source = stream.source; this._contentLength = source.length; // optional this._loaded = 0; this._filename = null; @@ -151,7 +151,7 @@ class BaseFullReader { throw this._storedError; } - let chunk = this._readableStream.read(); + const chunk = this._readableStream.read(); if (chunk === null) { this._readCapability = createPromiseCapability(); return this.read(); @@ -164,7 +164,7 @@ class BaseFullReader { }); } // Ensure that `read()` method returns ArrayBuffer. - let buffer = new Uint8Array(chunk).buffer; + const buffer = new Uint8Array(chunk).buffer; return { value: buffer, done: false }; } @@ -222,7 +222,7 @@ class BaseRangeReader { this._loaded = 0; this._readableStream = null; this._readCapability = createPromiseCapability(); - let source = stream.source; + const source = stream.source; this._isStreamingSupported = !source.disableStream; } @@ -239,7 +239,7 @@ class BaseRangeReader { throw this._storedError; } - let chunk = this._readableStream.read(); + const chunk = this._readableStream.read(); if (chunk === null) { this._readCapability = createPromiseCapability(); return this.read(); @@ -249,7 +249,7 @@ class BaseRangeReader { this.onProgress({ loaded: this._loaded }); } // Ensure that `read()` method returns ArrayBuffer. - let buffer = new Uint8Array(chunk).buffer; + const buffer = new Uint8Array(chunk).buffer; return { value: buffer, done: false }; } @@ -308,7 +308,7 @@ class PDFNodeStreamFullReader extends BaseFullReader { constructor(stream) { super(stream); - let handleResponse = response => { + const handleResponse = response => { if (response.statusCode === 404) { const error = new MissingPDFException(`Missing PDF "${this._url}".`); this._storedError = error; @@ -323,7 +323,7 @@ class PDFNodeStreamFullReader extends BaseFullReader { // here: https://nodejs.org/api/http.html#http_message_headers. 
return this._readableStream.headers[name.toLowerCase()]; }; - let { + const { allowRangeRequests, suggestedLength, } = validateRangeRequestCapabilities({ @@ -369,8 +369,8 @@ class PDFNodeStreamRangeReader extends BaseRangeReader { super(stream); this._httpHeaders = {}; - for (let property in stream.httpHeaders) { - let value = stream.httpHeaders[property]; + for (const property in stream.httpHeaders) { + const value = stream.httpHeaders[property]; if (typeof value === "undefined") { continue; } @@ -378,7 +378,7 @@ class PDFNodeStreamRangeReader extends BaseRangeReader { } this._httpHeaders["Range"] = `bytes=${start}-${end - 1}`; - let handleResponse = response => { + const handleResponse = response => { if (response.statusCode === 404) { const error = new MissingPDFException(`Missing PDF "${this._url}".`); this._storedError = error; diff --git a/src/display/pattern_helper.js b/src/display/pattern_helper.js index b85c81e3d..5fb7d4eb0 100644 --- a/src/display/pattern_helper.js +++ b/src/display/pattern_helper.js @@ -555,7 +555,7 @@ var TilingPattern = (function TilingPatternClosure() { paintType, color ) { - let context = graphics.ctx, + const context = graphics.ctx, current = graphics.current; switch (paintType) { case PaintType.COLORED: diff --git a/src/display/text_layer.js b/src/display/text_layer.js index 5861110f8..4521cd11a 100644 --- a/src/display/text_layer.js +++ b/src/display/text_layer.js @@ -594,11 +594,11 @@ var renderTextLayer = (function renderTextLayerClosure() { }, _render: function TextLayer_render(timeout) { - let capability = createPromiseCapability(); + const capability = createPromiseCapability(); let styleCache = Object.create(null); // The temporary canvas is used to measure text length in the DOM. - let canvas = document.createElement("canvas"); + const canvas = document.createElement("canvas"); if ( typeof PDFJSDev === "undefined" || PDFJSDev.test("MOZCENTRAL || GENERIC") @@ -608,12 +608,12 @@ var renderTextLayer = (function renderTextLayerClosure() { this._layoutTextCtx = canvas.getContext("2d", { alpha: false }); if (this._textContent) { - let textItems = this._textContent.items; - let textStyles = this._textContent.styles; + const textItems = this._textContent.items; + const textStyles = this._textContent.styles; this._processItems(textItems, textStyles); capability.resolve(); } else if (this._textContentStream) { - let pump = () => { + const pump = () => { this._reader.read().then(({ value, done }) => { if (done) { capability.resolve(); diff --git a/src/pdf.js b/src/pdf.js index de9415a1b..1d1252171 100644 --- a/src/pdf.js +++ b/src/pdf.js @@ -27,18 +27,18 @@ var pdfjsDisplayTextLayer = require("./display/text_layer.js"); var pdfjsDisplayAnnotationLayer = require("./display/annotation_layer.js"); var pdfjsDisplayDisplayUtils = require("./display/display_utils.js"); var pdfjsDisplaySVG = require("./display/svg.js"); -let pdfjsDisplayWorkerOptions = require("./display/worker_options.js"); -let pdfjsDisplayAPICompatibility = require("./display/api_compatibility.js"); +const pdfjsDisplayWorkerOptions = require("./display/worker_options.js"); +const pdfjsDisplayAPICompatibility = require("./display/api_compatibility.js"); if (typeof PDFJSDev === "undefined" || PDFJSDev.test("GENERIC")) { const { isNodeJS } = require("./shared/is_node.js"); if (isNodeJS) { - let PDFNodeStream = require("./display/node_stream.js").PDFNodeStream; + const PDFNodeStream = require("./display/node_stream.js").PDFNodeStream; pdfjsDisplayAPI.setPDFNetworkStreamFactory(params => { return new 
PDFNodeStream(params); }); } else { - let PDFNetworkStream = require("./display/network.js").PDFNetworkStream; + const PDFNetworkStream = require("./display/network.js").PDFNetworkStream; let PDFFetchStream; if (pdfjsDisplayDisplayUtils.isFetchSupported()) { PDFFetchStream = require("./display/fetch_stream.js").PDFFetchStream; @@ -54,9 +54,9 @@ if (typeof PDFJSDev === "undefined" || PDFJSDev.test("GENERIC")) { }); } } else if (PDFJSDev.test("CHROME")) { - let PDFNetworkStream = require("./display/network.js").PDFNetworkStream; + const PDFNetworkStream = require("./display/network.js").PDFNetworkStream; let PDFFetchStream; - let isChromeWithFetchCredentials = function() { + const isChromeWithFetchCredentials = function() { // fetch does not include credentials until Chrome 61.0.3138.0 and later. // https://chromium.googlesource.com/chromium/src/+/2e231cf052ca5e68e22baf0008ac9e5e29121707 try { diff --git a/src/shared/compatibility.js b/src/shared/compatibility.js index fa7d6ad17..e4634b3f3 100644 --- a/src/shared/compatibility.js +++ b/src/shared/compatibility.js @@ -93,12 +93,12 @@ if ( const OriginalDOMTokenListRemove = DOMTokenList.prototype.remove; DOMTokenList.prototype.add = function(...tokens) { - for (let token of tokens) { + for (const token of tokens) { OriginalDOMTokenListAdd.call(this, token); } }; DOMTokenList.prototype.remove = function(...tokens) { - for (let token of tokens) { + for (const token of tokens) { OriginalDOMTokenListRemove.call(this, token); } }; @@ -117,7 +117,8 @@ if ( } DOMTokenList.prototype.toggle = function(token) { - let force = arguments.length > 1 ? !!arguments[1] : !this.contains(token); + const force = + arguments.length > 1 ? !!arguments[1] : !this.contains(token); return this[force ? "add" : "remove"](token), force; }; })(); diff --git a/src/shared/util.js b/src/shared/util.js index 0777550bf..e32617c0b 100644 --- a/src/shared/util.js +++ b/src/shared/util.js @@ -804,7 +804,7 @@ function utf8StringToString(str) { } function isEmptyObj(obj) { - for (let key in obj) { + for (const key in obj) { return false; } return true; diff --git a/test/add_test.js b/test/add_test.js index f3c8bc5da..067b6170f 100644 --- a/test/add_test.js +++ b/test/add_test.js @@ -14,7 +14,7 @@ if (process.argv.length < 3) { process.exit(1); } -let file = process.argv[2]; +const file = process.argv[2]; if (!file.startsWith(pdfFolder)) { throw new Error(`PDF file must be in '${pdfFolder}' directory.`); } @@ -46,7 +46,7 @@ calculateMD5(file, (err, md5) => { throw new Error(err); } let contents = fs.readFileSync(gitIgnore, "utf8").split("\n"); - let randomLine = getRandomArbitrary(10, contents.length - 2); + const randomLine = getRandomArbitrary(10, contents.length - 2); contents.splice( randomLine, 0, @@ -55,10 +55,10 @@ calculateMD5(file, (err, md5) => { fs.writeFileSync("test/pdfs/.gitignore", contents.join("\n")); contents = fs.readFileSync(testManifest, "utf8"); - let pdf = file.substring(file.lastIndexOf("/") + 1, file.length - 4); - let randomPoint = getRandomArbitrary(100, contents.length - 20); - let bracket = contents.indexOf("},\n", randomPoint); - let out = + const pdf = file.substring(file.lastIndexOf("/") + 1, file.length - 4); + const randomPoint = getRandomArbitrary(100, contents.length - 20); + const bracket = contents.indexOf("},\n", randomPoint); + const out = contents.substring(0, bracket) + "},\n" + ` { "id": "${pdf}",\n` + diff --git a/test/driver.js b/test/driver.js index 63ad1fd1a..b8dff5ece 100644 --- a/test/driver.js +++ b/test/driver.js @@ -119,7 +119,7 @@ 
var rasterizeAnnotationLayer = (function rasterizeAnnotationLayerClosure() { * the overrides file because the browser does not resolve that when the * styles are inserted via XHR. Therefore, we load and combine them here. */ - let styles = { + const styles = { common: { file: "../web/annotation_layer_builder.css", promise: null, @@ -137,9 +137,9 @@ var rasterizeAnnotationLayer = (function rasterizeAnnotationLayerClosure() { } // Load the style files and cache the results. - for (let key in styles) { + for (const key in styles) { styles[key].promise = new Promise(function(resolve, reject) { - let xhr = new XMLHttpRequest(); + const xhr = new XMLHttpRequest(); xhr.open("GET", styles[key].file); xhr.onload = function() { resolve(xhr.responseText); @@ -379,14 +379,14 @@ var Driver = (function DriverClosure() { this._done(); return; } - let task = this.manifest[this.currentTask]; + const task = this.manifest[this.currentTask]; task.round = 0; task.pageNum = task.firstPage || 1; task.stats = { times: [] }; this._log('Loading file "' + task.file + '"\n'); - let absoluteUrl = new URL(task.file, window.location).href; + const absoluteUrl = new URL(task.file, window.location).href; try { const loadingTask = pdfjsLib.getDocument({ url: absoluteUrl, @@ -419,18 +419,18 @@ var Driver = (function DriverClosure() { _cleanup() { // Clear out all the stylesheets since a new one is created for each font. while (document.styleSheets.length > 0) { - let styleSheet = document.styleSheets[0]; + const styleSheet = document.styleSheets[0]; while (styleSheet.cssRules.length > 0) { styleSheet.deleteRule(0); } styleSheet.ownerNode.remove(); } - let body = document.body; + const body = document.body; while (body.lastChild !== this.end) { body.removeChild(body.lastChild); } - let destroyedPromises = []; + const destroyedPromises = []; // Wipe out the link to the pdfdoc so it can be GC'ed. for (let i = 0; i < this.manifest.length; i++) { if (this.manifest[i].pdfDoc) { diff --git a/test/unit/annotation_spec.js b/test/unit/annotation_spec.js index 59dba54ea..48b6f48af 100644 --- a/test/unit/annotation_spec.js +++ b/test/unit/annotation_spec.js @@ -1174,20 +1174,22 @@ describe("annotation", function() { "should correctly parse a Dest, which violates the specification " + "by containing a dictionary", function(done) { - let destDict = new Dict(); + const destDict = new Dict(); destDict.set("Type", Name.get("Action")); destDict.set("S", Name.get("GoTo")); destDict.set("D", "page.157"); - let annotationDict = new Dict(); + const annotationDict = new Dict(); annotationDict.set("Type", Name.get("Annot")); annotationDict.set("Subtype", Name.get("Link")); // The /Dest must be a Name or an Array, refer to ISO 32000-1:2008 // section 12.3.3, but there are PDF files where it's a dictionary. 
annotationDict.set("Dest", destDict); - let annotationRef = Ref.get(798, 0); - let xref = new XRefMock([{ ref: annotationRef, data: annotationDict }]); + const annotationRef = Ref.get(798, 0); + const xref = new XRefMock([ + { ref: annotationRef, data: annotationDict }, + ]); AnnotationFactory.create( xref, @@ -1479,7 +1481,7 @@ describe("annotation", function() { }); it("should only accept comb fields when the flags are valid", function(done) { - let invalidFieldFlags = [ + const invalidFieldFlags = [ AnnotationFieldFlag.MULTILINE, AnnotationFieldFlag.PASSWORD, AnnotationFieldFlag.FILESELECT, diff --git a/test/unit/api_spec.js b/test/unit/api_spec.js index 42ffa771d..408d15270 100644 --- a/test/unit/api_spec.js +++ b/test/unit/api_spec.js @@ -50,9 +50,9 @@ import { isNodeJS } from "../../src/shared/is_node.js"; import { Metadata } from "../../src/display/metadata.js"; describe("api", function() { - let basicApiFileName = "basicapi.pdf"; - let basicApiFileLength = 105779; // bytes - let basicApiGetDocumentParams = buildGetDocumentParams(basicApiFileName); + const basicApiFileName = "basicapi.pdf"; + const basicApiFileLength = 105779; // bytes + const basicApiGetDocumentParams = buildGetDocumentParams(basicApiFileName); let CanvasFactory; @@ -102,7 +102,7 @@ describe("api", function() { }); it("creates pdf doc from URL and aborts before worker initialized", function(done) { var loadingTask = getDocument(basicApiGetDocumentParams); - let destroyed = loadingTask.destroy(); + const destroyed = loadingTask.destroy(); loadingTask.promise .then(function(reason) { @@ -473,7 +473,7 @@ describe("api", function() { pending("Worker is not supported in Node.js."); } - let workerSrc = PDFWorker.getWorkerSrc(); + const workerSrc = PDFWorker.getWorkerSrc(); expect(typeof workerSrc).toEqual("string"); expect(workerSrc).toEqual(GlobalWorkerOptions.workerSrc); }); @@ -1339,8 +1339,8 @@ describe("api", function() { it('gets viewport respecting "dontFlip" argument', function() { const scale = 1, rotation = 0; - let viewport = page.getViewport({ scale, rotation }); - let dontFlipViewport = page.getViewport({ + const viewport = page.getViewport({ scale, rotation }); + const dontFlipViewport = page.getViewport({ scale, rotation, dontFlip: true, @@ -1511,15 +1511,15 @@ describe("api", function() { }); it("gets operatorList with JPEG image (issue 4888)", function(done) { - let loadingTask = getDocument(buildGetDocumentParams("cmykjpeg.pdf")); + const loadingTask = getDocument(buildGetDocumentParams("cmykjpeg.pdf")); loadingTask.promise .then(pdfDoc => { pdfDoc.getPage(1).then(pdfPage => { pdfPage.getOperatorList().then(opList => { - let imgIndex = opList.fnArray.indexOf(OPS.paintImageXObject); - let imgArgs = opList.argsArray[imgIndex]; - let { data } = pdfPage.objs.get(imgArgs[0]); + const imgIndex = opList.fnArray.indexOf(OPS.paintImageXObject); + const imgArgs = opList.argsArray[imgIndex]; + const { data } = pdfPage.objs.get(imgArgs[0]); expect(data instanceof Uint8ClampedArray).toEqual(true); expect(data.length).toEqual(90000); @@ -1607,7 +1607,7 @@ describe("api", function() { }, done.fail); }); it("gets page stats after parsing page, with `pdfBug` set", function(done) { - let loadingTask = getDocument( + const loadingTask = getDocument( buildGetDocumentParams(basicApiFileName, { pdfBug: true }) ); @@ -1623,7 +1623,7 @@ describe("api", function() { expect(stats instanceof StatTimer).toEqual(true); expect(stats.times.length).toEqual(1); - let [statEntry] = stats.times; + const [statEntry] = stats.times; 
expect(statEntry.name).toEqual("Page Request"); expect(statEntry.end - statEntry.start).toBeGreaterThanOrEqual(0); @@ -1631,7 +1631,7 @@ describe("api", function() { }, done.fail); }); it("gets page stats after rendering page, with `pdfBug` set", function(done) { - let loadingTask = getDocument( + const loadingTask = getDocument( buildGetDocumentParams(basicApiFileName, { pdfBug: true }) ); let canvasAndCtx; @@ -1639,13 +1639,13 @@ describe("api", function() { loadingTask.promise .then(pdfDoc => { return pdfDoc.getPage(1).then(pdfPage => { - let viewport = pdfPage.getViewport({ scale: 1 }); + const viewport = pdfPage.getViewport({ scale: 1 }); canvasAndCtx = CanvasFactory.create( viewport.width, viewport.height ); - let renderTask = pdfPage.render({ + const renderTask = pdfPage.render({ canvasContext: canvasAndCtx.context, canvasFactory: CanvasFactory, viewport, @@ -1659,7 +1659,7 @@ describe("api", function() { expect(stats instanceof StatTimer).toEqual(true); expect(stats.times.length).toEqual(3); - let [statEntryOne, statEntryTwo, statEntryThree] = stats.times; + const [statEntryOne, statEntryTwo, statEntryThree] = stats.times; expect(statEntryOne.name).toEqual("Page Request"); expect(statEntryOne.end - statEntryOne.start).toBeGreaterThanOrEqual( 0 @@ -1700,10 +1700,13 @@ describe("api", function() { }); it("re-render page, using the same canvas, after cancelling rendering", function(done) { - let viewport = page.getViewport({ scale: 1 }); - let canvasAndCtx = CanvasFactory.create(viewport.width, viewport.height); + const viewport = page.getViewport({ scale: 1 }); + const canvasAndCtx = CanvasFactory.create( + viewport.width, + viewport.height + ); - let renderTask = page.render({ + const renderTask = page.render({ canvasContext: canvasAndCtx.context, canvasFactory: CanvasFactory, viewport, @@ -1720,7 +1723,7 @@ describe("api", function() { } ) .then(() => { - let reRenderTask = page.render({ + const reRenderTask = page.render({ canvasContext: canvasAndCtx.context, canvasFactory: CanvasFactory, viewport, diff --git a/test/unit/cmap_spec.js b/test/unit/cmap_spec.js index b18cd73b9..193a8dcc1 100644 --- a/test/unit/cmap_spec.js +++ b/test/unit/cmap_spec.js @@ -345,7 +345,7 @@ describe("cmap", function() { }); } - let cmapPromise = CMapFactory.create({ + const cmapPromise = CMapFactory.create({ encoding: Name.get("Adobe-Japan1-1"), fetchBuiltInCMap: tmpFetchBuiltInCMap, useCMap: null, @@ -356,7 +356,7 @@ describe("cmap", function() { }, function(reason) { expect(reason instanceof Error).toEqual(true); - let message = reason.message; + const message = reason.message; expect(message.startsWith("Unable to load CMap at: ")).toEqual(true); expect(message.endsWith("/external/bcmaps/Adobe-Japan1-1")).toEqual( true diff --git a/test/unit/colorspace_spec.js b/test/unit/colorspace_spec.js index 71d6141d2..4478987c7 100644 --- a/test/unit/colorspace_spec.js +++ b/test/unit/colorspace_spec.js @@ -48,24 +48,24 @@ describe("colorspace", function() { describe("DeviceGrayCS", function() { it("should handle the case when cs is a Name object", function() { - let cs = Name.get("DeviceGray"); - let xref = new XRefMock([ + const cs = Name.get("DeviceGray"); + const xref = new XRefMock([ { ref: Ref.get(10, 0), data: new Dict(), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, 
xref, res, pdfFunctionFactory); - let testSrc = new Uint8Array([27, 125, 250, 131]); - let testDest = new Uint8ClampedArray(4 * 4 * 3); + const testSrc = new Uint8Array([27, 125, 250, 131]); + const testDest = new Uint8ClampedArray(4 * 4 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 27, 27, 27, 27, 27, 27, 125, 125, 125, @@ -93,24 +93,24 @@ describe("colorspace", function() { expect(testDest).toEqual(expectedDest); }); it("should handle the case when cs is an indirect object", function() { - let cs = Ref.get(10, 0); - let xref = new XRefMock([ + const cs = Ref.get(10, 0); + const xref = new XRefMock([ { ref: cs, data: Name.get("DeviceGray"), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); - let testSrc = new Uint8Array([27, 125, 250, 131]); - let testDest = new Uint8ClampedArray(3 * 3 * 3); + const testSrc = new Uint8Array([27, 125, 250, 131]); + const testDest = new Uint8ClampedArray(3 * 3 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 27, 27, 27, 27, 27, 27, 125, 125, 125, @@ -134,30 +134,30 @@ describe("colorspace", function() { describe("DeviceRgbCS", function() { it("should handle the case when cs is a Name object", function() { - let cs = Name.get("DeviceRGB"); - let xref = new XRefMock([ + const cs = Name.get("DeviceRGB"); + const xref = new XRefMock([ { ref: Ref.get(10, 0), data: new Dict(), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); // prettier-ignore - let testSrc = new Uint8Array([ + const testSrc = new Uint8Array([ 27, 125, 250, 131, 139, 140, 111, 25, 198, 21, 147, 255 ]); - let testDest = new Uint8ClampedArray(4 * 4 * 3); + const testDest = new Uint8ClampedArray(4 * 4 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 27, 125, 250, 27, 125, 250, 131, 139, 140, @@ -185,30 +185,30 @@ describe("colorspace", function() { expect(testDest).toEqual(expectedDest); }); it("should handle the case when cs is an indirect object", function() { - let cs = Ref.get(10, 0); - let xref = new XRefMock([ + const cs = Ref.get(10, 0); + const xref = new XRefMock([ { ref: cs, data: Name.get("DeviceRGB"), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); // prettier-ignore - let testSrc = new Uint8Array([ + const testSrc = new Uint8Array([ 27, 125, 250, 131, 139, 140, 111, 25, 198, 21, 147, 255 ]); - let testDest = new Uint8ClampedArray(3 * 3 * 3); + const testDest = new Uint8ClampedArray(3 * 3 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 27, 125, 250, 27, 125, 250, 131, 139, 140, @@ -232,30 +232,30 @@ describe("colorspace", 
function() { describe("DeviceCmykCS", function() { it("should handle the case when cs is a Name object", function() { - let cs = Name.get("DeviceCMYK"); - let xref = new XRefMock([ + const cs = Name.get("DeviceCMYK"); + const xref = new XRefMock([ { ref: Ref.get(10, 0), data: new Dict(), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); // prettier-ignore - let testSrc = new Uint8Array([ + const testSrc = new Uint8Array([ 27, 125, 250, 128, 131, 139, 140, 45, 111, 25, 198, 78, 21, 147, 255, 69 ]); - let testDest = new Uint8ClampedArray(4 * 4 * 3); + const testDest = new Uint8ClampedArray(4 * 4 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 135, 81, 18, 135, 81, 18, 114, 102, 97, @@ -283,30 +283,30 @@ describe("colorspace", function() { expect(testDest).toEqual(expectedDest); }); it("should handle the case when cs is an indirect object", function() { - let cs = Ref.get(10, 0); - let xref = new XRefMock([ + const cs = Ref.get(10, 0); + const xref = new XRefMock([ { ref: cs, data: Name.get("DeviceCMYK"), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); // prettier-ignore - let testSrc = new Uint8Array([ + const testSrc = new Uint8Array([ 27, 125, 250, 128, 131, 139, 140, 45, 111, 25, 198, 78, 21, 147, 255, 69 ]); - let testDest = new Uint8ClampedArray(3 * 3 * 3); + const testDest = new Uint8ClampedArray(3 * 3 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 135, 81, 18, 135, 81, 18, 114, 102, 97, @@ -330,29 +330,29 @@ describe("colorspace", function() { describe("CalGrayCS", function() { it("should handle the case when cs is an array", function() { - let params = new Dict(); + const params = new Dict(); params.set("WhitePoint", [1, 1, 1]); params.set("BlackPoint", [0, 0, 0]); params.set("Gamma", 2.0); - let cs = [Name.get("CalGray"), params]; - let xref = new XRefMock([ + const cs = [Name.get("CalGray"), params]; + const xref = new XRefMock([ { ref: Ref.get(10, 0), data: new Dict(), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); - let testSrc = new Uint8Array([27, 125, 250, 131]); - let testDest = new Uint8ClampedArray(4 * 4 * 3); + const testSrc = new Uint8Array([27, 125, 250, 131]); + const testDest = new Uint8ClampedArray(4 * 4 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 25, 25, 25, 25, 25, 25, 143, 143, 143, @@ -383,36 +383,36 @@ describe("colorspace", function() { describe("CalRGBCS", function() { it("should handle the case when cs is an array", function() { - let params = new Dict(); + const params = new Dict(); params.set("WhitePoint", [1, 1, 1]); params.set("BlackPoint", [0, 0, 0]); 
params.set("Gamma", [1, 1, 1]); params.set("Matrix", [1, 0, 0, 0, 1, 0, 0, 0, 1]); - let cs = [Name.get("CalRGB"), params]; - let xref = new XRefMock([ + const cs = [Name.get("CalRGB"), params]; + const xref = new XRefMock([ { ref: Ref.get(10, 0), data: new Dict(), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); // prettier-ignore - let testSrc = new Uint8Array([ + const testSrc = new Uint8Array([ 27, 125, 250, 131, 139, 140, 111, 25, 198, 21, 147, 255 ]); - let testDest = new Uint8ClampedArray(3 * 3 * 3); + const testDest = new Uint8ClampedArray(3 * 3 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 0, 238, 255, 0, 238, 255, 185, 196, 195, @@ -436,35 +436,35 @@ describe("colorspace", function() { describe("LabCS", function() { it("should handle the case when cs is an array", function() { - let params = new Dict(); + const params = new Dict(); params.set("WhitePoint", [1, 1, 1]); params.set("BlackPoint", [0, 0, 0]); params.set("Range", [-100, 100, -100, 100]); - let cs = [Name.get("Lab"), params]; - let xref = new XRefMock([ + const cs = [Name.get("Lab"), params]; + const xref = new XRefMock([ { ref: Ref.get(10, 0), data: new Dict(), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); // prettier-ignore - let testSrc = new Uint8Array([ + const testSrc = new Uint8Array([ 27, 25, 50, 31, 19, 40, 11, 25, 98, 21, 47, 55 ]); - let testDest = new Uint8ClampedArray(3 * 3 * 3); + const testDest = new Uint8ClampedArray(3 * 3 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 0, 49, 101, 0, 49, 101, 0, 53, 117, @@ -490,29 +490,29 @@ describe("colorspace", function() { describe("IndexedCS", function() { it("should handle the case when cs is an array", function() { // prettier-ignore - let lookup = new Uint8Array([ + const lookup = new Uint8Array([ 23, 155, 35, 147, 69, 93, 255, 109, 70 ]); - let cs = [Name.get("Indexed"), Name.get("DeviceRGB"), 2, lookup]; - let xref = new XRefMock([ + const cs = [Name.get("Indexed"), Name.get("DeviceRGB"), 2, lookup]; + const xref = new XRefMock([ { ref: Ref.get(10, 0), data: new Dict(), }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); - let testSrc = new Uint8Array([2, 2, 0, 1]); - let testDest = new Uint8ClampedArray(3 * 3 * 3); + const testSrc = new Uint8Array([2, 2, 0, 1]); + const testDest = new Uint8ClampedArray(3 * 3 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 255, 109, 70, 255, 109, 70, 255, 109, 70, @@ -536,7 +536,7 @@ describe("colorspace", function() { describe("AlternateCS", function() { it("should handle the case when cs is an array", function() { - let 
fnDict = new Dict(); + const fnDict = new Dict(); fnDict.set("FunctionType", 4); fnDict.set("Domain", [0.0, 1.0]); fnDict.set("Range", [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]); @@ -550,31 +550,31 @@ describe("colorspace", function() { ); fn = new Stream(fn.bytes, 0, 58, fnDict); - let fnRef = Ref.get(10, 0); + const fnRef = Ref.get(10, 0); - let cs = [ + const cs = [ Name.get("Separation"), Name.get("LogoGreen"), Name.get("DeviceCMYK"), fnRef, ]; - let xref = new XRefMock([ + const xref = new XRefMock([ { ref: fnRef, data: fn, }, ]); - let res = new Dict(); + const res = new Dict(); - let pdfFunctionFactory = new PDFFunctionFactory({ + const pdfFunctionFactory = new PDFFunctionFactory({ xref, }); - let colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); + const colorSpace = ColorSpace.parse(cs, xref, res, pdfFunctionFactory); - let testSrc = new Uint8Array([27, 25, 50, 31]); - let testDest = new Uint8ClampedArray(3 * 3 * 3); + const testSrc = new Uint8Array([27, 25, 50, 31]); + const testDest = new Uint8ClampedArray(3 * 3 * 3); // prettier-ignore - let expectedDest = new Uint8ClampedArray([ + const expectedDest = new Uint8ClampedArray([ 226, 242, 241, 226, 242, 241, 229, 244, 242, diff --git a/test/unit/custom_spec.js b/test/unit/custom_spec.js index 61c3723aa..b422c0277 100644 --- a/test/unit/custom_spec.js +++ b/test/unit/custom_spec.js @@ -19,7 +19,7 @@ import { getDocument } from "../../src/display/api.js"; import { isNodeJS } from "../../src/shared/is_node.js"; function getTopLeftPixel(canvasContext) { - let imgData = canvasContext.getImageData(0, 0, 1, 1); + const imgData = canvasContext.getImageData(0, 0, 1, 1); return { r: imgData.data[0], g: imgData.data[1], @@ -29,7 +29,9 @@ function getTopLeftPixel(canvasContext) { } describe("custom canvas rendering", function() { - let transparentGetDocumentParams = buildGetDocumentParams("transparent.pdf"); + const transparentGetDocumentParams = buildGetDocumentParams( + "transparent.pdf" + ); let CanvasFactory; let loadingTask; diff --git a/test/unit/fetch_stream_spec.js b/test/unit/fetch_stream_spec.js index 1cafc9759..49a06cbea 100644 --- a/test/unit/fetch_stream_spec.js +++ b/test/unit/fetch_stream_spec.js @@ -86,7 +86,7 @@ describe("fetch_stream", function() { ); const rangeReader2 = stream.getRangeReader(pdfLength - tailSize, pdfLength); - let result1 = { value: 0 }, + const result1 = { value: 0 }, result2 = { value: 0 }; const read = function(reader, lenResult) { return reader.read().then(function(result) { diff --git a/test/unit/message_handler_spec.js b/test/unit/message_handler_spec.js index fd8547da8..76ab3e6f0 100644 --- a/test/unit/message_handler_spec.js +++ b/test/unit/message_handler_spec.js @@ -31,9 +31,9 @@ describe("message_handler", function() { describe("sendWithStream", function() { it("should return a ReadableStream", function() { - let port = new LoopbackPort(); - let messageHandler1 = new MessageHandler("main", "worker", port); - let readable = messageHandler1.sendWithStream("fakeHandler"); + const port = new LoopbackPort(); + const messageHandler1 = new MessageHandler("main", "worker", port); + const readable = messageHandler1.sendWithStream("fakeHandler"); // Check if readable is an instance of ReadableStream. 
expect(typeof readable).toEqual("object"); expect(typeof readable.getReader).toEqual("function"); @@ -41,9 +41,9 @@ describe("message_handler", function() { it("should read using a reader", function(done) { let log = ""; - let port = new LoopbackPort(); - let messageHandler1 = new MessageHandler("main", "worker", port); - let messageHandler2 = new MessageHandler("worker", "main", port); + const port = new LoopbackPort(); + const messageHandler1 = new MessageHandler("main", "worker", port); + const messageHandler2 = new MessageHandler("worker", "main", port); messageHandler2.on("fakeHandler", (data, sink) => { sink.onPull = function() { log += "p"; @@ -61,7 +61,7 @@ describe("message_handler", function() { }); return sleep(5); }); - let readable = messageHandler1.sendWithStream( + const readable = messageHandler1.sendWithStream( "fakeHandler", {}, { @@ -71,7 +71,7 @@ describe("message_handler", function() { }, } ); - let reader = readable.getReader(); + const reader = readable.getReader(); sleep(10) .then(() => { expect(log).toEqual(""); @@ -95,8 +95,8 @@ describe("message_handler", function() { it("should not read any data when cancelled", function(done) { let log = ""; - let port = new LoopbackPort(); - let messageHandler2 = new MessageHandler("worker", "main", port); + const port = new LoopbackPort(); + const messageHandler2 = new MessageHandler("worker", "main", port); messageHandler2.on("fakeHandler", (data, sink) => { sink.onPull = function() { log += "p"; @@ -126,8 +126,8 @@ describe("message_handler", function() { } ); }); - let messageHandler1 = new MessageHandler("main", "worker", port); - let readable = messageHandler1.sendWithStream( + const messageHandler1 = new MessageHandler("main", "worker", port); + const readable = messageHandler1.sendWithStream( "fakeHandler", {}, { @@ -138,7 +138,7 @@ describe("message_handler", function() { } ); - let reader = readable.getReader(); + const reader = readable.getReader(); sleep(10) .then(() => { expect(log).toEqual("01"); @@ -161,8 +161,8 @@ describe("message_handler", function() { it("should not read when errored", function(done) { let log = ""; - let port = new LoopbackPort(); - let messageHandler2 = new MessageHandler("worker", "main", port); + const port = new LoopbackPort(); + const messageHandler2 = new MessageHandler("worker", "main", port); messageHandler2.on("fakeHandler", (data, sink) => { sink.onPull = function() { log += "p"; @@ -182,8 +182,8 @@ describe("message_handler", function() { sink.error(new Error("should not read when errored")); }); }); - let messageHandler1 = new MessageHandler("main", "worker", port); - let readable = messageHandler1.sendWithStream( + const messageHandler1 = new MessageHandler("main", "worker", port); + const readable = messageHandler1.sendWithStream( "fakeHandler", {}, { @@ -194,7 +194,7 @@ describe("message_handler", function() { } ); - let reader = readable.getReader(); + const reader = readable.getReader(); sleep(10) .then(() => { @@ -216,8 +216,8 @@ describe("message_handler", function() { it("should read data with blocking promise", function(done) { let log = ""; - let port = new LoopbackPort(); - let messageHandler2 = new MessageHandler("worker", "main", port); + const port = new LoopbackPort(); + const messageHandler2 = new MessageHandler("worker", "main", port); messageHandler2.on("fakeHandler", (data, sink) => { sink.onPull = function() { log += "p"; @@ -242,8 +242,8 @@ describe("message_handler", function() { }); }); - let messageHandler1 = new MessageHandler("main", "worker", port); - 
let readable = messageHandler1.sendWithStream( + const messageHandler1 = new MessageHandler("main", "worker", port); + const readable = messageHandler1.sendWithStream( "fakeHandler", {}, { @@ -254,7 +254,7 @@ describe("message_handler", function() { } ); - let reader = readable.getReader(); + const reader = readable.getReader(); // Sleep for 10ms, so that read() is not unblocking the ready promise. // Chain all read() to stream in sequence. sleep(10) @@ -292,8 +292,8 @@ describe("message_handler", function() { " into stream", function(done) { let log = ""; - let port = new LoopbackPort(); - let messageHandler2 = new MessageHandler("worker", "main", port); + const port = new LoopbackPort(); + const messageHandler2 = new MessageHandler("worker", "main", port); messageHandler2.on("fakeHandler", (data, sink) => { sink.onPull = function() { log += "p"; @@ -319,8 +319,8 @@ describe("message_handler", function() { return sleep(10); }); - let messageHandler1 = new MessageHandler("main", "worker", port); - let readable = messageHandler1.sendWithStream( + const messageHandler1 = new MessageHandler("main", "worker", port); + const readable = messageHandler1.sendWithStream( "fakeHandler", {}, { @@ -331,7 +331,7 @@ describe("message_handler", function() { } ); - let reader = readable.getReader(); + const reader = readable.getReader(); sleep(10) .then(() => { @@ -366,9 +366,9 @@ describe("message_handler", function() { it("should ignore any pull after close is called", function(done) { let log = ""; - let port = new LoopbackPort(); - let capability = createPromiseCapability(); - let messageHandler2 = new MessageHandler("worker", "main", port); + const port = new LoopbackPort(); + const capability = createPromiseCapability(); + const messageHandler2 = new MessageHandler("worker", "main", port); messageHandler2.on("fakeHandler", (data, sink) => { sink.onPull = function() { log += "p"; @@ -386,8 +386,8 @@ describe("message_handler", function() { }); }); - let messageHandler1 = new MessageHandler("main", "worker", port); - let readable = messageHandler1.sendWithStream( + const messageHandler1 = new MessageHandler("main", "worker", port); + const readable = messageHandler1.sendWithStream( "fakeHandler", {}, { @@ -398,7 +398,7 @@ describe("message_handler", function() { } ); - let reader = readable.getReader(); + const reader = readable.getReader(); sleep(10) .then(() => { diff --git a/test/unit/node_stream_spec.js b/test/unit/node_stream_spec.js index 8bdc58584..e46c44743 100644 --- a/test/unit/node_stream_spec.js +++ b/test/unit/node_stream_spec.js @@ -21,26 +21,26 @@ import { PDFNodeStream } from "../../src/display/node_stream.js"; // Make sure that we only running this script is Node.js environments. assert(isNodeJS); -let path = __non_webpack_require__("path"); -let url = __non_webpack_require__("url"); -let http = __non_webpack_require__("http"); -let fs = __non_webpack_require__("fs"); +const path = __non_webpack_require__("path"); +const url = __non_webpack_require__("url"); +const http = __non_webpack_require__("http"); +const fs = __non_webpack_require__("fs"); describe("node_stream", function() { let server = null; let port = null; - let pdf = url.parse( + const pdf = url.parse( encodeURI( "file://" + path.join(process.cwd(), "./test/pdfs/tracemonkey.pdf") ) ).href; - let pdfLength = 1016315; + const pdfLength = 1016315; beforeAll(done => { // Create http server to serve pdf data for tests. 
server = http .createServer((request, response) => { - let filePath = process.cwd() + "/test/pdfs" + request.url; + const filePath = process.cwd() + "/test/pdfs" + request.url; fs.lstat(filePath, (error, stat) => { if (error) { response.writeHead(404); @@ -48,8 +48,8 @@ describe("node_stream", function() { return; } if (!request.headers["range"]) { - let contentLength = stat.size; - let stream = fs.createReadStream(filePath); + const contentLength = stat.size; + const stream = fs.createReadStream(filePath); response.writeHead(200, { "Content-Type": "application/pdf", "Content-Length": contentLength, @@ -57,13 +57,13 @@ describe("node_stream", function() { }); stream.pipe(response); } else { - let [start, end] = request.headers["range"] + const [start, end] = request.headers["range"] .split("=")[1] .split("-") .map(x => { return Number(x); }); - let stream = fs.createReadStream(filePath, { start, end }); + const stream = fs.createReadStream(filePath, { start, end }); response.writeHead(206, { "Content-Type": "application/pdf", }); @@ -83,38 +83,38 @@ describe("node_stream", function() { }); it("read both http(s) and filesystem pdf files", function(done) { - let stream1 = new PDFNodeStream({ + const stream1 = new PDFNodeStream({ url: `http://127.0.0.1:${port}/tracemonkey.pdf`, rangeChunkSize: 65536, disableStream: true, disableRange: true, }); - let stream2 = new PDFNodeStream({ + const stream2 = new PDFNodeStream({ url: pdf, rangeChunkSize: 65536, disableStream: true, disableRange: true, }); - let fullReader1 = stream1.getFullReader(); - let fullReader2 = stream2.getFullReader(); + const fullReader1 = stream1.getFullReader(); + const fullReader2 = stream2.getFullReader(); let isStreamingSupported1, isRangeSupported1; - let promise1 = fullReader1.headersReady.then(() => { + const promise1 = fullReader1.headersReady.then(() => { isStreamingSupported1 = fullReader1.isStreamingSupported; isRangeSupported1 = fullReader1.isRangeSupported; }); let isStreamingSupported2, isRangeSupported2; - let promise2 = fullReader2.headersReady.then(() => { + const promise2 = fullReader2.headersReady.then(() => { isStreamingSupported2 = fullReader2.isStreamingSupported; isRangeSupported2 = fullReader2.isRangeSupported; }); let len1 = 0, len2 = 0; - let read1 = function() { + const read1 = function() { return fullReader1.read().then(function(result) { if (result.done) { return undefined; @@ -123,7 +123,7 @@ describe("node_stream", function() { return read1(); }); }; - let read2 = function() { + const read2 = function() { return fullReader2.read().then(function(result) { if (result.done) { return undefined; @@ -133,7 +133,7 @@ describe("node_stream", function() { }); }; - let readPromise = Promise.all([read1(), read2(), promise1, promise2]); + const readPromise = Promise.all([read1(), read2(), promise1, promise2]); readPromise .then(result => { expect(isStreamingSupported1).toEqual(false); @@ -150,15 +150,15 @@ describe("node_stream", function() { }); it("read custom ranges for both http(s) and filesystem urls", function(done) { - let rangeSize = 32768; - let stream1 = new PDFNodeStream({ + const rangeSize = 32768; + const stream1 = new PDFNodeStream({ url: `http://127.0.0.1:${port}/tracemonkey.pdf`, length: pdfLength, rangeChunkSize: rangeSize, disableStream: true, disableRange: false, }); - let stream2 = new PDFNodeStream({ + const stream2 = new PDFNodeStream({ url: pdf, length: pdfLength, rangeChunkSize: rangeSize, @@ -166,13 +166,13 @@ describe("node_stream", function() { disableRange: false, }); - let 
fullReader1 = stream1.getFullReader(); - let fullReader2 = stream2.getFullReader(); + const fullReader1 = stream1.getFullReader(); + const fullReader2 = stream2.getFullReader(); let isStreamingSupported1, isRangeSupported1, fullReaderCancelled1; let isStreamingSupported2, isRangeSupported2, fullReaderCancelled2; - let promise1 = fullReader1.headersReady.then(function() { + const promise1 = fullReader1.headersReady.then(function() { isStreamingSupported1 = fullReader1.isStreamingSupported; isRangeSupported1 = fullReader1.isRangeSupported; // we shall be able to close the full reader without issues @@ -180,7 +180,7 @@ describe("node_stream", function() { fullReaderCancelled1 = true; }); - let promise2 = fullReader2.headersReady.then(function() { + const promise2 = fullReader2.headersReady.then(function() { isStreamingSupported2 = fullReader2.isStreamingSupported; isRangeSupported2 = fullReader2.isRangeSupported; fullReader2.cancel(new AbortException("Don't need fullReader2.")); @@ -188,26 +188,32 @@ describe("node_stream", function() { }); // Skipping fullReader results, requesting something from the PDF end. - let tailSize = pdfLength % rangeSize || rangeSize; + const tailSize = pdfLength % rangeSize || rangeSize; - let range11Reader = stream1.getRangeReader( + const range11Reader = stream1.getRangeReader( pdfLength - tailSize - rangeSize, pdfLength - tailSize ); - let range12Reader = stream1.getRangeReader(pdfLength - tailSize, pdfLength); + const range12Reader = stream1.getRangeReader( + pdfLength - tailSize, + pdfLength + ); - let range21Reader = stream2.getRangeReader( + const range21Reader = stream2.getRangeReader( pdfLength - tailSize - rangeSize, pdfLength - tailSize ); - let range22Reader = stream2.getRangeReader(pdfLength - tailSize, pdfLength); + const range22Reader = stream2.getRangeReader( + pdfLength - tailSize, + pdfLength + ); - let result11 = { value: 0 }, + const result11 = { value: 0 }, result12 = { value: 0 }; - let result21 = { value: 0 }, + const result21 = { value: 0 }, result22 = { value: 0 }; - let read = function(reader, lenResult) { + const read = function(reader, lenResult) { return reader.read().then(function(result) { if (result.done) { return undefined; @@ -217,7 +223,7 @@ describe("node_stream", function() { }); }; - let readPromises = Promise.all([ + const readPromises = Promise.all([ read(range11Reader, result11), read(range12Reader, result12), read(range21Reader, result21), diff --git a/test/unit/pdf_history_spec.js b/test/unit/pdf_history_spec.js index 1f997498f..a29cec9d2 100644 --- a/test/unit/pdf_history_spec.js +++ b/test/unit/pdf_history_spec.js @@ -29,7 +29,7 @@ describe("pdf_history", function() { false ); - let destArrayString = JSON.stringify([ + const destArrayString = JSON.stringify([ { num: 3757, gen: 0 }, { name: "XYZ" }, 92.918, @@ -51,11 +51,11 @@ describe("pdf_history", function() { }); describe("isDestArraysEqual", function() { - let firstDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 0, 375, null]; - let secondDest = [{ num: 5, gen: 0 }, { name: "XYZ" }, 0, 375, null]; - let thirdDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 750, 0, null]; - let fourthDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 0, 375, 1.0]; - let fifthDest = [{ gen: 0, num: 1 }, { name: "XYZ" }, 0, 375, null]; + const firstDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 0, 375, null]; + const secondDest = [{ num: 5, gen: 0 }, { name: "XYZ" }, 0, 375, null]; + const thirdDest = [{ num: 1, gen: 0 }, { name: "XYZ" }, 750, 0, null]; + const fourthDest = [{ num: 1, gen: 0 }, { 
name: "XYZ" }, 0, 375, 1.0]; + const fifthDest = [{ gen: 0, num: 1 }, { name: "XYZ" }, 0, 375, null]; it("should reject non-equal destination arrays", function() { expect(isDestArraysEqual(firstDest, undefined)).toEqual(false); @@ -70,7 +70,7 @@ describe("pdf_history", function() { expect(isDestArraysEqual(firstDest, firstDest)).toEqual(true); expect(isDestArraysEqual(firstDest, fifthDest)).toEqual(true); - let firstDestCopy = firstDest.slice(); + const firstDestCopy = firstDest.slice(); expect(firstDest).not.toBe(firstDestCopy); expect(isDestArraysEqual(firstDest, firstDestCopy)).toEqual(true); diff --git a/test/unit/stream_spec.js b/test/unit/stream_spec.js index 56cd46e3a..2e58cd3ef 100644 --- a/test/unit/stream_spec.js +++ b/test/unit/stream_spec.js @@ -69,7 +69,7 @@ describe("stream", function() { ); predictor.reset(); - let clampedResult = predictor.getBytes(6, /* forceClamped = */ true); + const clampedResult = predictor.getBytes(6, /* forceClamped = */ true); expect(clampedResult).toEqual( new Uint8ClampedArray([100, 3, 101, 2, 102, 1]) ); diff --git a/test/unit/test_utils.js b/test/unit/test_utils.js index bd2a9cc85..09d2827a2 100644 --- a/test/unit/test_utils.js +++ b/test/unit/test_utils.js @@ -50,13 +50,13 @@ const TEST_PDFS_PATH = { }; function buildGetDocumentParams(filename, options) { - let params = Object.create(null); + const params = Object.create(null); if (isNodeJS) { params.url = TEST_PDFS_PATH.node + filename; } else { params.url = new URL(TEST_PDFS_PATH.dom + filename, window.location).href; } - for (let option in options) { + for (const option in options) { params[option] = options[option]; } return params; @@ -137,8 +137,8 @@ class XRefMock { constructor(array) { this._map = Object.create(null); - for (let key in array) { - let obj = array[key]; + for (const key in array) { + const obj = array[key]; this._map[obj.ref.toString()] = obj.data; } } diff --git a/test/unit/ui_utils_spec.js b/test/unit/ui_utils_spec.js index ef4a74485..b60c783d3 100644 --- a/test/unit/ui_utils_spec.js +++ b/test/unit/ui_utils_spec.js @@ -449,7 +449,7 @@ describe("ui_utils", function() { }); it("should reject invalid parameters", function(done) { - let invalidTarget = waitOnEventOrTimeout({ + const invalidTarget = waitOnEventOrTimeout({ target: "window", name: "DOMContentLoaded", }).then( @@ -461,7 +461,7 @@ describe("ui_utils", function() { } ); - let invalidName = waitOnEventOrTimeout({ + const invalidName = waitOnEventOrTimeout({ target: eventBus, name: "", }).then( @@ -473,7 +473,7 @@ describe("ui_utils", function() { } ); - let invalidDelay = waitOnEventOrTimeout({ + const invalidDelay = waitOnEventOrTimeout({ target: eventBus, name: "pagerendered", delay: -1000, @@ -496,9 +496,9 @@ describe("ui_utils", function() { if (isNodeJS) { pending("Document in not supported in Node.js."); } - let button = document.createElement("button"); + const button = document.createElement("button"); - let buttonClicked = waitOnEventOrTimeout({ + const buttonClicked = waitOnEventOrTimeout({ target: button, name: "click", delay: 10000, @@ -516,9 +516,9 @@ describe("ui_utils", function() { if (isNodeJS) { pending("Document in not supported in Node.js."); } - let button = document.createElement("button"); + const button = document.createElement("button"); - let buttonClicked = waitOnEventOrTimeout({ + const buttonClicked = waitOnEventOrTimeout({ target: button, name: "click", delay: 10, @@ -532,7 +532,7 @@ describe("ui_utils", function() { }); it("should resolve on event, using the EventBus", 
function(done) { - let pageRendered = waitOnEventOrTimeout({ + const pageRendered = waitOnEventOrTimeout({ target: eventBus, name: "pagerendered", delay: 10000, @@ -547,7 +547,7 @@ describe("ui_utils", function() { }); it("should resolve on timeout, using the EventBus", function(done) { - let pageRendered = waitOnEventOrTimeout({ + const pageRendered = waitOnEventOrTimeout({ target: eventBus, name: "pagerendered", delay: 10, diff --git a/test/unit/util_spec.js b/test/unit/util_spec.js index f5b7daaa6..38f83632e 100644 --- a/test/unit/util_spec.js +++ b/test/unit/util_spec.js @@ -48,14 +48,14 @@ describe("util", function() { const length = 10000; // Larger than MAX_ARGUMENT_COUNT = 8192. // Create an array with `length` 'a' character codes. - let bytes = new Uint8Array(length); + const bytes = new Uint8Array(length); for (let i = 0; i < length; i++) { bytes[i] = "a".charCodeAt(0); } // Create a string with `length` 'a' characters. We need an array of size // `length + 1` since `join` puts the argument between the array elements. - let string = Array(length + 1).join("a"); + const string = Array(length + 1).join("a"); expect(bytesToString(bytes)).toEqual(string); }); @@ -184,55 +184,55 @@ describe("util", function() { describe("stringToPDFString", function() { it("handles ISO Latin 1 strings", function() { - let str = "\x8Dstring\x8E"; + const str = "\x8Dstring\x8E"; expect(stringToPDFString(str)).toEqual("\u201Cstring\u201D"); }); it("handles UTF-16 big-endian strings", function() { - let str = "\xFE\xFF\x00\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00\x67"; + const str = "\xFE\xFF\x00\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00\x67"; expect(stringToPDFString(str)).toEqual("string"); }); it("handles UTF-16 little-endian strings", function() { - let str = "\xFF\xFE\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00\x67\x00"; + const str = "\xFF\xFE\x73\x00\x74\x00\x72\x00\x69\x00\x6E\x00\x67\x00"; expect(stringToPDFString(str)).toEqual("string"); }); it("handles empty strings", function() { // ISO Latin 1 - let str1 = ""; + const str1 = ""; expect(stringToPDFString(str1)).toEqual(""); // UTF-16BE - let str2 = "\xFE\xFF"; + const str2 = "\xFE\xFF"; expect(stringToPDFString(str2)).toEqual(""); // UTF-16LE - let str3 = "\xFF\xFE"; + const str3 = "\xFF\xFE"; expect(stringToPDFString(str3)).toEqual(""); }); }); describe("removeNullCharacters", function() { it("should not modify string without null characters", function() { - let str = "string without null chars"; + const str = "string without null chars"; expect(removeNullCharacters(str)).toEqual("string without null chars"); }); it("should modify string with null characters", function() { - let str = "string\x00With\x00Null\x00Chars"; + const str = "string\x00With\x00Null\x00Chars"; expect(removeNullCharacters(str)).toEqual("stringWithNullChars"); }); }); describe("ReadableStream", function() { it("should return an Object", function() { - let readable = new ReadableStream(); + const readable = new ReadableStream(); expect(typeof readable).toEqual("object"); }); it("should have property getReader", function() { - let readable = new ReadableStream(); + const readable = new ReadableStream(); expect(typeof readable.getReader).toEqual("function"); }); }); diff --git a/web/.eslintrc b/web/.eslintrc index 3fe119c0d..4bf1e6286 100644 --- a/web/.eslintrc +++ b/web/.eslintrc @@ -9,6 +9,5 @@ // ECMAScript 6 "no-var": "error", - "prefer-const": "error", }, }
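A minimal sketch of why the colorspace_spec.js buffers above qualify for `const`: the rule only objects to rebinding, not to mutation, so the tests can still fill `testDest` in place. The values below are illustrative.

  const testDest = new Uint8ClampedArray(4 * 4 * 3);
  testDest[0] = 255;   // writing elements is allowed under `const`
  testDest.fill(0);    // so are mutating methods
  // testDest = new Uint8ClampedArray(3);  // rebinding would throw a TypeError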
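The message_handler_spec.js and fetch_stream_spec.js hunks above show the boundary of the automated conversion: the string accumulator `log` is reassigned with `+=`, so it has to stay `let`, whereas counters wrapped in objects such as `result1 = { value: 0 }` only ever have a property written and can become `const`. A minimal standalone sketch of that distinction:

  let log = "";                 // reassigned below, so it must remain `let`
  const result = { value: 0 };  // only a property changes, so `const` is safe
  log += "p";                   // compound assignment rebinds `log`
  result.value += 42;           // property write, no rebinding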
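The first sendWithStream test above checks `typeof readable` and `typeof readable.getReader` rather than using `instanceof`, presumably so the assertion also holds for a polyfilled stream implementation. A sketch of that duck-typed check; the helper name is hypothetical:

  function looksLikeReadableStream(obj) {
    // Mirrors the shape the test asserts: an object exposing getReader().
    return typeof obj === "object" && obj !== null &&
      typeof obj.getReader === "function";
  }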
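The node_stream_spec.js test server above answers HTTP Range requests by splitting the `Range` header exactly as the hunk shows; a worked example with an assumed header value:

  const rangeHeader = "bytes=950272-1016314";  // assumed example value
  const [start, end] = rangeHeader
    .split("=")[1]     // "950272-1016314"
    .split("-")        // ["950272", "1016314"]
    .map(x => Number(x));
  // start === 950272, end === 1016314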
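The custom-range test above skips the fullReader results and reads only the tail of the file; a worked example of its `tailSize` computation, using the `pdfLength` and `rangeSize` values from the test itself:

  const pdfLength = 1016315;
  const rangeSize = 32768;
  const tailSize = pdfLength % rangeSize || rangeSize;
  // 1016315 % 32768 === 507, so tailSize === 507; the `|| rangeSize` fallback
  // only matters when the length is an exact multiple of the chunk size and
  // the remainder would otherwise be 0.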
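The test_utils.js hunks above also convert loop variables: `for (const option in options)` is valid because for-in and for-of create a fresh binding on every iteration, while classic counting loops such as the one kept in util_spec.js must stay `let`, since `i++` reassigns the counter. A small sketch, with the option values being illustrative:

  const options = { rangeChunkSize: 65536, disableStream: true };
  const params = Object.create(null);
  for (const option in options) {   // fresh `option` binding each iteration
    params[option] = options[option];
  }
  for (let i = 0; i < 3; i++) {     // `i++` is a reassignment, so `let` stays
    // ...
  }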