Merge pull request #13294 from timvandermeij/src-no-var
Enable the `no-var` linting rule in `src/core/{cmap,image,worker}.js`
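For background: `no-var` is a standard ESLint rule that disallows `var` declarations in favour of block-scoped `let`/`const`, and it is auto-fixable (`eslint --fix` handles the mechanical cases). The rule was evidently already enabled at the project level, since each of the three files had opted out locally with a `/* eslint-disable no-var */` comment; the diff below deletes that escape hatch and converts the remaining declarations, with manual attention where `var` hoisting was relied upon (for example, a shared `var i;` loop index becomes a per-loop `let i`). As an illustration only (this is not pdf.js's actual configuration), enabling such a rule in an `.eslintrc.js`-style config looks roughly like this:

```js
// Hypothetical .eslintrc.js excerpt -- a sketch, not the project's real config.
module.exports = {
  rules: {
    // Disallow `var`; require block-scoped `const`/`let` instead,
    // which is the conversion the hunks below perform.
    "no-var": "error",
  },
};
```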
commit 72be684c10

src/core/cmap.js
@@ -12,7 +12,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/* eslint-disable no-var */
 
 import {
   CMapCompressionType,
@@ -26,7 +25,7 @@ import { Lexer } from "./parser.js";
 import { MissingDataException } from "./core_utils.js";
 import { Stream } from "./stream.js";
 
-var BUILT_IN_CMAPS = [
+const BUILT_IN_CMAPS = [
   // << Start unicode maps.
   "Adobe-GB1-UCS2",
   "Adobe-CNS1-UCS2",
@@ -240,7 +239,7 @@ class CMap {
     if (high - low > MAX_MAP_RANGE) {
       throw new Error("mapBfRange - ignoring data above MAX_MAP_RANGE.");
     }
-    var lastByte = dstLow.length - 1;
+    const lastByte = dstLow.length - 1;
     while (low <= high) {
       this._map[low++] = dstLow;
       // Only the last byte has to be incremented.
@@ -437,10 +436,10 @@ class IdentityCMap extends CMap {
   }
 }
 
-var BinaryCMapReader = (function BinaryCMapReaderClosure() {
+const BinaryCMapReader = (function BinaryCMapReaderClosure() {
   function hexToInt(a, size) {
-    var n = 0;
-    for (var i = 0; i <= size; i++) {
+    let n = 0;
+    for (let i = 0; i <= size; i++) {
       n = (n << 8) | a[i];
     }
     return n >>> 0;
@@ -459,8 +458,8 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
   }
 
   function addHex(a, b, size) {
-    var c = 0;
-    for (var i = size; i >= 0; i--) {
+    let c = 0;
+    for (let i = size; i >= 0; i--) {
       c += a[i] + b[i];
       a[i] = c & 255;
       c >>= 8;
@@ -468,16 +467,16 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
   }
 
   function incHex(a, size) {
-    var c = 1;
-    for (var i = size; i >= 0 && c > 0; i--) {
+    let c = 1;
+    for (let i = size; i >= 0 && c > 0; i--) {
       c += a[i];
       a[i] = c & 255;
       c >>= 8;
     }
   }
 
-  var MAX_NUM_SIZE = 16;
-  var MAX_ENCODED_NUM_SIZE = 19; // ceil(MAX_NUM_SIZE * 7 / 8)
+  const MAX_NUM_SIZE = 16;
+  const MAX_ENCODED_NUM_SIZE = 19; // ceil(MAX_NUM_SIZE * 7 / 8)
 
   class BinaryCMapStream {
     constructor(data) {
@@ -495,10 +494,10 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
     }
 
     readNumber() {
-      var n = 0;
-      var last;
+      let n = 0;
+      let last;
       do {
-        var b = this.readByte();
+        const b = this.readByte();
         if (b < 0) {
           throw new FormatError("unexpected EOF in bcmap");
         }
@@ -509,7 +508,7 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
     }
 
     readSigned() {
-      var n = this.readNumber();
+      const n = this.readNumber();
       return n & 1 ? ~(n >>> 1) : n >>> 1;
     }
 
@@ -519,18 +518,18 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
     }
 
     readHexNumber(num, size) {
-      var last;
-      var stack = this.tmpBuf,
-        sp = 0;
+      let last;
+      const stack = this.tmpBuf;
+      let sp = 0;
       do {
-        var b = this.readByte();
+        const b = this.readByte();
         if (b < 0) {
           throw new FormatError("unexpected EOF in bcmap");
         }
         last = !(b & 0x80);
         stack[sp++] = b & 0x7f;
       } while (!last);
-      var i = size,
+      let i = size,
         buffer = 0,
         bufferSize = 0;
       while (i >= 0) {
@@ -547,18 +546,18 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
 
     readHexSigned(num, size) {
       this.readHexNumber(num, size);
-      var sign = num[size] & 1 ? 255 : 0;
-      var c = 0;
-      for (var i = 0; i <= size; i++) {
+      const sign = num[size] & 1 ? 255 : 0;
+      let c = 0;
+      for (let i = 0; i <= size; i++) {
        c = ((c & 1) << 8) | num[i];
        num[i] = (c >> 1) ^ sign;
      }
    }

    readString() {
-      var len = this.readNumber();
-      var s = "";
-      for (var i = 0; i < len; i++) {
+      const len = this.readNumber();
+      let s = "";
+      for (let i = 0; i < len; i++) {
        s += String.fromCharCode(this.readNumber());
      }
      return s;
@@ -568,21 +567,21 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
   // eslint-disable-next-line no-shadow
   class BinaryCMapReader {
     async process(data, cMap, extend) {
-      var stream = new BinaryCMapStream(data);
-      var header = stream.readByte();
+      const stream = new BinaryCMapStream(data);
+      const header = stream.readByte();
       cMap.vertical = !!(header & 1);
 
-      var useCMap = null;
-      var start = new Uint8Array(MAX_NUM_SIZE);
-      var end = new Uint8Array(MAX_NUM_SIZE);
-      var char = new Uint8Array(MAX_NUM_SIZE);
-      var charCode = new Uint8Array(MAX_NUM_SIZE);
-      var tmp = new Uint8Array(MAX_NUM_SIZE);
-      var code;
+      let useCMap = null;
+      const start = new Uint8Array(MAX_NUM_SIZE);
+      const end = new Uint8Array(MAX_NUM_SIZE);
+      const char = new Uint8Array(MAX_NUM_SIZE);
+      const charCode = new Uint8Array(MAX_NUM_SIZE);
+      const tmp = new Uint8Array(MAX_NUM_SIZE);
+      let code;
 
-      var b;
+      let b;
       while ((b = stream.readByte()) >= 0) {
-        var type = b >> 5;
+        const type = b >> 5;
         if (type === 7) {
           // metadata, e.g. comment or usecmap
           switch (b & 0x1f) {
@@ -595,16 +594,15 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
           }
           continue;
         }
-        var sequence = !!(b & 0x10);
-        var dataSize = b & 15;
+        const sequence = !!(b & 0x10);
+        const dataSize = b & 15;
 
         if (dataSize + 1 > MAX_NUM_SIZE) {
           throw new Error("BinaryCMapReader.process: Invalid dataSize.");
         }
 
-        var ucs2DataSize = 1;
-        var subitemsCount = stream.readNumber();
-        var i;
+        const ucs2DataSize = 1;
+        const subitemsCount = stream.readNumber();
         switch (type) {
           case 0: // codespacerange
             stream.readHex(start, dataSize);
@@ -615,7 +613,7 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
               hexToInt(start, dataSize),
               hexToInt(end, dataSize)
             );
-            for (i = 1; i < subitemsCount; i++) {
+            for (let i = 1; i < subitemsCount; i++) {
               incHex(end, dataSize);
               stream.readHexNumber(start, dataSize);
               addHex(start, end, dataSize);
@@ -634,7 +632,7 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
             addHex(end, start, dataSize);
             stream.readNumber(); // code
             // undefined range, skipping
-            for (i = 1; i < subitemsCount; i++) {
+            for (let i = 1; i < subitemsCount; i++) {
               incHex(end, dataSize);
               stream.readHexNumber(start, dataSize);
               addHex(start, end, dataSize);
@@ -648,7 +646,7 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
             stream.readHex(char, dataSize);
             code = stream.readNumber();
             cMap.mapOne(hexToInt(char, dataSize), code);
-            for (i = 1; i < subitemsCount; i++) {
+            for (let i = 1; i < subitemsCount; i++) {
               incHex(char, dataSize);
               if (!sequence) {
                 stream.readHexNumber(tmp, dataSize);
@@ -668,7 +666,7 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
               hexToInt(end, dataSize),
               code
             );
-            for (i = 1; i < subitemsCount; i++) {
+            for (let i = 1; i < subitemsCount; i++) {
               incHex(end, dataSize);
               if (!sequence) {
                 stream.readHexNumber(start, dataSize);
@@ -693,7 +691,7 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
               hexToInt(char, ucs2DataSize),
               hexToStr(charCode, dataSize)
             );
-            for (i = 1; i < subitemsCount; i++) {
+            for (let i = 1; i < subitemsCount; i++) {
               incHex(char, ucs2DataSize);
               if (!sequence) {
                 stream.readHexNumber(tmp, ucs2DataSize);
@@ -718,7 +716,7 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
               hexToInt(end, ucs2DataSize),
               hexToStr(charCode, dataSize)
             );
-            for (i = 1; i < subitemsCount; i++) {
+            for (let i = 1; i < subitemsCount; i++) {
               incHex(end, ucs2DataSize);
               if (!sequence) {
                 stream.readHexNumber(start, ucs2DataSize);
@@ -751,10 +749,10 @@ var BinaryCMapReader = (function BinaryCMapReaderClosure() {
   return BinaryCMapReader;
 })();
 
-var CMapFactory = (function CMapFactoryClosure() {
+const CMapFactory = (function CMapFactoryClosure() {
   function strToInt(str) {
-    var a = 0;
-    for (var i = 0; i < str.length; i++) {
+    let a = 0;
+    for (let i = 0; i < str.length; i++) {
       a = (a << 8) | str.charCodeAt(i);
     }
     return a >>> 0;
@@ -774,7 +772,7 @@ var CMapFactory = (function CMapFactoryClosure() {
 
   function parseBfChar(cMap, lexer) {
     while (true) {
-      var obj = lexer.getObj();
+      let obj = lexer.getObj();
       if (isEOF(obj)) {
         break;
       }
@@ -782,18 +780,18 @@ var CMapFactory = (function CMapFactoryClosure() {
         return;
       }
       expectString(obj);
-      var src = strToInt(obj);
+      const src = strToInt(obj);
       obj = lexer.getObj();
       // TODO are /dstName used?
       expectString(obj);
-      var dst = obj;
+      const dst = obj;
       cMap.mapOne(src, dst);
     }
   }
 
   function parseBfRange(cMap, lexer) {
     while (true) {
-      var obj = lexer.getObj();
+      let obj = lexer.getObj();
       if (isEOF(obj)) {
         break;
       }
@@ -801,17 +799,17 @@ var CMapFactory = (function CMapFactoryClosure() {
         return;
       }
       expectString(obj);
-      var low = strToInt(obj);
+      const low = strToInt(obj);
       obj = lexer.getObj();
       expectString(obj);
-      var high = strToInt(obj);
+      const high = strToInt(obj);
       obj = lexer.getObj();
       if (Number.isInteger(obj) || isString(obj)) {
-        var dstLow = Number.isInteger(obj) ? String.fromCharCode(obj) : obj;
+        const dstLow = Number.isInteger(obj) ? String.fromCharCode(obj) : obj;
         cMap.mapBfRange(low, high, dstLow);
       } else if (isCmd(obj, "[")) {
         obj = lexer.getObj();
-        var array = [];
+        const array = [];
         while (!isCmd(obj, "]") && !isEOF(obj)) {
           array.push(obj);
           obj = lexer.getObj();
@@ -826,7 +824,7 @@ var CMapFactory = (function CMapFactoryClosure() {
 
   function parseCidChar(cMap, lexer) {
     while (true) {
-      var obj = lexer.getObj();
+      let obj = lexer.getObj();
       if (isEOF(obj)) {
         break;
       }
@@ -834,17 +832,17 @@ var CMapFactory = (function CMapFactoryClosure() {
         return;
       }
       expectString(obj);
-      var src = strToInt(obj);
+      const src = strToInt(obj);
       obj = lexer.getObj();
       expectInt(obj);
-      var dst = obj;
+      const dst = obj;
       cMap.mapOne(src, dst);
     }
   }
 
   function parseCidRange(cMap, lexer) {
     while (true) {
-      var obj = lexer.getObj();
+      let obj = lexer.getObj();
       if (isEOF(obj)) {
         break;
       }
@@ -852,20 +850,20 @@ var CMapFactory = (function CMapFactoryClosure() {
         return;
       }
       expectString(obj);
-      var low = strToInt(obj);
+      const low = strToInt(obj);
       obj = lexer.getObj();
       expectString(obj);
-      var high = strToInt(obj);
+      const high = strToInt(obj);
       obj = lexer.getObj();
       expectInt(obj);
-      var dstLow = obj;
+      const dstLow = obj;
       cMap.mapCidRange(low, high, dstLow);
     }
   }
 
   function parseCodespaceRange(cMap, lexer) {
     while (true) {
-      var obj = lexer.getObj();
+      let obj = lexer.getObj();
       if (isEOF(obj)) {
         break;
       }
@@ -875,37 +873,36 @@ var CMapFactory = (function CMapFactoryClosure() {
       if (!isString(obj)) {
         break;
       }
-      var low = strToInt(obj);
+      const low = strToInt(obj);
       obj = lexer.getObj();
       if (!isString(obj)) {
         break;
       }
-      var high = strToInt(obj);
+      const high = strToInt(obj);
       cMap.addCodespaceRange(obj.length, low, high);
     }
     throw new FormatError("Invalid codespace range.");
   }
 
   function parseWMode(cMap, lexer) {
-    var obj = lexer.getObj();
+    const obj = lexer.getObj();
     if (Number.isInteger(obj)) {
       cMap.vertical = !!obj;
     }
   }
 
   function parseCMapName(cMap, lexer) {
-    var obj = lexer.getObj();
+    const obj = lexer.getObj();
     if (isName(obj) && isString(obj.name)) {
       cMap.name = obj.name;
     }
   }
 
   async function parseCMap(cMap, lexer, fetchBuiltInCMap, useCMap) {
-    var previous;
-    var embeddedUseCMap;
+    let previous, embeddedUseCMap;
     objLoop: while (true) {
       try {
-        var obj = lexer.getObj();
+        const obj = lexer.getObj();
         if (isEOF(obj)) {
           break;
         } else if (isName(obj)) {
@@ -966,8 +963,8 @@ var CMapFactory = (function CMapFactoryClosure() {
     // If there aren't any code space ranges defined clone all the parent ones
     // into this cMap.
     if (cMap.numCodespaceRanges === 0) {
-      var useCodespaceRanges = cMap.useCMap.codespaceRanges;
-      for (var i = 0; i < useCodespaceRanges.length; i++) {
+      const useCodespaceRanges = cMap.useCMap.codespaceRanges;
+      for (let i = 0; i < useCodespaceRanges.length; i++) {
        cMap.codespaceRanges[i] = useCodespaceRanges[i].slice();
      }
      cMap.numCodespaceRanges = cMap.useCMap.numCodespaceRanges;
@@ -997,7 +994,7 @@ var CMapFactory = (function CMapFactoryClosure() {
     }
 
     const { cMapData, compressionType } = await fetchBuiltInCMap(name);
-    var cMap = new CMap(true);
+    const cMap = new CMap(true);
 
     if (compressionType === CMapCompressionType.BINARY) {
       return new BinaryCMapReader().process(cMapData, cMap, useCMap => {
@@ -1005,7 +1002,7 @@ var CMapFactory = (function CMapFactoryClosure() {
       });
     }
     if (compressionType === CMapCompressionType.NONE) {
-      var lexer = new Lexer(new Stream(cMapData));
+      const lexer = new Lexer(new Stream(cMapData));
       return parseCMap(cMap, lexer, fetchBuiltInCMap, null);
     }
     throw new Error(
@@ -1015,9 +1012,9 @@ var CMapFactory = (function CMapFactoryClosure() {
 
   return {
     async create(params) {
-      var encoding = params.encoding;
-      var fetchBuiltInCMap = params.fetchBuiltInCMap;
-      var useCMap = params.useCMap;
+      const encoding = params.encoding;
+      const fetchBuiltInCMap = params.fetchBuiltInCMap;
+      const useCMap = params.useCMap;
 
       if (isName(encoding)) {
         return createBuiltInCMap(encoding.name, fetchBuiltInCMap);

src/core/image.js

@@ -12,7 +12,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/* eslint-disable no-var */
 
 import { assert, FormatError, ImageKind, info, warn } from "../shared/util.js";
 import { isName, isStream, Name } from "./primitives.js";
@@ -47,7 +46,7 @@ function decodeAndClamp(value, addend, coefficient, max) {
  * @returns {TypedArray} The resized image mask buffer.
  */
 function resizeImageMask(src, bpc, w1, h1, w2, h2) {
-  var length = w2 * h2;
+  const length = w2 * h2;
   let dest;
   if (bpc <= 8) {
     dest = new Uint8Array(length);
@@ -56,15 +55,15 @@ function resizeImageMask(src, bpc, w1, h1, w2, h2) {
   } else {
     dest = new Uint32Array(length);
   }
-  var xRatio = w1 / w2;
-  var yRatio = h1 / h2;
-  var i,
+  const xRatio = w1 / w2;
+  const yRatio = h1 / h2;
+  let i,
     j,
     py,
     newIndex = 0,
     oldIndex;
-  var xScaled = new Uint16Array(w2);
-  var w1Scanline = w1;
+  const xScaled = new Uint16Array(w2);
+  const w1Scanline = w1;
 
   for (i = 0; i < w2; i++) {
     xScaled[i] = Math.floor(i * xRatio);
@@ -92,13 +91,13 @@ class PDFImage {
     localColorSpaceCache,
   }) {
     this.image = image;
-    var dict = image.dict;
+    const dict = image.dict;
 
     const filter = dict.get("Filter");
     if (isName(filter)) {
       switch (filter.name) {
         case "JPXDecode":
-          var jpxImage = new JpxImage();
+          const jpxImage = new JpxImage();
           jpxImage.parseImageProperties(image.stream);
           image.stream.reset();
 
@@ -144,7 +143,7 @@ class PDFImage {
     this.imageMask = dict.get("ImageMask", "IM") || false;
     this.matte = dict.get("Matte") || false;
 
-    var bitsPerComponent = image.bitsPerComponent;
+    let bitsPerComponent = image.bitsPerComponent;
     if (!bitsPerComponent) {
       bitsPerComponent = dict.get("BitsPerComponent", "BPC");
       if (!bitsPerComponent) {
@@ -201,13 +200,13 @@ class PDFImage {
     ) {
       this.needsDecode = true;
       // Do some preprocessing to avoid more math.
-      var max = (1 << bitsPerComponent) - 1;
+      const max = (1 << bitsPerComponent) - 1;
       this.decodeCoefficients = [];
       this.decodeAddends = [];
       const isIndexed = this.colorSpace && this.colorSpace.name === "Indexed";
-      for (var i = 0, j = 0; i < this.decode.length; i += 2, ++j) {
-        var dmin = this.decode[i];
-        var dmax = this.decode[i + 1];
+      for (let i = 0, j = 0; i < this.decode.length; i += 2, ++j) {
+        const dmin = this.decode[i];
+        const dmax = this.decode[i + 1];
         this.decodeCoefficients[j] = isIndexed
           ? (dmax - dmin) / max
           : dmax - dmin;
@@ -226,7 +225,7 @@ class PDFImage {
       });
     } else if (mask) {
       if (isStream(mask)) {
-        var maskDict = mask.dict,
+        const maskDict = mask.dict,
           imageMask = maskDict.get("ImageMask", "IM");
         if (!imageMask) {
           warn("Ignoring /Mask in image without /ImageMask.");
@@ -310,10 +309,10 @@ class PDFImage {
     // In particular, if inverseDecode is true, then the array we return must
     // have a length of |computedLength|.
 
-    var computedLength = ((width + 7) >> 3) * height;
-    var actualLength = imgArray.byteLength;
-    var haveFullData = computedLength === actualLength;
-    var data, i;
+    const computedLength = ((width + 7) >> 3) * height;
+    const actualLength = imgArray.byteLength;
+    const haveFullData = computedLength === actualLength;
+    let data, i;
 
     if (imageIsFromDecodeStream && (!inverseDecode || haveFullData)) {
       // imgArray came from a DecodeStream and its data is in an appropriate
@@ -360,13 +359,13 @@ class PDFImage {
   }
 
   decodeBuffer(buffer) {
-    var bpc = this.bpc;
-    var numComps = this.numComps;
+    const bpc = this.bpc;
+    const numComps = this.numComps;
 
-    var decodeAddends = this.decodeAddends;
-    var decodeCoefficients = this.decodeCoefficients;
-    var max = (1 << bpc) - 1;
-    var i, ii;
+    const decodeAddends = this.decodeAddends;
+    const decodeCoefficients = this.decodeCoefficients;
+    const max = (1 << bpc) - 1;
+    let i, ii;
 
     if (bpc === 1) {
       // If the buffer needed decode that means it just needs to be inverted.
@@ -375,9 +374,9 @@ class PDFImage {
       }
       return;
     }
-    var index = 0;
+    let index = 0;
     for (i = 0, ii = this.width * this.height; i < ii; i++) {
-      for (var j = 0; j < numComps; j++) {
+      for (let j = 0; j < numComps; j++) {
        buffer[index] = decodeAndClamp(
          buffer[index],
          decodeAddends[j],
@@ -390,19 +389,19 @@ class PDFImage {
   }
 
   getComponents(buffer) {
-    var bpc = this.bpc;
+    const bpc = this.bpc;
 
     // This image doesn't require any extra work.
     if (bpc === 8) {
       return buffer;
     }
 
-    var width = this.width;
-    var height = this.height;
-    var numComps = this.numComps;
+    const width = this.width;
+    const height = this.height;
+    const numComps = this.numComps;
 
-    var length = width * height * numComps;
-    var bufferPos = 0;
+    const length = width * height * numComps;
+    let bufferPos = 0;
     let output;
     if (bpc <= 8) {
       output = new Uint8Array(length);
@@ -411,17 +410,17 @@ class PDFImage {
     } else {
       output = new Uint32Array(length);
     }
-    var rowComps = width * numComps;
+    const rowComps = width * numComps;
 
-    var max = (1 << bpc) - 1;
-    var i = 0,
+    const max = (1 << bpc) - 1;
+    let i = 0,
       ii,
       buf;
 
     if (bpc === 1) {
       // Optimization for reading 1 bpc images.
-      var mask, loop1End, loop2End;
-      for (var j = 0; j < height; j++) {
+      let mask, loop1End, loop2End;
+      for (let j = 0; j < height; j++) {
        loop1End = i + (rowComps & ~7);
        loop2End = i + rowComps;
 
@@ -451,7 +450,7 @@ class PDFImage {
       }
     } else {
       // The general case that handles all other bpc values.
-      var bits = 0;
+      let bits = 0;
       buf = 0;
       for (i = 0, ii = length; i < ii; ++i) {
         if (i % rowComps === 0) {
@@ -464,7 +463,7 @@ class PDFImage {
           bits += 8;
         }
 
-        var remainingBits = bits - bpc;
+        const remainingBits = bits - bpc;
         let value = buf >> remainingBits;
         if (value < 0) {
           value = 0;
@@ -489,9 +488,9 @@ class PDFImage {
         'PDFImage.fillOpacity: Unsupported "rgbaBuf" type.'
       );
     }
-    var smask = this.smask;
-    var mask = this.mask;
-    var alphaBuf, sw, sh, i, ii, j;
+    const smask = this.smask;
+    const mask = this.mask;
+    let alphaBuf, sw, sh, i, ii, j;
 
     if (smask) {
       sw = smask.width;
@@ -521,13 +520,13 @@ class PDFImage {
         // Color key mask: if any of the components are outside the range
         // then they should be painted.
         alphaBuf = new Uint8ClampedArray(width * height);
-        var numComps = this.numComps;
+        const numComps = this.numComps;
         for (i = 0, ii = width * height; i < ii; ++i) {
-          var opacity = 0;
-          var imageOffset = i * numComps;
+          let opacity = 0;
+          const imageOffset = i * numComps;
           for (j = 0; j < numComps; ++j) {
-            var color = image[imageOffset + j];
-            var maskOffset = j * 2;
+            const color = image[imageOffset + j];
+            const maskOffset = j * 2;
             if (color < mask[maskOffset] || color > mask[maskOffset + 1]) {
               opacity = 255;
               break;
@@ -562,17 +561,17 @@ class PDFImage {
         'PDFImage.undoPreblend: Unsupported "buffer" type.'
       );
     }
-    var matte = this.smask && this.smask.matte;
+    const matte = this.smask && this.smask.matte;
     if (!matte) {
       return;
     }
-    var matteRgb = this.colorSpace.getRgb(matte, 0);
-    var matteR = matteRgb[0];
-    var matteG = matteRgb[1];
-    var matteB = matteRgb[2];
-    var length = width * height * 4;
-    for (var i = 0; i < length; i += 4) {
-      var alpha = buffer[i + 3];
+    const matteRgb = this.colorSpace.getRgb(matte, 0);
+    const matteR = matteRgb[0];
+    const matteG = matteRgb[1];
+    const matteB = matteRgb[2];
+    const length = width * height * 4;
+    for (let i = 0; i < length; i += 4) {
+      const alpha = buffer[i + 3];
       if (alpha === 0) {
         // according formula we have to get Infinity in all components
         // making it white (typical paper color) should be okay
@@ -581,7 +580,7 @@ class PDFImage {
         buffer[i + 2] = 255;
         continue;
       }
-      var k = 255 / alpha;
+      const k = 255 / alpha;
       buffer[i] = (buffer[i] - matteR) * k + matteR;
       buffer[i + 1] = (buffer[i + 1] - matteG) * k + matteG;
       buffer[i + 2] = (buffer[i + 2] - matteB) * k + matteB;
@@ -589,9 +588,9 @@ class PDFImage {
   }
 
   createImageData(forceRGBA = false) {
-    var drawWidth = this.drawWidth;
-    var drawHeight = this.drawHeight;
-    var imgData = {
+    const drawWidth = this.drawWidth;
+    const drawHeight = this.drawHeight;
+    const imgData = {
       width: drawWidth,
       height: drawHeight,
       kind: 0,
@@ -599,14 +598,14 @@ class PDFImage {
       // Other fields are filled in below.
     };
 
-    var numComps = this.numComps;
-    var originalWidth = this.width;
-    var originalHeight = this.height;
-    var bpc = this.bpc;
+    const numComps = this.numComps;
+    const originalWidth = this.width;
+    const originalHeight = this.height;
+    const bpc = this.bpc;
 
     // Rows start at byte boundary.
-    var rowBytes = (originalWidth * numComps * bpc + 7) >> 3;
-    var imgArray;
+    const rowBytes = (originalWidth * numComps * bpc + 7) >> 3;
+    let imgArray;
 
     if (!forceRGBA) {
       // If it is a 1-bit-per-pixel grayscale (i.e. black-and-white) image
@@ -616,7 +615,7 @@ class PDFImage {
       //
       // Similarly, if it is a 24-bit-per pixel RGB image without any
       // complications, we avoid expanding by 1.333x to RGBA form.
-      var kind;
+      let kind;
       if (this.colorSpace.name === "DeviceGray" && bpc === 1) {
         kind = ImageKind.GRAYSCALE_1BPP;
       } else if (
@@ -644,7 +643,7 @@ class PDFImage {
         if (this.image instanceof DecodeStream) {
           imgData.data = imgArray;
         } else {
-          var newArray = new Uint8ClampedArray(imgArray.length);
+          const newArray = new Uint8ClampedArray(imgArray.length);
           newArray.set(imgArray);
           imgData.data = newArray;
         }
@@ -654,8 +653,8 @@ class PDFImage {
             kind === ImageKind.GRAYSCALE_1BPP,
             "PDFImage.createImageData: The image must be grayscale."
           );
-          var buffer = imgData.data;
-          for (var i = 0, ii = buffer.length; i < ii; i++) {
+          const buffer = imgData.data;
+          for (let i = 0, ii = buffer.length; i < ii; i++) {
            buffer[i] ^= 0xff;
          }
        }
@@ -685,14 +684,14 @@ class PDFImage {
 
     imgArray = this.getImageBytes(originalHeight * rowBytes);
     // imgArray can be incomplete (e.g. after CCITT fax encoding).
-    var actualHeight =
+    const actualHeight =
       0 | (((imgArray.length / rowBytes) * drawHeight) / originalHeight);
 
-    var comps = this.getComponents(imgArray);
+    const comps = this.getComponents(imgArray);
 
     // If opacity data is present, use RGBA_32BPP form. Otherwise, use the
     // more compact RGB_24BPP form if allowable.
-    var alpha01, maybeUndoPreblend;
+    let alpha01, maybeUndoPreblend;
     if (!forceRGBA && !this.smask && !this.mask) {
       imgData.kind = ImageKind.RGB_24BPP;
       imgData.data = new Uint8ClampedArray(drawWidth * drawHeight * 3);
@@ -745,23 +744,23 @@ class PDFImage {
        'PDFImage.fillGrayBuffer: Unsupported "buffer" type.'
      );
    }
-    var numComps = this.numComps;
+    const numComps = this.numComps;
    if (numComps !== 1) {
      throw new FormatError(
        `Reading gray scale from a color image: ${numComps}`
      );
    }
 
-    var width = this.width;
-    var height = this.height;
-    var bpc = this.bpc;
+    const width = this.width;
+    const height = this.height;
+    const bpc = this.bpc;
 
    // rows start at byte boundary
-    var rowBytes = (width * numComps * bpc + 7) >> 3;
-    var imgArray = this.getImageBytes(height * rowBytes);
+    const rowBytes = (width * numComps * bpc + 7) >> 3;
+    const imgArray = this.getImageBytes(height * rowBytes);
 
-    var comps = this.getComponents(imgArray);
-    var i, length;
+    const comps = this.getComponents(imgArray);
+    let i, length;
 
    if (bpc === 1) {
      // inline decoding (= inversion) for 1 bpc images
@@ -785,7 +784,7 @@ class PDFImage {
     }
     length = width * height;
     // we aren't using a colorspace so we need to scale the value
-    var scale = 255 / ((1 << bpc) - 1);
+    const scale = 255 / ((1 << bpc) - 1);
     for (i = 0; i < length; ++i) {
       buffer[i] = scale * comps[i];
     }
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
/* eslint-disable no-var */
|
||||
|
||||
import {
|
||||
AbortException,
|
||||
@ -69,7 +68,7 @@ class WorkerTask {
|
||||
|
||||
class WorkerMessageHandler {
|
||||
static setup(handler, port) {
|
||||
var testMessageProcessed = false;
|
||||
let testMessageProcessed = false;
|
||||
handler.on("test", function wphSetupTest(data) {
|
||||
if (testMessageProcessed) {
|
||||
return; // we already processed 'test' message once
|
||||
@ -100,10 +99,10 @@ class WorkerMessageHandler {
|
||||
static createDocumentHandler(docParams, port) {
|
||||
// This context is actually holds references on pdfManager and handler,
|
||||
// until the latter is destroyed.
|
||||
var pdfManager;
|
||||
var terminated = false;
|
||||
var cancelXHRs = null;
|
||||
var WorkerTasks = [];
|
||||
let pdfManager;
|
||||
let terminated = false;
|
||||
let cancelXHRs = null;
|
||||
const WorkerTasks = [];
|
||||
const verbosity = getVerbosityLevel();
|
||||
|
||||
const apiVersion = docParams.apiVersion;
|
||||
@ -152,10 +151,10 @@ class WorkerMessageHandler {
|
||||
}
|
||||
}
|
||||
|
||||
var docId = docParams.docId;
|
||||
var docBaseUrl = docParams.docBaseUrl;
|
||||
var workerHandlerName = docParams.docId + "_worker";
|
||||
var handler = new MessageHandler(workerHandlerName, docId, port);
|
||||
const docId = docParams.docId;
|
||||
const docBaseUrl = docParams.docBaseUrl;
|
||||
const workerHandlerName = docParams.docId + "_worker";
|
||||
let handler = new MessageHandler(workerHandlerName, docId, port);
|
||||
|
||||
// Ensure that postMessage transfers are always correctly enabled/disabled,
|
||||
// to prevent "DataCloneError" in browsers without transfers support.
|
||||
@ -173,7 +172,7 @@ class WorkerMessageHandler {
|
||||
|
||||
function finishWorkerTask(task) {
|
||||
task.finish();
|
||||
var i = WorkerTasks.indexOf(task);
|
||||
const i = WorkerTasks.indexOf(task);
|
||||
WorkerTasks.splice(i, 1);
|
||||
}
|
||||
|
||||
@ -208,10 +207,10 @@ class WorkerMessageHandler {
|
||||
}
|
||||
|
||||
function getPdfManager(data, evaluatorOptions, enableXfa) {
|
||||
var pdfManagerCapability = createPromiseCapability();
|
||||
const pdfManagerCapability = createPromiseCapability();
|
||||
let newPdfManager;
|
||||
|
||||
var source = data.source;
|
||||
const source = data.source;
|
||||
if (source.data) {
|
||||
try {
|
||||
newPdfManager = new LocalPdfManager(
|
||||
@ -229,7 +228,7 @@ class WorkerMessageHandler {
|
||||
return pdfManagerCapability.promise;
|
||||
}
|
||||
|
||||
var pdfStream,
|
||||
let pdfStream,
|
||||
cachedChunks = [];
|
||||
try {
|
||||
pdfStream = new PDFWorkerStream(handler);
|
||||
@ -238,7 +237,7 @@ class WorkerMessageHandler {
|
||||
return pdfManagerCapability.promise;
|
||||
}
|
||||
|
||||
var fullRequest = pdfStream.getFullReader();
|
||||
const fullRequest = pdfStream.getFullReader();
|
||||
fullRequest.headersReady
|
||||
.then(function () {
|
||||
if (!fullRequest.isRangeSupported) {
|
||||
@ -246,7 +245,7 @@ class WorkerMessageHandler {
|
||||
}
|
||||
|
||||
// We don't need auto-fetch when streaming is enabled.
|
||||
var disableAutoFetch =
|
||||
const disableAutoFetch =
|
||||
source.disableAutoFetch || fullRequest.isStreamingSupported;
|
||||
newPdfManager = new NetworkPdfManager(
|
||||
docId,
|
||||
@ -278,9 +277,9 @@ class WorkerMessageHandler {
|
||||
cancelXHRs = null;
|
||||
});
|
||||
|
||||
var loaded = 0;
|
||||
var flushChunks = function () {
|
||||
var pdfFile = arraysToBytes(cachedChunks);
|
||||
let loaded = 0;
|
||||
const flushChunks = function () {
|
||||
const pdfFile = arraysToBytes(cachedChunks);
|
||||
if (source.length && pdfFile.length !== source.length) {
|
||||
warn("reported HTTP length is different from actual");
|
||||
}
|
||||
@ -300,8 +299,8 @@ class WorkerMessageHandler {
|
||||
}
|
||||
cachedChunks = [];
|
||||
};
|
||||
var readPromise = new Promise(function (resolve, reject) {
|
||||
var readChunk = function ({ value, done }) {
|
||||
const readPromise = new Promise(function (resolve, reject) {
|
||||
const readChunk = function ({ value, done }) {
|
||||
try {
|
||||
ensureNotTerminated();
|
||||
if (done) {
|
||||
@ -355,7 +354,7 @@ class WorkerMessageHandler {
|
||||
ensureNotTerminated();
|
||||
|
||||
if (ex instanceof PasswordException) {
|
||||
var task = new WorkerTask(`PasswordException: response ${ex.code}`);
|
||||
const task = new WorkerTask(`PasswordException: response ${ex.code}`);
|
||||
startWorkerTask(task);
|
||||
|
||||
handler
|
||||
@ -406,7 +405,7 @@ class WorkerMessageHandler {
|
||||
|
||||
ensureNotTerminated();
|
||||
|
||||
var evaluatorOptions = {
|
||||
const evaluatorOptions = {
|
||||
maxImageSize: data.maxImageSize,
|
||||
disableFontFace: data.disableFontFace,
|
||||
ignoreErrors: data.ignoreErrors,
|
||||
@ -656,9 +655,9 @@ class WorkerMessageHandler {
|
||||
);
|
||||
|
||||
handler.on("GetOperatorList", function wphSetupRenderPage(data, sink) {
|
||||
var pageIndex = data.pageIndex;
|
||||
const pageIndex = data.pageIndex;
|
||||
pdfManager.getPage(pageIndex).then(function (page) {
|
||||
var task = new WorkerTask(`GetOperatorList: page ${pageIndex}`);
|
||||
const task = new WorkerTask(`GetOperatorList: page ${pageIndex}`);
|
||||
startWorkerTask(task);
|
||||
|
||||
// NOTE: Keep this condition in sync with the `info` helper function.
|
||||
@ -707,12 +706,12 @@ class WorkerMessageHandler {
|
||||
});
|
||||
|
||||
handler.on("GetTextContent", function wphExtractText(data, sink) {
|
||||
var pageIndex = data.pageIndex;
|
||||
const pageIndex = data.pageIndex;
|
||||
sink.onPull = function (desiredSize) {};
|
||||
sink.onCancel = function (reason) {};
|
||||
|
||||
pdfManager.getPage(pageIndex).then(function (page) {
|
||||
var task = new WorkerTask("GetTextContent: page " + pageIndex);
|
||||
const task = new WorkerTask("GetTextContent: page " + pageIndex);
|
||||
startWorkerTask(task);
|
||||
|
||||
// NOTE: Keep this condition in sync with the `info` helper function.
|
||||
@ -806,7 +805,7 @@ class WorkerMessageHandler {
|
||||
}
|
||||
|
||||
static initializeFromPort(port) {
|
||||
var handler = new MessageHandler("worker", "main", port);
|
||||
const handler = new MessageHandler("worker", "main", port);
|
||||
WorkerMessageHandler.setup(handler, port);
|
||||
handler.send("ready", null);
|
||||
}
|
||||
|