Remove variable shadowing from the JavaScript files in the src/core/ folder
*This is part of a series of patches that split PR 11566 into smaller chunks, to make reviewing more feasible.* Once all the code has been fixed, we'll be able to enable the ESLint `no-shadow` rule; see https://eslint.org/docs/rules/no-shadow
commit 216cbca16c
parent b86df97725
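For context on the rule being worked toward: no-shadow reports any declaration that reuses the name of a binding from an enclosing scope, since a reader (or a later patch) can easily pick up the wrong binding. A minimal, hypothetical illustration — the names are invented, not taken from the diff below:

    function describeChunks(chunk) {
      let summary = "got " + chunk + ";";
      // ESLint no-shadow would flag this loop counter: it hides the
      // parameter `chunk`, which becomes unreachable inside the loop.
      for (let chunk = 0; chunk < 2; chunk++) {
        summary += " " + chunk;
      }
      return summary;
    }
    console.log(describeChunks("x")); // "got x; 0 1"

Once src/core/ is clean, enabling the rule is presumably a one-line ESLint change along the lines of "no-shadow": "error"; the exact configuration is left to the follow-up patch.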
@@ -519,15 +519,15 @@ class ChunkedStreamManager
     }

     const loadedRequests = [];
-    for (let chunk = beginChunk; chunk < endChunk; ++chunk) {
+    for (let curChunk = beginChunk; curChunk < endChunk; ++curChunk) {
       // The server might return more chunks than requested.
-      const requestIds = this.requestsByChunk[chunk] || [];
-      delete this.requestsByChunk[chunk];
+      const requestIds = this.requestsByChunk[curChunk] || [];
+      delete this.requestsByChunk[curChunk];

       for (const requestId of requestIds) {
         const chunksNeeded = this.chunksNeededByRequest[requestId];
-        if (chunk in chunksNeeded) {
-          delete chunksNeeded[chunk];
+        if (curChunk in chunksNeeded) {
+          delete chunksNeeded[curChunk];
         }

         if (!isEmptyObj(chunksNeeded)) {
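In the hunk above the loop counter reused the name `chunk`, hiding an existing `chunk` binding in the enclosing method scope (that collision is the whole reason for the rename). After the fix both names stay readable; a reduced sketch with invented data:

    const chunk = { begin: 0, end: 2 }; // stand-in for the outer binding
    for (let curChunk = chunk.begin; curChunk < chunk.end; ++curChunk) {
      // `curChunk` is the loop index, `chunk` the outer object; neither
      // shadows the other.
      console.log(curChunk, chunk.end);
    }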
@@ -216,8 +216,8 @@ class Page
       // Fetching the individual streams from the array.
       const xref = this.xref;
       const streams = [];
-      for (const stream of content) {
-        streams.push(xref.fetchIfRef(stream));
+      for (const subStream of content) {
+        streams.push(xref.fetchIfRef(subStream));
       }
       stream = new StreamsSequenceStream(streams);
     } else if (isStream(content)) {
@@ -629,7 +629,7 @@ var PartialEvaluator = (function PartialEvaluatorClosure() {
           pdfFunctionFactory: this.pdfFunctionFactory,
         })
           .then(imageObj => {
-            var imgData = imageObj.createImageData(/* forceRGBA = */ false);
+            imgData = imageObj.createImageData(/* forceRGBA = */ false);

             if (this.parsingType3Font) {
               return this.handler.sendWithPromise(
@@ -2479,16 +2479,16 @@ var PartialEvaluator = (function PartialEvaluatorClosure() {
       properties.hasEncoding = !!baseEncodingName || differences.length > 0;
       properties.dict = dict;
       return toUnicodePromise
-        .then(toUnicode => {
-          properties.toUnicode = toUnicode;
+        .then(readToUnicode => {
+          properties.toUnicode = readToUnicode;
           return this.buildToUnicode(properties);
         })
-        .then(toUnicode => {
-          properties.toUnicode = toUnicode;
+        .then(builtToUnicode => {
+          properties.toUnicode = builtToUnicode;
           if (cidToGidBytes) {
             properties.cidToGidMap = this.readCidToGidMap(
               cidToGidBytes,
-              toUnicode
+              builtToUnicode
             );
           }
           return properties;
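Both `.then()` handlers used to name their argument `toUnicode`, so the bare `toUnicode` passed into `readCidToGidMap(...)` could only ever mean the innermost one. Splitting the names into `readToUnicode` and `builtToUnicode` makes each reference unambiguous. The shape of the fix, as a runnable sketch with made-up stage values:

    Promise.resolve("parsed-map")
      .then(readToUnicode => {
        return readToUnicode + "+built";
      })
      .then(builtToUnicode => {
        // With distinct parameter names there is no question which
        // stage's result is being used here.
        console.log(builtToUnicode); // "parsed-map+built"
      });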
@@ -3092,21 +3092,21 @@ var PartialEvaluator = (function PartialEvaluatorClosure() {
       };
       const widths = dict.get("Widths");
       return this.extractDataStructures(dict, dict, properties).then(
-        properties => {
+        newProperties => {
           if (widths) {
             const glyphWidths = [];
             let j = firstChar;
             for (let i = 0, ii = widths.length; i < ii; i++) {
               glyphWidths[j++] = this.xref.fetchIfRef(widths[i]);
             }
-            properties.widths = glyphWidths;
+            newProperties.widths = glyphWidths;
           } else {
-            properties.widths = this.buildCharCodeToWidth(
+            newProperties.widths = this.buildCharCodeToWidth(
               metrics.widths,
-              properties
+              newProperties
             );
           }
-          return new Font(baseFontName, null, properties);
+          return new Font(baseFontName, null, newProperties);
         }
       );
     }
@@ -3212,13 +3212,13 @@ var PartialEvaluator = (function PartialEvaluatorClosure() {
         .then(() => {
           return this.extractDataStructures(dict, baseDict, properties);
         })
-        .then(properties => {
-          this.extractWidths(dict, descriptor, properties);
+        .then(newProperties => {
+          this.extractWidths(dict, descriptor, newProperties);

           if (type === "Type3") {
-            properties.isType3Font = true;
+            newProperties.isType3Font = true;
           }
-          return new Font(fontName.name, fontFile, properties);
+          return new Font(fontName.name, fontFile, newProperties);
         });
     },
   };
@@ -3352,8 +3352,8 @@ var TranslatedFont = (function TranslatedFontClosure() {
           })
           .catch(function(reason) {
             warn(`Type3 font resource "${key}" is not available.`);
-            var operatorList = new OperatorList();
-            charProcOperatorList[key] = operatorList.getIR();
+            const dummyOperatorList = new OperatorList();
+            charProcOperatorList[key] = dummyOperatorList.getIR();
           });
       });
     }
@@ -345,12 +345,7 @@ var FontRendererFactory = (function FontRendererFactoryClosure() {
       }
     }

-    function compileCharString(code, cmds, font, glyphId) {
-      var stack = [];
-      var x = 0,
-        y = 0;
-      var stems = 0;
-
+    function compileCharString(charStringCode, cmds, font, glyphId) {
       function moveTo(x, y) {
         cmds.push({ cmd: "moveTo", args: [x, y] });
       }
@@ -361,6 +356,11 @@ var FontRendererFactory = (function FontRendererFactoryClosure() {
       cmds.push({ cmd: "bezierCurveTo", args: [x1, y1, x2, y2, x, y] });
     }

+    var stack = [];
+    var x = 0,
+      y = 0;
+    var stems = 0;
+
     function parse(code) {
       var i = 0;
       while (i < code.length) {
@@ -719,7 +719,7 @@ var FontRendererFactory = (function FontRendererFactoryClosure() {
         }
       }
     }
-    parse(code);
+    parse(charStringCode);
   }

   const NOOP = [];
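The `compileCharString` hunks are the one place in this commit where code moves as well as being renamed: the outer parameter becomes `charStringCode` so it no longer collides with the `code` parameter of the nested `parse()` helper, and the `stack`/`x`/`y`/`stems` declarations are relocated to sit next to `parse()`, which uses them. The renaming pattern in isolation, with an invented body:

    function compileCharString(charStringCode) {
      function parse(code) {
        // `code` is parse()'s own argument; the original input is still
        // reachable under the distinct name `charStringCode`.
        return code.length;
      }
      return parse(charStringCode);
    }
    console.log(compileCharString([42, 7])); // 2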
@@ -1269,7 +1269,6 @@ var Font = (function FontClosure() {

     fallbackToSystemFont: function Font_fallbackToSystemFont() {
       this.missingFile = true;
-      var charCode, unicode;
       // The file data is not specified. Trying to fix the font name
       // to be used with the canvas.font.
       var name = this.name;
@@ -1303,17 +1302,17 @@ var Font = (function FontClosure() {
         // Standard fonts might be embedded as CID font without glyph mapping.
         // Building one based on GlyphMapForStandardFonts.
         const map = [];
-        for (charCode in GlyphMapForStandardFonts) {
+        for (const charCode in GlyphMapForStandardFonts) {
           map[+charCode] = GlyphMapForStandardFonts[charCode];
         }
         if (/Arial-?Black/i.test(name)) {
           var SupplementalGlyphMapForArialBlack = getSupplementalGlyphMapForArialBlack();
-          for (charCode in SupplementalGlyphMapForArialBlack) {
+          for (const charCode in SupplementalGlyphMapForArialBlack) {
             map[+charCode] = SupplementalGlyphMapForArialBlack[charCode];
           }
         } else if (/Calibri/i.test(name)) {
           const SupplementalGlyphMapForCalibri = getSupplementalGlyphMapForCalibri();
-          for (charCode in SupplementalGlyphMapForCalibri) {
+          for (const charCode in SupplementalGlyphMapForCalibri) {
             map[+charCode] = SupplementalGlyphMapForCalibri[charCode];
           }
         }
@@ -1354,7 +1353,7 @@ var Font = (function FontClosure() {
           if (!this.composite) {
             var glyphName =
               this.differences[charCode] || this.defaultEncoding[charCode];
-            unicode = getUnicodeForGlyph(glyphName, glyphsUnicodeMap);
+            const unicode = getUnicodeForGlyph(glyphName, glyphsUnicodeMap);
             if (unicode !== -1) {
               unicodeCharCode = unicode;
             }
@@ -1368,7 +1367,7 @@ var Font = (function FontClosure() {
         if (/Verdana/i.test(name)) {
           // Fixes issue11242_reduced.pdf
           const GlyphMapForStandardFonts = getGlyphMapForStandardFonts();
-          for (charCode in GlyphMapForStandardFonts) {
+          for (const charCode in GlyphMapForStandardFonts) {
             map[+charCode] = GlyphMapForStandardFonts[charCode];
           }
         }
@@ -1409,7 +1408,7 @@ var Font = (function FontClosure() {
     tables["post"] = null;

     for (let i = 0; i < numTables; i++) {
-      const table = readTableEntry(font);
+      const table = readTableEntry(file);
       if (!VALID_TABLES.includes(table.tag)) {
         continue; // skipping table if it's not a required or optional table
       }
@@ -1529,7 +1528,7 @@ var Font = (function FontClosure() {
      * Read the appropriate subtable from the cmap according to 9.6.6.4 from
      * PDF spec
      */
-    function readCmapTable(cmap, font, isSymbolicFont, hasEncoding) {
+    function readCmapTable(cmap, file, isSymbolicFont, hasEncoding) {
       if (!cmap) {
         warn("No cmap table available.");
         return {
@@ -1540,11 +1539,11 @@ var Font = (function FontClosure() {
       };
     }
     var segment;
-    var start = (font.start ? font.start : 0) + cmap.offset;
-    font.pos = start;
+    var start = (file.start ? file.start : 0) + cmap.offset;
+    file.pos = start;

-    font.getUint16(); // version
-    var numTables = font.getUint16();
+    file.getUint16(); // version
+    var numTables = file.getUint16();

     var potentialTable;
     var canBreak = false;
@@ -1555,9 +1554,9 @@ var Font = (function FontClosure() {
       // The following takes advantage of the fact that the tables are sorted
       // to work.
       for (var i = 0; i < numTables; i++) {
-        var platformId = font.getUint16();
-        var encodingId = font.getUint16();
-        var offset = font.getInt32() >>> 0;
+        var platformId = file.getUint16();
+        var encodingId = file.getUint16();
+        var offset = file.getInt32() >>> 0;
         var useTable = false;

         // Sometimes there are multiple of the same type of table. Default
@@ -1605,9 +1604,9 @@ var Font = (function FontClosure() {
       }

       if (potentialTable) {
-        font.pos = start + potentialTable.offset;
+        file.pos = start + potentialTable.offset;
       }
-      if (!potentialTable || font.peekByte() === -1) {
+      if (!potentialTable || file.peekByte() === -1) {
         warn("Could not find a preferred cmap table.");
         return {
           platformId: -1,
@@ -1617,9 +1616,9 @@ var Font = (function FontClosure() {
         };
       }

-      var format = font.getUint16();
-      font.getUint16(); // length
-      font.getUint16(); // language
+      var format = file.getUint16();
+      file.getUint16(); // length
+      file.getUint16(); // language

       var hasShortCmap = false;
       var mappings = [];
@@ -1628,7 +1627,7 @@ var Font = (function FontClosure() {
       // TODO(mack): refactor this cmap subtable reading logic out
       if (format === 0) {
         for (j = 0; j < 256; j++) {
-          var index = font.getByte();
+          var index = file.getByte();
           if (!index) {
             continue;
           }
@@ -1641,26 +1640,26 @@ var Font = (function FontClosure() {
       } else if (format === 4) {
         // re-creating the table in format 4 since the encoding
         // might be changed
-        var segCount = font.getUint16() >> 1;
-        font.getBytes(6); // skipping range fields
+        var segCount = file.getUint16() >> 1;
+        file.getBytes(6); // skipping range fields
         var segIndex,
           segments = [];
         for (segIndex = 0; segIndex < segCount; segIndex++) {
-          segments.push({ end: font.getUint16() });
+          segments.push({ end: file.getUint16() });
         }
-        font.getUint16();
+        file.getUint16();
         for (segIndex = 0; segIndex < segCount; segIndex++) {
-          segments[segIndex].start = font.getUint16();
+          segments[segIndex].start = file.getUint16();
         }

         for (segIndex = 0; segIndex < segCount; segIndex++) {
-          segments[segIndex].delta = font.getUint16();
+          segments[segIndex].delta = file.getUint16();
         }

         var offsetsCount = 0;
         for (segIndex = 0; segIndex < segCount; segIndex++) {
           segment = segments[segIndex];
-          var rangeOffset = font.getUint16();
+          var rangeOffset = file.getUint16();
           if (!rangeOffset) {
             segment.offsetIndex = -1;
             continue;
@@ -1676,7 +1675,7 @@ var Font = (function FontClosure() {

         var offsets = [];
         for (j = 0; j < offsetsCount; j++) {
-          offsets.push(font.getUint16());
+          offsets.push(file.getUint16());
         }

         for (segIndex = 0; segIndex < segCount; segIndex++) {
@@ -1705,11 +1704,11 @@ var Font = (function FontClosure() {
         // table. (This looks weird, so I can have missed something), this
         // works on Linux but seems to fails on Mac so let's rewrite the
         // cmap table to a 3-1-4 style
-        var firstCode = font.getUint16();
-        var entryCount = font.getUint16();
+        var firstCode = file.getUint16();
+        var entryCount = file.getUint16();

         for (j = 0; j < entryCount; j++) {
-          glyphId = font.getUint16();
+          glyphId = file.getUint16();
           var charCode = firstCode + j;

           mappings.push({
@@ -1747,7 +1746,7 @@ var Font = (function FontClosure() {
     }

     function sanitizeMetrics(
-      font,
+      file,
       header,
       metrics,
       numGlyphs,
@@ -1760,21 +1759,21 @@ var Font = (function FontClosure() {
         return;
       }

-      font.pos = (font.start ? font.start : 0) + header.offset;
-      font.pos += 4; // version
-      font.pos += 2; // ascent
-      font.pos += 2; // descent
-      font.pos += 2; // linegap
-      font.pos += 2; // adv_width_max
-      font.pos += 2; // min_sb1
-      font.pos += 2; // min_sb2
-      font.pos += 2; // max_extent
-      font.pos += 2; // caret_slope_rise
-      font.pos += 2; // caret_slope_run
-      font.pos += 2; // caret_offset
-      font.pos += 8; // reserved
-      font.pos += 2; // format
-      var numOfMetrics = font.getUint16();
+      file.pos = (file.start ? file.start : 0) + header.offset;
+      file.pos += 4; // version
+      file.pos += 2; // ascent
+      file.pos += 2; // descent
+      file.pos += 2; // linegap
+      file.pos += 2; // adv_width_max
+      file.pos += 2; // min_sb1
+      file.pos += 2; // min_sb2
+      file.pos += 2; // max_extent
+      file.pos += 2; // caret_slope_rise
+      file.pos += 2; // caret_slope_run
+      file.pos += 2; // caret_offset
+      file.pos += 8; // reserved
+      file.pos += 2; // format
+      var numOfMetrics = file.getUint16();

       if (numOfMetrics > numGlyphs) {
         info(
@@ -2107,7 +2106,7 @@ var Font = (function FontClosure() {
       };
     }

-    function readPostScriptTable(post, properties, maxpNumGlyphs) {
+    function readPostScriptTable(post, propertiesObj, maxpNumGlyphs) {
       var start = (font.start ? font.start : 0) + post.offset;
       font.pos = start;

@@ -2168,12 +2167,12 @@ var Font = (function FontClosure() {
         default:
           warn("Unknown/unsupported post table version " + version);
           valid = false;
-          if (properties.defaultEncoding) {
-            glyphNames = properties.defaultEncoding;
+          if (propertiesObj.defaultEncoding) {
+            glyphNames = propertiesObj.defaultEncoding;
           }
           break;
       }
-      properties.glyphNames = glyphNames;
+      propertiesObj.glyphNames = glyphNames;
       return valid;
     }

@@ -2706,8 +2705,7 @@ var Font = (function FontClosure() {
         data: createPostTable(properties),
       };

-      var charCodeToGlyphId = [],
-        charCode;
+      const charCodeToGlyphId = [];

       // Helper function to try to skip mapping of empty glyphs.
       function hasGlyph(glyphId) {
@@ -2773,7 +2771,7 @@ var Font = (function FontClosure() {
         baseEncoding = getEncoding(properties.baseEncodingName);
       }
       var glyphsUnicodeMap = getGlyphsUnicode();
-      for (charCode = 0; charCode < 256; charCode++) {
+      for (let charCode = 0; charCode < 256; charCode++) {
         var glyphName, standardGlyphName;
         if (this.differences && charCode in this.differences) {
           glyphName = this.differences[charCode];
@@ -2840,7 +2838,7 @@ var Font = (function FontClosure() {
       // (e.g. 0x2013) which when masked would overwrite other values in the
       // cmap.
       for (let i = 0; i < cmapMappingsLength; ++i) {
-        charCode = cmapMappings[i].charCode;
+        let charCode = cmapMappings[i].charCode;
         if (
           cmapPlatformId === 3 &&
           charCode >= 0xf000 &&
@@ -3000,7 +2998,7 @@ var Font = (function FontClosure() {
           // to begin with.
           continue;
         }
-        for (var i = 0, ii = charCodes.length; i < ii; i++) {
+        for (let i = 0, ii = charCodes.length; i < ii; i++) {
           var charCode = charCodes[i];
           // Find a fontCharCode that maps to the base and accent glyphs.
           // If one doesn't exists, create it.
@@ -3089,7 +3087,7 @@ var Font = (function FontClosure() {
       var charstrings = font.charstrings;
       var cffWidths = font.cff ? font.cff.widths : null;
       var hmtx = "\x00\x00\x00\x00"; // Fake .notdef
-      for (var i = 1, ii = numGlyphs; i < ii; i++) {
+      for (let i = 1, ii = numGlyphs; i < ii; i++) {
         var width = 0;
         if (charstrings) {
           var charstring = charstrings[i - 1];
@@ -3564,8 +3562,8 @@ var Type1Font = (function Type1FontClosure() {
       SEAC_ANALYSIS_ENABLED
     );
     var data = eexecBlockParser.extractFontProgram(properties);
-    for (var info in data.properties) {
-      properties[info] = data.properties[info];
+    for (const key in data.properties) {
+      properties[key] = data.properties[key];
     }

     var charstrings = data.charstrings;
@@ -55,8 +55,8 @@ function toNumberArray(arr) {
     if (typeof arr[i] !== "number") {
       // Non-number is found -- convert all items to numbers.
       const result = new Array(length);
-      for (let i = 0; i < length; i++) {
-        result[i] = +arr[i];
+      for (let j = 0; j < length; j++) {
+        result[j] = +arr[j];
       }
       return result;
     }
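This is the textbook case the rule exists for: the inner conversion loop reused `i` while the outer scan over the same array was still in flight. A self-contained version of the fixed helper (simplified from the hunk above):

    function toNumberArray(arr) {
      const length = arr.length;
      for (let i = 0; i < length; i++) {
        if (typeof arr[i] !== "number") {
          // Non-number found -- convert all items to numbers, using a
          // separate index `j` so the outer `i` is not shadowed.
          const result = new Array(length);
          for (let j = 0; j < length; j++) {
            result[j] = +arr[j];
          }
          return result;
        }
      }
      return arr;
    }
    console.log(toNumberArray([1, "2", 3])); // [ 1, 2, 3 ]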
@@ -1088,18 +1088,17 @@ var PostScriptCompiler = (function PostScriptCompilerClosure() {
   PostScriptCompiler.prototype = {
     compile: function PostScriptCompiler_compile(code, domain, range) {
       var stack = [];
-      var i, ii;
       var instructions = [];
       var inputSize = domain.length >> 1,
         outputSize = range.length >> 1;
       var lastRegister = 0;
       var n, j;
       var num1, num2, ast1, ast2, tmpVar, item;
-      for (i = 0; i < inputSize; i++) {
+      for (let i = 0; i < inputSize; i++) {
         stack.push(new AstArgument(i, domain[i * 2], domain[i * 2 + 1]));
       }

-      for (i = 0, ii = code.length; i < ii; i++) {
+      for (let i = 0, ii = code.length; i < ii; i++) {
         item = code[i];
         if (typeof item === "number") {
           stack.push(new AstLiteral(item));
@@ -223,10 +223,10 @@ var JpegImage = (function JpegImageClosure() {
       return n + (-1 << length) + 1;
     }

-    function decodeBaseline(component, offset) {
+    function decodeBaseline(component, blockOffset) {
       var t = decodeHuffman(component.huffmanTableDC);
       var diff = t === 0 ? 0 : receiveAndExtend(t);
-      component.blockData[offset] = component.pred += diff;
+      component.blockData[blockOffset] = component.pred += diff;
       var k = 1;
       while (k < 64) {
         var rs = decodeHuffman(component.huffmanTableAC);
@@ -241,23 +241,23 @@ var JpegImage = (function JpegImageClosure() {
         }
         k += r;
         var z = dctZigZag[k];
-        component.blockData[offset + z] = receiveAndExtend(s);
+        component.blockData[blockOffset + z] = receiveAndExtend(s);
         k++;
       }
     }

-    function decodeDCFirst(component, offset) {
+    function decodeDCFirst(component, blockOffset) {
       var t = decodeHuffman(component.huffmanTableDC);
       var diff = t === 0 ? 0 : receiveAndExtend(t) << successive;
-      component.blockData[offset] = component.pred += diff;
+      component.blockData[blockOffset] = component.pred += diff;
     }

-    function decodeDCSuccessive(component, offset) {
-      component.blockData[offset] |= readBit() << successive;
+    function decodeDCSuccessive(component, blockOffset) {
+      component.blockData[blockOffset] |= readBit() << successive;
     }

     var eobrun = 0;
-    function decodeACFirst(component, offset) {
+    function decodeACFirst(component, blockOffset) {
       if (eobrun > 0) {
         eobrun--;
         return;
@@ -278,7 +278,7 @@ var JpegImage = (function JpegImageClosure() {
         }
         k += r;
         var z = dctZigZag[k];
-        component.blockData[offset + z] =
+        component.blockData[blockOffset + z] =
           receiveAndExtend(s) * (1 << successive);
         k++;
       }
@@ -286,14 +286,14 @@ var JpegImage = (function JpegImageClosure() {

     var successiveACState = 0,
       successiveACNextValue;
-    function decodeACSuccessive(component, offset) {
+    function decodeACSuccessive(component, blockOffset) {
       var k = spectralStart;
       var e = spectralEnd;
       var r = 0;
       var s;
       var rs;
       while (k <= e) {
-        const offsetZ = offset + dctZigZag[k];
+        const offsetZ = blockOffset + dctZigZag[k];
         const sign = component.blockData[offsetZ] < 0 ? -1 : 1;
         switch (successiveACState) {
           case 0: // initial state
@@ -358,15 +358,15 @@ var JpegImage = (function JpegImageClosure() {
       var mcuCol = mcu % mcusPerLine;
       blockRow = mcuRow * component.v + row;
       var blockCol = mcuCol * component.h + col;
-      var offset = getBlockBufferOffset(component, blockRow, blockCol);
-      decode(component, offset);
+      const blockOffset = getBlockBufferOffset(component, blockRow, blockCol);
+      decode(component, blockOffset);
     }

     function decodeBlock(component, decode, mcu) {
       blockRow = (mcu / component.blocksPerLine) | 0;
       var blockCol = mcu % component.blocksPerLine;
-      var offset = getBlockBufferOffset(component, blockRow, blockCol);
-      decode(component, offset);
+      const blockOffset = getBlockBufferOffset(component, blockRow, blockCol);
+      decode(component, blockOffset);
     }

     var componentsLength = components.length;
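The jpg.js changes are a single rename applied consistently: the per-block decode callbacks took a parameter named `offset`, which evidently collided with an `offset` elsewhere in the enclosing decoder scope, so the parameter becomes `blockOffset` at every definition and call site. In miniature, with stand-in data rather than the real decoder:

    const blockData = new Int16Array(128);
    function writeDC(component, blockOffset) {
      // `blockOffset` is unmistakably the block-buffer index computed by
      // the caller, not any outer `offset`.
      component.blockData[blockOffset] = 42;
    }
    writeDC({ blockData }, 64);
    console.log(blockData[64]); // 42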
@@ -736,7 +736,7 @@ var JpxImage = (function JpxImageClosure() {
       var l, r, c, p;
       var maxDecompositionLevelsCount = 0;
       for (c = 0; c < componentsCount; c++) {
-        var component = tile.components[c];
+        const component = tile.components[c];
         maxDecompositionLevelsCount = Math.max(
           maxDecompositionLevelsCount,
           component.codingStyleParameters.decompositionLevelsCount
@@ -768,7 +768,7 @@ var JpxImage = (function JpxImageClosure() {
       for (; r <= maxDecompositionLevelsCount; r++) {
         for (; p < maxNumPrecinctsInLevel[r]; p++) {
           for (; c < componentsCount; c++) {
-            var component = tile.components[c];
+            const component = tile.components[c];
             if (r > component.codingStyleParameters.decompositionLevelsCount) {
               continue;
             }
@@ -894,12 +894,12 @@ class Catalog
           break;
         }
         kidPromises.push(
-          xref.fetchAsync(kid).then(function(kid) {
-            if (!isDict(kid)) {
+          xref.fetchAsync(kid).then(function(obj) {
+            if (!isDict(obj)) {
               throw new FormatError("Kid node must be a dictionary.");
             }
-            if (kid.has("Count")) {
-              total += kid.get("Count");
+            if (obj.has("Count")) {
+              total += obj.get("Count");
             } else {
               // Page leaf node.
               total++;
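In the `Catalog` hunk the callback parameter reused the name `kid`, hiding the reference that had just been passed to `xref.fetchAsync(kid)`. Naming the resolved value `obj` keeps the reference and the fetched dictionary both visible. Roughly, with a stubbed fetch (not the real XRef API):

    const fetchAsync = ref => Promise.resolve(new Map([["Count", 1]]));
    const kid = "kidRef";
    fetchAsync(kid).then(function(obj) {
      // The original reference and the fetched object are now distinct.
      console.log(kid, obj.get("Count")); // "kidRef" 1
    });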
@@ -612,7 +612,7 @@ var Type1Parser = (function Type1ParserClosure() {
       this.readInt(); // num
       this.getToken(); // read in 'array'
       while (this.getToken() === "dup") {
-        var index = this.readInt();
+        const index = this.readInt();
         length = this.readInt();
         this.getToken(); // read in 'RD' or '-|'
         data = length > 0 ? stream.getBytes(length) : new Uint8Array(0);
@@ -182,19 +182,19 @@ var WorkerMessageHandler = {

   function getPdfManager(data, evaluatorOptions) {
     var pdfManagerCapability = createPromiseCapability();
-    var pdfManager;
+    let newPdfManager;

     var source = data.source;
     if (source.data) {
       try {
-        pdfManager = new LocalPdfManager(
+        newPdfManager = new LocalPdfManager(
           docId,
           source.data,
           source.password,
           evaluatorOptions,
           docBaseUrl
         );
-        pdfManagerCapability.resolve(pdfManager);
+        pdfManagerCapability.resolve(newPdfManager);
       } catch (ex) {
         pdfManagerCapability.reject(ex);
       }
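The worker changes all follow from one declaration: `getPdfManager()` used to declare its own `var pdfManager`, shadowing the `pdfManager` of an enclosing scope that other code keeps using (visible in the last hunk below, where `pdfManager.updatePassword(...)` is deliberately left untouched). Renaming the local to `newPdfManager` lets the two coexist. The pattern in miniature:

    let pdfManager = null; // outer binding, shared with other handlers

    function getPdfManager() {
      // A fresh name instead of re-declaring `pdfManager`:
      const newPdfManager = { initialized: true };
      return newPdfManager;
    }

    pdfManager = getPdfManager();
    console.log(pdfManager.initialized); // true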
@@ -220,7 +220,7 @@ var WorkerMessageHandler = {
         // We don't need auto-fetch when streaming is enabled.
         var disableAutoFetch =
           source.disableAutoFetch || fullRequest.isStreamingSupported;
-        pdfManager = new NetworkPdfManager(
+        newPdfManager = new NetworkPdfManager(
           docId,
           pdfStream,
           {
@@ -233,16 +233,15 @@ var WorkerMessageHandler = {
           evaluatorOptions,
           docBaseUrl
         );
-        // There may be a chance that `pdfManager` is not initialized
-        // for first few runs of `readchunk` block of code. Be sure
-        // to send all cached chunks, if any, to chunked_stream via
-        // pdf_manager.
+        // There may be a chance that `newPdfManager` is not initialized for
+        // the first few runs of `readchunk` block of code. Be sure to send
+        // all cached chunks, if any, to chunked_stream via pdf_manager.
         for (let i = 0; i < cachedChunks.length; i++) {
-          pdfManager.sendProgressiveData(cachedChunks[i]);
+          newPdfManager.sendProgressiveData(cachedChunks[i]);
         }

         cachedChunks = [];
-        pdfManagerCapability.resolve(pdfManager);
+        pdfManagerCapability.resolve(newPdfManager);
         cancelXHRs = null;
       })
       .catch(function(reason) {
@@ -258,33 +257,32 @@ var WorkerMessageHandler = {
       }
       // the data is array, instantiating directly from it
       try {
-        pdfManager = new LocalPdfManager(
+        newPdfManager = new LocalPdfManager(
           docId,
           pdfFile,
           source.password,
           evaluatorOptions,
           docBaseUrl
         );
-        pdfManagerCapability.resolve(pdfManager);
+        pdfManagerCapability.resolve(newPdfManager);
       } catch (ex) {
         pdfManagerCapability.reject(ex);
       }
       cachedChunks = [];
     };
     var readPromise = new Promise(function(resolve, reject) {
-      var readChunk = function(chunk) {
+      var readChunk = function({ value, done }) {
         try {
           ensureNotTerminated();
-          if (chunk.done) {
-            if (!pdfManager) {
+          if (done) {
+            if (!newPdfManager) {
               flushChunks();
             }
             cancelXHRs = null;
             return;
           }

-          var data = chunk.value;
-          loaded += arrayByteLength(data);
+          loaded += arrayByteLength(value);
           if (!fullRequest.isStreamingSupported) {
             handler.send("DocProgress", {
               loaded,
@@ -292,10 +290,10 @@ var WorkerMessageHandler = {
             });
           }

-          if (pdfManager) {
-            pdfManager.sendProgressiveData(data);
+          if (newPdfManager) {
+            newPdfManager.sendProgressiveData(value);
           } else {
-            cachedChunks.push(data);
+            cachedChunks.push(value);
           }

           fullRequest.read().then(readChunk, reject);
@@ -332,9 +330,9 @@ var WorkerMessageHandler = {

         handler
           .sendWithPromise("PasswordRequest", ex)
-          .then(function(data) {
+          .then(function({ password }) {
             finishWorkerTask(task);
-            pdfManager.updatePassword(data.password);
+            pdfManager.updatePassword(password);
             pdfManagerReady();
           })
           .catch(function() {