Convert the Parser class in src/core/parser.js to ES6 syntax
parent d587abbceb
commit 7d0ecee771
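For orientation, the diff below applies the usual closure-with-prototype to ES6 class conversion, together with var-to-const/let and template-literal cleanups. A minimal sketch of the before/after shape, using a hypothetical Counter class rather than the actual Parser:

// Before: the pre-ES6 pattern — an IIFE returning a constructor function
// whose methods live on a prototype object literal.
var Counter = (function CounterClosure() {
  function Counter(start) {
    this.value = start || 0;
  }

  Counter.prototype = {
    increment: function Counter_increment() {
      return ++this.value;
    },
  };

  return Counter;
})();

// After: the same behaviour as an ES6 class; a default parameter replaces
// the `|| false`-style fallback used in the old constructor.
class CounterES6 {
  constructor(start = 0) {
    this.value = start;
  }

  increment() {
    return ++this.value;
  }
}

Note that a default parameter only applies when the argument is undefined, while `|| false` also coerced other falsy values; for a boolean flag such as recoveryMode the two behave the same in practice.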
@@ -34,7 +34,7 @@ const MAX_LENGTH_TO_CACHE = 1000;
 const MAX_ADLER32_LENGTH = 5552;
 
 function computeAdler32(bytes) {
-  let bytesLength = bytes.length;
+  const bytesLength = bytes.length;
   if (typeof PDFJSDev === 'undefined' ||
       PDFJSDev.test('!PRODUCTION || TESTING')) {
     assert(bytesLength < MAX_ADLER32_LENGTH,
@@ -49,22 +49,23 @@ function computeAdler32(bytes) {
   return ((b % 65521) << 16) | (a % 65521);
 }
 
-var Parser = (function ParserClosure() {
-  function Parser(lexer, allowStreams, xref, recoveryMode) {
+class Parser {
+  constructor(lexer, allowStreams, xref, recoveryMode = false) {
     this.lexer = lexer;
     this.allowStreams = allowStreams;
     this.xref = xref;
-    this.recoveryMode = recoveryMode || false;
+    this.recoveryMode = recoveryMode;
+
     this.imageCache = Object.create(null);
     this.refill();
   }
 
-  Parser.prototype = {
-    refill: function Parser_refill() {
+  refill() {
     this.buf1 = this.lexer.getObj();
     this.buf2 = this.lexer.getObj();
-    },
-    shift: function Parser_shift() {
+  }
+
+  shift() {
     if (isCmd(this.buf2, 'ID')) {
       this.buf1 = this.buf2;
       this.buf2 = null;
@@ -72,8 +73,9 @@ var Parser = (function ParserClosure() {
       this.buf1 = this.buf2;
       this.buf2 = this.lexer.getObj();
     }
-    },
-    tryShift: function Parser_tryShift() {
+  }
+
+  tryShift() {
     try {
       this.shift();
       return true;
@@ -85,9 +87,10 @@ var Parser = (function ParserClosure() {
       // state and call this.shift() twice to reset the buffers.
       return false;
     }
-    },
-    getObj: function Parser_getObj(cipherTransform) {
-      var buf1 = this.buf1;
+  }
+
+  getObj(cipherTransform) {
+    const buf1 = this.buf1;
     this.shift();
 
     if (buf1 instanceof Cmd) {
@@ -95,7 +98,7 @@ var Parser = (function ParserClosure() {
         case 'BI': // inline image
           return this.makeInlineImage(cipherTransform);
         case '[': // array
-          var array = [];
+          const array = [];
           while (!isCmd(this.buf1, ']') && !isEOF(this.buf1)) {
             array.push(this.getObj(cipherTransform));
           }
@@ -108,7 +111,7 @@ var Parser = (function ParserClosure() {
           this.shift();
           return array;
         case '<<': // dictionary or stream
-          var dict = new Dict(this.xref);
+          const dict = new Dict(this.xref);
           while (!isCmd(this.buf1, '>>') && !isEOF(this.buf1)) {
             if (!isName(this.buf1)) {
               info('Malformed dictionary: key must be a name object');
@@ -116,7 +119,7 @@ var Parser = (function ParserClosure() {
               continue;
             }
 
-            var key = this.buf1.name;
+            const key = this.buf1.name;
             this.shift();
             if (isEOF(this.buf1)) {
               break;
@@ -144,9 +147,9 @@ var Parser = (function ParserClosure() {
     }
 
     if (Number.isInteger(buf1)) { // indirect reference or integer
-      var num = buf1;
+      const num = buf1;
       if (Number.isInteger(this.buf1) && isCmd(this.buf2, 'R')) {
-        var ref = new Ref(num, this.buf1);
+        const ref = new Ref(num, this.buf1);
         this.shift();
         this.shift();
         return ref;
@@ -155,7 +158,7 @@ var Parser = (function ParserClosure() {
     }
 
     if (isString(buf1)) { // string
-      var str = buf1;
+      let str = buf1;
       if (cipherTransform) {
         str = cipherTransform.decryptString(str);
       }
@@ -164,7 +167,8 @@ var Parser = (function ParserClosure() {
 
     // simple object
     return buf1;
-    },
+  }
+
   /**
    * Find the end of the stream by searching for the /EI\s/.
    * @returns {number} The inline stream length.
@@ -183,7 +187,7 @@ var Parser = (function ParserClosure() {
       if (ch === SPACE || ch === LF || ch === CR) {
         maybeEIPos = stream.pos;
         // Let's check that the next `n` bytes are ASCII... just to be sure.
-        let followingBytes = stream.peekBytes(n);
+        const followingBytes = stream.peekBytes(n);
         for (let i = 0, ii = followingBytes.length; i < ii; i++) {
           ch = followingBytes[i];
           if (ch === NUL && followingBytes[i + 1] !== NUL) {
@@ -235,14 +239,14 @@ var Parser = (function ParserClosure() {
       endOffset--;
     }
     return ((stream.pos - endOffset) - startPos);
-    },
+  }
+
   /**
    * Find the EOI (end-of-image) marker 0xFFD9 of the stream.
    * @returns {number} The inline stream length.
    */
-    findDCTDecodeInlineStreamEnd:
-        function Parser_findDCTDecodeInlineStreamEnd(stream) {
-      var startPos = stream.pos, foundEOI = false, b, markerLength, length;
+  findDCTDecodeInlineStreamEnd(stream) {
+    let startPos = stream.pos, foundEOI = false, b, markerLength, length;
     while ((b = stream.getByte()) !== -1) {
       if (b !== 0xFF) { // Not a valid marker.
         continue;
@@ -331,14 +335,15 @@ var Parser = (function ParserClosure() {
     }
     this.inlineStreamSkipEI(stream);
     return length;
-    },
+  }
+
   /**
    * Find the EOD (end-of-data) marker '~>' (i.e. TILDE + GT) of the stream.
    * @returns {number} The inline stream length.
    */
   findASCII85DecodeInlineStreamEnd(stream) {
-    var TILDE = 0x7E, GT = 0x3E;
-    var startPos = stream.pos, ch, length;
+    const TILDE = 0x7E, GT = 0x3E;
+    let startPos = stream.pos, ch, length;
     while ((ch = stream.getByte()) !== -1) {
       if (ch === TILDE) {
         ch = stream.peekByte();
@@ -363,15 +368,15 @@ var Parser = (function ParserClosure() {
     }
     this.inlineStreamSkipEI(stream);
     return length;
-    },
+  }
+
   /**
    * Find the EOD (end-of-data) marker '>' (i.e. GT) of the stream.
    * @returns {number} The inline stream length.
    */
-    findASCIIHexDecodeInlineStreamEnd:
-        function Parser_findASCIIHexDecodeInlineStreamEnd(stream) {
-      var GT = 0x3E;
-      var startPos = stream.pos, ch, length;
+  findASCIIHexDecodeInlineStreamEnd(stream) {
+    const GT = 0x3E;
+    let startPos = stream.pos, ch, length;
     while ((ch = stream.getByte()) !== -1) {
       if (ch === GT) {
         break;
@@ -386,13 +391,14 @@ var Parser = (function ParserClosure() {
     }
     this.inlineStreamSkipEI(stream);
     return length;
-    },
+  }
+
   /**
    * Skip over the /EI/ for streams where we search for an EOD marker.
    */
-    inlineStreamSkipEI: function Parser_inlineStreamSkipEI(stream) {
-      var E = 0x45, I = 0x49;
-      var state = 0, ch;
+  inlineStreamSkipEI(stream) {
+    const E = 0x45, I = 0x49;
+    let state = 0, ch;
     while ((ch = stream.getByte()) !== -1) {
       if (state === 0) {
         state = (ch === E) ? 1 : 0;
@@ -402,18 +408,20 @@ var Parser = (function ParserClosure() {
         break;
       }
     }
-    },
-    makeInlineImage: function Parser_makeInlineImage(cipherTransform) {
-      var lexer = this.lexer;
-      var stream = lexer.stream;
+  }
+
+  makeInlineImage(cipherTransform) {
+    const lexer = this.lexer;
+    const stream = lexer.stream;
 
     // Parse dictionary.
-    let dict = new Dict(this.xref), dictLength;
+    const dict = new Dict(this.xref);
+    let dictLength;
     while (!isCmd(this.buf1, 'ID') && !isEOF(this.buf1)) {
       if (!isName(this.buf1)) {
         throw new FormatError('Dictionary key must be a name object');
       }
-      var key = this.buf1.name;
+      const key = this.buf1.name;
       this.shift();
       if (isEOF(this.buf1)) {
         break;
@@ -425,18 +433,20 @@ var Parser = (function ParserClosure() {
     }
 
     // Extract the name of the first (i.e. the current) image filter.
-    var filter = dict.get('Filter', 'F'), filterName;
+    const filter = dict.get('Filter', 'F');
+    let filterName;
     if (isName(filter)) {
       filterName = filter.name;
     } else if (Array.isArray(filter)) {
-      var filterZero = this.xref.fetchIfRef(filter[0]);
+      const filterZero = this.xref.fetchIfRef(filter[0]);
       if (isName(filterZero)) {
         filterName = filterZero.name;
       }
     }
 
     // Parse image stream.
-    let startPos = stream.pos, length;
+    const startPos = stream.pos;
+    let length;
     if (filterName === 'DCTDecode' || filterName === 'DCT') {
       length = this.findDCTDecodeInlineStreamEnd(stream);
     } else if (filterName === 'ASCII85Decode' || filterName === 'A85') {
@@ -446,26 +456,26 @@ var Parser = (function ParserClosure() {
     } else {
       length = this.findDefaultInlineStreamEnd(stream);
     }
-    var imageStream = stream.makeSubStream(startPos, length, dict);
+    let imageStream = stream.makeSubStream(startPos, length, dict);
 
     // Cache all images below the MAX_LENGTH_TO_CACHE threshold by their
     // adler32 checksum.
     let cacheKey;
     if (length < MAX_LENGTH_TO_CACHE && dictLength < MAX_ADLER32_LENGTH) {
-      var imageBytes = imageStream.getBytes();
+      const imageBytes = imageStream.getBytes();
       imageStream.reset();
 
       const initialStreamPos = stream.pos;
       // Set the stream position to the beginning of the dictionary data...
       stream.pos = lexer.beginInlineImagePos;
       // ... and fetch the bytes of the *entire* dictionary.
-      let dictBytes = stream.getBytes(dictLength);
+      const dictBytes = stream.getBytes(dictLength);
       // Finally, don't forget to reset the stream position.
       stream.pos = initialStreamPos;
 
       cacheKey = computeAdler32(imageBytes) + '_' + computeAdler32(dictBytes);
 
-      let cacheEntry = this.imageCache[cacheKey];
+      const cacheEntry = this.imageCache[cacheKey];
       if (cacheEntry !== undefined) {
         this.buf2 = Cmd.get('EI');
         this.shift();
@@ -482,7 +492,7 @@ var Parser = (function ParserClosure() {
     imageStream = this.filter(imageStream, dict, length);
     imageStream.dict = dict;
     if (cacheKey !== undefined) {
-      imageStream.cacheKey = 'inline_' + length + '_' + cacheKey;
+      imageStream.cacheKey = `inline_${length}_${cacheKey}`;
       this.imageCache[cacheKey] = imageStream;
     }
 
@@ -490,7 +500,7 @@ var Parser = (function ParserClosure() {
     this.shift();
 
     return imageStream;
-    },
+  }
 
   _findStreamLength(startPos, signature) {
     const { stream, } = this.lexer;
@@ -521,28 +531,28 @@ var Parser = (function ParserClosure() {
       stream.pos += scanLength;
     }
     return -1;
-    },
+  }
 
-    makeStream: function Parser_makeStream(dict, cipherTransform) {
-      var lexer = this.lexer;
-      var stream = lexer.stream;
+  makeStream(dict, cipherTransform) {
+    const lexer = this.lexer;
+    let stream = lexer.stream;
 
-    // get stream start position
+    // Get the stream's start position.
     lexer.skipToNextLine();
     const startPos = stream.pos - 1;
 
-    // get length
-    var length = dict.get('Length');
+    // Get the length.
+    let length = dict.get('Length');
     if (!Number.isInteger(length)) {
-      info('Bad ' + length + ' attribute in stream');
+      info(`Bad length "${length}" in stream`);
       length = 0;
     }
 
-    // skip over the stream data
+    // Skip over the stream data.
     stream.pos = startPos + length;
     lexer.nextChar();
 
-    // Shift '>>' and check whether the new object marks the end of the stream
+    // Shift '>>' and check whether the new object marks the end of the stream.
     if (this.tryShift() && isCmd(this.buf2, 'endstream')) {
       this.shift(); // 'stream'
     } else {
@@ -561,7 +571,7 @@ var Parser = (function ParserClosure() {
         const end = ENDSTREAM_SIGNATURE.length - i;
         const TRUNCATED_SIGNATURE = ENDSTREAM_SIGNATURE.slice(0, end);
 
-        let maybeLength = this._findStreamLength(startPos,
+        const maybeLength = this._findStreamLength(startPos,
                                                  TRUNCATED_SIGNATURE);
         if (maybeLength >= 0) {
           // Ensure that the byte immediately following the truncated
@@ -596,10 +606,12 @@ var Parser = (function ParserClosure() {
     stream = this.filter(stream, dict, length);
     stream.dict = dict;
     return stream;
-    },
-    filter: function Parser_filter(stream, dict, length) {
-      var filter = dict.get('Filter', 'F');
-      var params = dict.get('DecodeParms', 'DP');
+  }
+
+  filter(stream, dict, length) {
+    let filter = dict.get('Filter', 'F');
+    let params = dict.get('DecodeParms', 'DP');
+
     if (isName(filter)) {
       if (Array.isArray(params)) {
         warn('/DecodeParms should not contain an Array, ' +
@@ -608,14 +620,14 @@ var Parser = (function ParserClosure() {
       return this.makeFilter(stream, filter.name, length, params);
     }
 
-    var maybeLength = length;
+    let maybeLength = length;
    if (Array.isArray(filter)) {
-      var filterArray = filter;
-      var paramsArray = params;
-      for (var i = 0, ii = filterArray.length; i < ii; ++i) {
+      let filterArray = filter;
+      let paramsArray = params;
+      for (let i = 0, ii = filterArray.length; i < ii; ++i) {
        filter = this.xref.fetchIfRef(filterArray[i]);
        if (!isName(filter)) {
-          throw new FormatError('Bad filter name: ' + filter);
+          throw new FormatError(`Bad filter name "${filter}"`);
        }
 
        params = null;
@@ -623,22 +635,24 @@ var Parser = (function ParserClosure() {
          params = this.xref.fetchIfRef(paramsArray[i]);
        }
        stream = this.makeFilter(stream, filter.name, maybeLength, params);
-        // after the first stream the length variable is invalid
+        // After the first stream the `length` variable is invalid.
        maybeLength = null;
      }
    }
    return stream;
-    },
-    makeFilter: function Parser_makeFilter(stream, name, maybeLength, params) {
+  }
+
+  makeFilter(stream, name, maybeLength, params) {
    // Since the 'Length' entry in the stream dictionary can be completely
    // wrong, e.g. zero for non-empty streams, only skip parsing the stream
    // when we can be absolutely certain that it actually is empty.
    if (maybeLength === 0) {
-      warn('Empty "' + name + '" stream.');
+      warn(`Empty "${name}" stream.`);
      return new NullStream();
    }
+
    try {
-      var xrefStreamStats = this.xref.stats.streamTypes;
+      const xrefStreamStats = this.xref.stats.streamTypes;
      if (name === 'FlateDecode' || name === 'Fl') {
        xrefStreamStats[StreamType.FLATE] = true;
        if (params) {
@@ -649,7 +663,7 @@ var Parser = (function ParserClosure() {
       }
       if (name === 'LZWDecode' || name === 'LZW') {
         xrefStreamStats[StreamType.LZW] = true;
-        var earlyChange = 1;
+        let earlyChange = 1;
         if (params) {
           if (params.has('EarlyChange')) {
             earlyChange = params.get('EarlyChange');
@@ -688,20 +702,17 @@ var Parser = (function ParserClosure() {
         xrefStreamStats[StreamType.JBIG] = true;
         return new Jbig2Stream(stream, maybeLength, stream.dict, params);
       }
-      warn('filter "' + name + '" not supported yet');
+      warn(`Filter "${name}" is not supported.`);
       return stream;
     } catch (ex) {
       if (ex instanceof MissingDataException) {
         throw ex;
       }
-      warn('Invalid stream: \"' + ex + '\"');
+      warn(`Invalid stream: "${ex}"`);
       return new NullStream();
     }
-    },
-  };
-
-  return Parser;
-})();
+  }
+}
 
 var Lexer = (function LexerClosure() {
   function Lexer(stream, knownCommands) {
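A side note on the MAX_ADLER32_LENGTH guard that appears in the first hunk and again in makeInlineImage: computeAdler32 defers the modulo until after the accumulation loop, and 5552 (zlib's NMAX) is the largest input length for which both running sums still fit in an unsigned 32-bit accumulator before that single final modulo. A minimal sketch of that deferred-modulo variant, assuming the loop body matches the return statement shown in the diff (the loop itself is not part of this change):

function adler32Sketch(bytes) {
  // 5552 is zlib's NMAX: beyond it the deferred sums could exceed 2^32.
  const MAX_ADLER32_LENGTH = 5552;
  if (bytes.length >= MAX_ADLER32_LENGTH) {
    throw new Error('adler32Sketch: input too long for a single final modulo.');
  }
  let a = 1; // low word: 1 + sum of all bytes
  let b = 0; // high word: sum of the running values of `a`
  for (let i = 0; i < bytes.length; i++) {
    a += bytes[i] & 0xFF;
    b += a;
  }
  // Take the modulo once, exactly as the return statement in the diff does.
  return ((b % 65521) << 16) | (a % 65521);
}

This bound is why the inline-image cache key is only computed when dictLength < MAX_ADLER32_LENGTH.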