Merge pull request #10926 from Snuffleupagus/parser-signature
Change the signature of the `Parser` constructor to take a parameter object
commit 2cc0bfd1b1
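The diff below converts every `Parser` call site from positional arguments to a single parameter object, so each option is named explicitly and unused ones can simply be omitted. As a quick orientation, here is the calling pattern before and after the change (a minimal sketch; `stream` and `xref` stand in for whatever values the surrounding code already has):

    // Before: positional arguments whose order and meaning are easy to mix up.
    const parser = new Parser(new Lexer(stream), /* allowStreams = */ true,
                              /* xref = */ xref, /* recoveryMode = */ false);

    // After: one options object; omitted options fall back to their defaults
    // (allowStreams = false, recoveryMode = false).
    const parser = new Parser({
      lexer: new Lexer(stream),
      xref,
      allowStreams: true,
    });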
@@ -2960,7 +2960,10 @@ var EvaluatorPreprocessor = (function EvaluatorPreprocessorClosure() {
     this.opMap = getOPMap();
     // TODO(mduan): pass array of knownCommands rather than this.opMap
     // dictionary
-    this.parser = new Parser(new Lexer(stream, this.opMap), false, xref);
+    this.parser = new Parser({
+      lexer: new Lexer(stream, this.opMap),
+      xref,
+    });
     this.stateManager = stateManager;
     this.nonProcessedArgs = [];
     this._numInvalidPathOPS = 0;
@@ -1477,8 +1477,12 @@ var XRef = (function XRefClosure() {
       let trailerDict;
       for (i = 0, ii = trailers.length; i < ii; ++i) {
         stream.pos = trailers[i];
-        var parser = new Parser(new Lexer(stream), /* allowStreams = */ true,
-                                /* xref = */ this, /* recoveryMode = */ true);
+        const parser = new Parser({
+          lexer: new Lexer(stream),
+          xref: this,
+          allowStreams: true,
+          recoveryMode: true,
+        });
         var obj = parser.getObj();
         if (!isCmd(obj, 'trailer')) {
           continue;
@@ -1536,7 +1540,11 @@ var XRef = (function XRefClosure() {
 
     stream.pos = startXRef + stream.start;
 
-    var parser = new Parser(new Lexer(stream), true, this);
+    const parser = new Parser({
+      lexer: new Lexer(stream),
+      xref: this,
+      allowStreams: true,
+    });
     var obj = parser.getObj();
     var dict;
 
@@ -1662,7 +1670,11 @@ var XRef = (function XRefClosure() {
       }
       var stream = this.stream.makeSubStream(xrefEntry.offset +
                                              this.stream.start);
-      var parser = new Parser(new Lexer(stream), true, this);
+      const parser = new Parser({
+        lexer: new Lexer(stream),
+        xref: this,
+        allowStreams: true,
+      });
       var obj1 = parser.getObj();
       var obj2 = parser.getObj();
       var obj3 = parser.getObj();
@@ -1709,8 +1721,11 @@ var XRef = (function XRefClosure() {
         throw new FormatError(
           'invalid first and n parameters for ObjStm stream');
       }
-      var parser = new Parser(new Lexer(stream), false, this);
-      parser.allowStreams = true;
+      const parser = new Parser({
+        lexer: new Lexer(stream),
+        xref: this,
+        allowStreams: true,
+      });
       var i, entries = [], num, nums = [];
       // read the object numbers to populate cache
       for (i = 0; i < n; ++i) {
@@ -51,10 +51,10 @@ function computeAdler32(bytes) {
 }
 
 class Parser {
-  constructor(lexer, allowStreams, xref, recoveryMode = false) {
+  constructor({ lexer, xref, allowStreams = false, recoveryMode = false, }) {
     this.lexer = lexer;
-    this.allowStreams = allowStreams;
     this.xref = xref;
+    this.allowStreams = allowStreams;
     this.recoveryMode = recoveryMode;
 
     this.imageCache = Object.create(null);
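Because the new constructor destructures its single argument with per-option defaults, call sites only need to spell out the options they actually care about. A small sketch of the resulting behaviour (assuming `stream` is an existing Stream instance):

    const parser = new Parser({ lexer: new Lexer(stream), xref: null, });
    // allowStreams and recoveryMode were omitted, so both fall back to false.

This is what lets call sites such as the EvaluatorPreprocessor and Linearization ones omit `allowStreams` and `recoveryMode` entirely.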
@@ -748,7 +748,7 @@ function toHexDigit(ch) {
 }
 
 class Lexer {
-  constructor(stream, knownCommands) {
+  constructor(stream, knownCommands = null) {
     this.stream = stream;
     this.nextChar();
 
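`Lexer` now declares an explicit `null` default for `knownCommands`, making it clear that the second argument is optional. For illustration (a sketch; `stream` is assumed to be a valid Stream, and `opMap` stands in for a known-commands dictionary such as the evaluator's `this.opMap`):

    const plainLexer = new Lexer(stream);      // knownCommands === null
    const opLexer = new Lexer(stream, opMap);  // operator commands supplied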
@@ -1202,7 +1202,10 @@ class Linearization {
       throw new Error('Hint array in the linearization dictionary is invalid.');
     }
 
-    const parser = new Parser(new Lexer(stream), false, null);
+    const parser = new Parser({
+      lexer: new Lexer(stream),
+      xref: null,
+    });
     const obj1 = parser.getObj();
     const obj2 = parser.getObj();
    const obj3 = parser.getObj();
@@ -409,8 +409,10 @@ describe('annotation', function() {
         '/URI (http://www.example.com/\\303\\274\\303\\266\\303\\244)\n' +
         '>>\n'
       );
-      const lexer = new Lexer(actionStream);
-      const parser = new Parser(lexer);
+      const parser = new Parser({
+        lexer: new Lexer(actionStream),
+        xref: null,
+      });
       const actionDict = parser.getObj();
 
       const annotationDict = new Dict();
@@ -1412,8 +1414,11 @@ describe('annotation', function() {
         'Test attachment' +
         'endstream\n'
       );
-      const lexer = new Lexer(fileStream);
-      const parser = new Parser(lexer, /* allowStreams = */ true);
+      const parser = new Parser({
+        lexer: new Lexer(fileStream),
+        xref: null,
+        allowStreams: true,
+      });
 
       const fileStreamRef = Ref.get(18, 0);
       const fileStreamDict = parser.getObj();
@@ -26,9 +26,12 @@ describe('parser', function() {
       const string = 'q 1 0 0 1 0 0 cm BI /W 10 /H 10 /BPC 1 ' +
                      '/F /A85 ID abc123~> EI Q';
       const input = new StringStream(string);
-      const lexer = new Lexer(input);
-      const parser = new Parser(lexer, /* allowStreams = */ true,
-                                /* xref = */ null);
+      const parser = new Parser({
+        lexer: new Lexer(input),
+        xref: null,
+        allowStreams: true,
+      });
+
       parser.inlineStreamSkipEI(input);
       expect(input.pos).toEqual(string.indexOf('Q'));
       expect(input.peekByte()).toEqual(0x51); // 'Q'
@@ -39,9 +42,12 @@ describe('parser', function() {
       const string = 'q 1 0 0 1 0 0 cm BI /W 10 /H 10 /BPC 1 ' +
                      '/F /A85 ID abc123~> Q';
       const input = new StringStream(string);
-      const lexer = new Lexer(input);
-      const parser = new Parser(lexer, /* allowStreams = */ true,
-                                /* xref = */ null);
+      const parser = new Parser({
+        lexer: new Lexer(input),
+        xref: null,
+        allowStreams: true,
+      });
+
       parser.inlineStreamSkipEI(input);
       expect(input.pos).toEqual(string.length);
       expect(input.peekByte()).toEqual(-1);