Add an abstract base-class, which all the various Stream implementations inherit from
By having an abstract base-class, it becomes a lot clearer exactly which methods/getters are expected to exist on all Stream instances. Furthermore, since a number of the methods are *identical* for all Stream implementations, this removes unnecessary code duplication in the `Stream`, `DecodeStream`, and `ChunkedStream` classes. For, e.g., `gulp mozcentral`, the size of the *built* `pdf.worker.js` file decreases from `1 619 329` to `1 616 115` bytes with this patch-series.
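To illustrate the intended pattern, here is a minimal, hypothetical sketch (not the actual PDF.js code; the real implementation is the new `src/core/base_stream.js` below): a subclass only implements the primitive accessors, while shared helpers such as `peekByte` and `getUint16` are inherited from the base-class.

```js
// Simplified stand-in for the `unreachable` helper from src/shared/util.js.
function unreachable(msg) {
  throw new Error(msg);
}

// Abstract base: concrete subclasses must override `getByte`.
class AbstractStream {
  getByte() {
    unreachable("Abstract method `getByte` called");
  }

  // Shared helper, inherited by every subclass that implements `getByte`.
  getUint16() {
    const b0 = this.getByte();
    const b1 = this.getByte();
    if (b0 === -1 || b1 === -1) {
      return -1;
    }
    return (b0 << 8) + b1;
  }
}

// Hypothetical subclass backed by a plain Uint8Array.
class ArrayStream extends AbstractStream {
  constructor(bytes) {
    super();
    this.bytes = bytes;
    this.pos = 0;
  }

  getByte() {
    return this.pos < this.bytes.length ? this.bytes[this.pos++] : -1;
  }
}

const stream = new ArrayStream(new Uint8Array([0x12, 0x34]));
console.log(stream.getUint16()); // 4660, i.e. 0x1234
```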
parent 6151b4ecac, commit 67415bfabe

src/core/base_stream.js (new file, 99 lines)
@@ -0,0 +1,99 @@
+/* Copyright 2021 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { shadow, unreachable } from "../shared/util.js";
+
+class BaseStream {
+  constructor() {
+    if (this.constructor === BaseStream) {
+      unreachable("Cannot initialize BaseStream.");
+    }
+  }
+
+  // eslint-disable-next-line getter-return
+  get length() {
+    unreachable("Abstract getter `length` accessed");
+  }
+
+  // eslint-disable-next-line getter-return
+  get isEmpty() {
+    unreachable("Abstract getter `isEmpty` accessed");
+  }
+
+  get isDataLoaded() {
+    return shadow(this, "isDataLoaded", true);
+  }
+
+  getByte() {
+    unreachable("Abstract method `getByte` called");
+  }
+
+  getBytes(length, forceClamped = false) {
+    unreachable("Abstract method `getBytes` called");
+  }
+
+  peekByte() {
+    const peekedByte = this.getByte();
+    if (peekedByte !== -1) {
+      this.pos--;
+    }
+    return peekedByte;
+  }
+
+  peekBytes(length, forceClamped = false) {
+    const bytes = this.getBytes(length, forceClamped);
+    this.pos -= bytes.length;
+    return bytes;
+  }
+
+  getUint16() {
+    const b0 = this.getByte();
+    const b1 = this.getByte();
+    if (b0 === -1 || b1 === -1) {
+      return -1;
+    }
+    return (b0 << 8) + b1;
+  }
+
+  getInt32() {
+    const b0 = this.getByte();
+    const b1 = this.getByte();
+    const b2 = this.getByte();
+    const b3 = this.getByte();
+    return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
+  }
+
+  getByteRange(begin, end) {
+    unreachable("Abstract method `getByteRange` called");
+  }
+
+  skip(n) {
+    this.pos += n || 1;
+  }
+
+  reset() {
+    unreachable("Abstract method `reset` called");
+  }
+
+  moveStart() {
+    unreachable("Abstract method `moveStart` called");
+  }
+
+  makeSubStream(start, length, dict = null) {
+    unreachable("Abstract method `makeSubStream` called");
+  }
+}
+
+export { BaseStream };
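Note: the `isDataLoaded` getter above relies on the `shadow` helper from `src/shared/util.js` to cache its result, so the default `true` is computed only once per instance; `ChunkedStream` below overrides the getter to check its loaded chunks instead. Roughly, `shadow` behaves like this simplified sketch:

```js
// Simplified sketch of the `shadow` helper: it replaces the accessed getter
// with a plain, non-writable data property on `obj` and returns the value.
function shadow(obj, prop, value) {
  Object.defineProperty(obj, prop, {
    value,
    enumerable: true,
    configurable: true,
    writable: false,
  });
  return value;
}
```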
@@ -18,10 +18,13 @@ import {
   arraysToBytes,
   createPromiseCapability,
 } from "../shared/util.js";
+import { BaseStream } from "./base_stream.js";
 import { MissingDataException } from "./core_utils.js";
 
-class ChunkedStream {
+class ChunkedStream extends BaseStream {
   constructor(length, chunkSize, manager) {
+    super();
+
     this.bytes = new Uint8Array(length);
     this.start = 0;
     this.pos = 0;
@@ -46,15 +49,11 @@ class ChunkedStream {
     return chunks;
   }
 
-  getBaseStreams() {
-    return [this];
-  }
-
   get numChunksLoaded() {
     return this._loadedChunks.size;
   }
 
-  allChunksLoaded() {
+  get isDataLoaded() {
     return this.numChunksLoaded === this.numChunks;
   }
 
@@ -169,24 +168,6 @@ class ChunkedStream {
     return this.bytes[this.pos++];
   }
 
-  getUint16() {
-    const b0 = this.getByte();
-    const b1 = this.getByte();
-    if (b0 === -1 || b1 === -1) {
-      return -1;
-    }
-    return (b0 << 8) + b1;
-  }
-
-  getInt32() {
-    const b0 = this.getByte();
-    const b1 = this.getByte();
-    const b2 = this.getByte();
-    const b3 = this.getByte();
-    return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
-  }
-
-  // Returns subarray of original buffer, should only be read.
   getBytes(length, forceClamped = false) {
     const bytes = this.bytes;
     const pos = this.pos;
@@ -215,20 +196,6 @@ class ChunkedStream {
     return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
   }
 
-  peekByte() {
-    const peekedByte = this.getByte();
-    if (peekedByte !== -1) {
-      this.pos--;
-    }
-    return peekedByte;
-  }
-
-  peekBytes(length, forceClamped = false) {
-    const bytes = this.getBytes(length, forceClamped);
-    this.pos -= bytes.length;
-    return bytes;
-  }
-
   getByteRange(begin, end) {
     if (begin < 0) {
       begin = 0;
@@ -242,13 +209,6 @@ class ChunkedStream {
     return this.bytes.subarray(begin, end);
   }
 
-  skip(n) {
-    if (!n) {
-      n = 1;
-    }
-    this.pos += n;
-  }
-
   reset() {
     this.pos = this.start;
   }
@@ -257,7 +217,7 @@ class ChunkedStream {
     this.start = this.pos;
   }
 
-  makeSubStream(start, length, dict) {
+  makeSubStream(start, length, dict = null) {
     if (length) {
       if (start + length > this.progressiveDataLength) {
         this.ensureRange(start, start + length);
@@ -291,12 +251,15 @@ class ChunkedStream {
       }
       return missingChunks;
     };
-    ChunkedStreamSubstream.prototype.allChunksLoaded = function () {
-      if (this.numChunksLoaded === this.numChunks) {
-        return true;
-      }
-      return this.getMissingChunks().length === 0;
-    };
+    Object.defineProperty(ChunkedStreamSubstream.prototype, "isDataLoaded", {
+      get() {
+        if (this.numChunksLoaded === this.numChunks) {
+          return true;
+        }
+        return this.getMissingChunks().length === 0;
+      },
+      configurable: true,
+    });
 
     const subStream = new ChunkedStreamSubstream();
     subStream.pos = subStream.start = start;
@@ -304,6 +267,10 @@ class ChunkedStream {
     subStream.dict = dict;
     return subStream;
   }
+
+  getBaseStreams() {
+    return [this];
+  }
 }
 
 class ChunkedStreamManager {
@@ -521,7 +488,7 @@ class ChunkedStreamManager {
       this.stream.onReceiveData(begin, chunk);
     }
 
-    if (this.stream.allChunksLoaded()) {
+    if (this.stream.isDataLoaded) {
       this._loadedStreamCapability.resolve(this.stream);
     }
 
@@ -13,8 +13,8 @@
  * limitations under the License.
  */
 
+import { BaseStream } from "./base_stream.js";
 import { Stream } from "./stream.js";
-import { unreachable } from "../shared/util.js";
 
 // Lots of DecodeStreams are created whose buffers are never used. For these
 // we share a single empty buffer. This is (a) space-efficient and (b) avoids
@@ -23,8 +23,9 @@ import { unreachable } from "../shared/util.js";
 const emptyBuffer = new Uint8Array(0);
 
 // Super class for the decoding streams.
-class DecodeStream {
+class DecodeStream extends BaseStream {
   constructor(maybeMinBufferLength) {
+    super();
     this._rawMinBufferLength = maybeMinBufferLength || 0;
 
     this.pos = 0;
@@ -40,11 +41,6 @@ class DecodeStream {
     }
   }
 
-  // eslint-disable-next-line getter-return
-  get length() {
-    unreachable("Should not access DecodeStream.length");
-  }
-
   get isEmpty() {
     while (!this.eof && this.bufferLength === 0) {
       this.readBlock();
@@ -77,23 +73,6 @@ class DecodeStream {
     return this.buffer[this.pos++];
  }
 
-  getUint16() {
-    const b0 = this.getByte();
-    const b1 = this.getByte();
-    if (b0 === -1 || b1 === -1) {
-      return -1;
-    }
-    return (b0 << 8) + b1;
-  }
-
-  getInt32() {
-    const b0 = this.getByte();
-    const b1 = this.getByte();
-    const b2 = this.getByte();
-    const b3 = this.getByte();
-    return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
-  }
-
   getBytes(length, forceClamped = false) {
     const pos = this.pos;
     let end;
@@ -124,18 +103,8 @@
       : subarray;
   }
 
-  peekByte() {
-    const peekedByte = this.getByte();
-    if (peekedByte !== -1) {
-      this.pos--;
-    }
-    return peekedByte;
-  }
-
-  peekBytes(length, forceClamped = false) {
-    const bytes = this.getBytes(length, forceClamped);
-    this.pos -= bytes.length;
-    return bytes;
+  reset() {
+    this.pos = 0;
   }
 
   makeSubStream(start, length, dict = null) {
@@ -152,21 +121,6 @@ class DecodeStream {
     return new Stream(this.buffer, start, length, dict);
   }
 
-  getByteRange(begin, end) {
-    unreachable("Should not call DecodeStream.getByteRange");
-  }
-
-  skip(n) {
-    if (!n) {
-      n = 1;
-    }
-    this.pos += n;
-  }
-
-  reset() {
-    this.pos = 0;
-  }
-
   getBaseStreams() {
     if (this.str && this.str.getBaseStreams) {
       return this.str.getBaseStreams();
@@ -61,12 +61,8 @@ class ObjectLoader {
   }
 
   async load() {
-    // Don't walk the graph if all the data is already loaded; note that only
-    // `ChunkedStream` instances have a `allChunksLoaded` method.
-    if (
-      !this.xref.stream.allChunksLoaded ||
-      this.xref.stream.allChunksLoaded()
-    ) {
+    // Don't walk the graph if all the data is already loaded.
+    if (this.xref.stream.isDataLoaded) {
       return undefined;
     }
 
@@ -115,12 +111,12 @@ class ObjectLoader {
       if (currentNode && currentNode.getBaseStreams) {
         const baseStreams = currentNode.getBaseStreams();
         let foundMissingData = false;
-        for (let i = 0, ii = baseStreams.length; i < ii; i++) {
-          const stream = baseStreams[i];
-          if (stream.allChunksLoaded && !stream.allChunksLoaded()) {
-            foundMissingData = true;
-            pendingRequests.push({ begin: stream.start, end: stream.end });
+        for (const stream of baseStreams) {
+          if (stream.isDataLoaded) {
+            continue;
           }
+          foundMissingData = true;
+          pendingRequests.push({ begin: stream.start, end: stream.end });
         }
         if (foundMissingData) {
           nodesToRevisit.push(currentNode);
@@ -133,8 +129,7 @@ class ObjectLoader {
     if (pendingRequests.length) {
       await this.xref.stream.manager.requestRanges(pendingRequests);
 
-      for (let i = 0, ii = nodesToRevisit.length; i < ii; i++) {
-        const node = nodesToRevisit[i];
+      for (const node of nodesToRevisit) {
         // Remove any reference nodes from the current `RefSet` so they
         // aren't skipped when we revist them.
         if (node instanceof Ref) {
@@ -14,6 +14,7 @@
  */
 
 import { assert, unreachable } from "../shared/util.js";
+import { BaseStream } from "./base_stream.js";
 
 const EOF = {};
 
@@ -383,7 +384,7 @@ function isRefsEqual(v1, v2) {
 }
 
 function isStream(v) {
-  return typeof v === "object" && v !== null && v.getBytes !== undefined;
+  return v instanceof BaseStream;
 }
 
 function clearPrimitiveCaches() {
@@ -13,10 +13,13 @@
  * limitations under the License.
  */
 
+import { BaseStream } from "./base_stream.js";
 import { stringToBytes } from "../shared/util.js";
 
-class Stream {
+class Stream extends BaseStream {
   constructor(arrayBuffer, start, length, dict) {
+    super();
+
     this.bytes =
       arrayBuffer instanceof Uint8Array
         ? arrayBuffer
@@ -42,24 +45,6 @@ class Stream {
     return this.bytes[this.pos++];
   }
 
-  getUint16() {
-    const b0 = this.getByte();
-    const b1 = this.getByte();
-    if (b0 === -1 || b1 === -1) {
-      return -1;
-    }
-    return (b0 << 8) + b1;
-  }
-
-  getInt32() {
-    const b0 = this.getByte();
-    const b1 = this.getByte();
-    const b2 = this.getByte();
-    const b3 = this.getByte();
-    return (b0 << 24) + (b1 << 16) + (b2 << 8) + b3;
-  }
-
-  // Returns subarray of original buffer, should only be read.
   getBytes(length, forceClamped = false) {
     const bytes = this.bytes;
     const pos = this.pos;
@@ -80,20 +65,6 @@ class Stream {
     return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
   }
 
-  peekByte() {
-    const peekedByte = this.getByte();
-    if (peekedByte !== -1) {
-      this.pos--;
-    }
-    return peekedByte;
-  }
-
-  peekBytes(length, forceClamped = false) {
-    const bytes = this.getBytes(length, forceClamped);
-    this.pos -= bytes.length;
-    return bytes;
-  }
-
   getByteRange(begin, end) {
     if (begin < 0) {
       begin = 0;
@@ -104,13 +75,6 @@ class Stream {
     return this.bytes.subarray(begin, end);
   }
 
-  skip(n) {
-    if (!n) {
-      n = 1;
-    }
-    this.pos += n;
-  }
-
   reset() {
     this.pos = this.start;
   }