/* Copyright 2012 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/* globals requirejs, __non_webpack_require__ */
/* eslint no-var: error */

import {
  AbortException, assert, createPromiseCapability, getVerbosityLevel, info,
  InvalidPDFException, isArrayBuffer, isSameOrigin, MissingPDFException,
  NativeImageDecoding, PasswordException, setVerbosityLevel, shadow,
  stringToBytes, UnexpectedResponseException, UnknownErrorException,
  unreachable, URL, warn
} from '../shared/util';
import {
  deprecated, DOMCanvasFactory, DOMCMapReaderFactory, DummyStatTimer,
  loadScript, PageViewport, releaseImageResources, RenderingCancelledException,
  StatTimer
} from './display_utils';
import { FontFaceObject, FontLoader } from './font_loader';
import { apiCompatibilityParams } from './api_compatibility';
import { CanvasGraphics } from './canvas';
import globalScope from '../shared/global_scope';
import { GlobalWorkerOptions } from './worker_options';
import { MessageHandler } from '../shared/message_handler';
import { Metadata } from './metadata';
import { PDFDataTransportStream } from './transport_stream';
import { WebGLContext } from './webgl';

const DEFAULT_RANGE_CHUNK_SIZE = 65536; // 2^16 = 65536

let isWorkerDisabled = false;
let fallbackWorkerSrc;

let fakeWorkerFilesLoader = null;
if (typeof PDFJSDev !== 'undefined' && PDFJSDev.test('GENERIC')) {
  let useRequireEnsure = false;
  // For GENERIC build we need to add support for different fake file loaders
  // for different frameworks.
  if (typeof window === 'undefined') {
    // node.js - disable worker and set require.ensure.
    isWorkerDisabled = true;
    if (typeof __non_webpack_require__.ensure === 'undefined') {
      __non_webpack_require__.ensure = __non_webpack_require__('node-ensure');
    }
    useRequireEnsure = true;
  } else if (typeof __non_webpack_require__ !== 'undefined' &&
             typeof __non_webpack_require__.ensure === 'function') {
    useRequireEnsure = true;
  }
  if (typeof requirejs !== 'undefined' && requirejs.toUrl) {
    fallbackWorkerSrc = requirejs.toUrl('pdfjs-dist/build/pdf.worker.js');
  }
  const dynamicLoaderSupported =
    typeof requirejs !== 'undefined' && requirejs.load;
  fakeWorkerFilesLoader = useRequireEnsure ? (function() {
    return new Promise(function(resolve, reject) {
      __non_webpack_require__.ensure([], function() {
        try {
          let worker;
          if (typeof PDFJSDev !== 'undefined' && PDFJSDev.test('LIB')) {
            worker = __non_webpack_require__('../pdf.worker.js');
          } else {
            worker = __non_webpack_require__('./pdf.worker.js');
          }
          resolve(worker.WorkerMessageHandler);
        } catch (ex) {
          reject(ex);
        }
      }, reject, 'pdfjsWorker');
    });
  }) : dynamicLoaderSupported ? (function() {
    return new Promise(function(resolve, reject) {
      requirejs(['pdfjs-dist/build/pdf.worker'], function(worker) {
        try {
          resolve(worker.WorkerMessageHandler);
        } catch (ex) {
          reject(ex);
        }
      }, reject);
    });
  }) : null;

  if (!fallbackWorkerSrc && typeof document === 'object' &&
      'currentScript' in document) {
    const pdfjsFilePath = document.currentScript && document.currentScript.src;
    if (pdfjsFilePath) {
      fallbackWorkerSrc =
        pdfjsFilePath.replace(/(\.(?:min\.)?js)(\?.*)?$/i, '.worker$1$2');
    }
  }
}

/**
 * @typedef {function} IPDFStreamFactory
 * @param {DocumentInitParameters} params The document initialization
 *   parameters. The "url" key is always present.
 * @return {IPDFStream}
 */

/** @type IPDFStreamFactory */
let createPDFNetworkStream;

/**
 * Sets the function that instantiates an IPDFStream as an alternative PDF
 * data transport.
 * @param {IPDFStreamFactory} pdfNetworkStreamFactory - the factory function
 *   that takes document initialization parameters (including a "url") and
 *   returns an instance of IPDFStream.
 */
function setPDFNetworkStreamFactory(pdfNetworkStreamFactory) {
  createPDFNetworkStream = pdfNetworkStreamFactory;
}
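
// A minimal usage sketch (illustrative only, not part of this module): a
// consumer with its own IPDFStream implementation -- here a hypothetical
// `MyCustomStream` class -- could register it as the data transport like so:
//
//   setPDFNetworkStreamFactory(function(params) {
//     return new MyCustomStream(params);
//   });
//
// The factory receives the `DocumentInitParameters` (with the "url" key
// always present) and must return an object implementing IPDFStream.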

/**
 * Document initialization / loading parameters object.
 *
 * @typedef {Object} DocumentInitParameters
 * @property {string} url - The URL of the PDF.
 * @property {TypedArray|Array|string} data - Binary PDF data. Use typed arrays
 *   (Uint8Array) to improve the memory usage. If PDF data is BASE64-encoded,
 *   use atob() to convert it to a binary string first.
 * @property {Object} httpHeaders - Basic authentication headers.
 * @property {boolean} withCredentials - Indicates whether or not cross-site
 *   Access-Control requests should be made using credentials such as cookies
 *   or authorization headers. The default is false.
 * @property {string} password - For decrypting password-protected PDFs.
 * @property {TypedArray} initialData - A typed array with the first portion or
 *   all of the pdf data. Used by the extension since some data is already
 *   loaded before the switch to range requests.
 * @property {number} length - The PDF file length. It's used for progress
 *   reports and range requests operations.
 * @property {PDFDataRangeTransport} range
 * @property {number} rangeChunkSize - Optional parameter to specify
 *   maximum number of bytes fetched per range request. The default value is
 *   2^16 = 65536.
 * @property {PDFWorker} worker - (optional) The worker that will be used for
 *   the loading and parsing of the PDF data.
 * @property {number} verbosity - (optional) Controls the logging level; the
 *   constants from {VerbosityLevel} should be used.
 * @property {string} docBaseUrl - (optional) The base URL of the document,
 *   used when attempting to recover valid absolute URLs for annotations, and
 *   outline items, that (incorrectly) only specify relative URLs.
 * @property {string} nativeImageDecoderSupport - (optional) Strategy for
 *   decoding certain (simple) JPEG images in the browser. This is useful for
 *   environments without DOM image and canvas support, such as e.g. Node.js.
 *   Valid values are 'decode', 'display' or 'none'; where 'decode' is intended
 *   for browsers with full image/canvas support, 'display' for environments
 *   with limited image support through stubs (useful for SVG conversion),
 *   and 'none' where JPEG images will be decoded entirely by PDF.js.
 *   The default value is 'decode'.
 * @property {string} cMapUrl - (optional) The URL where the predefined
 *   Adobe CMaps are located. Include trailing slash.
 * @property {boolean} cMapPacked - (optional) Specifies if the Adobe CMaps are
 *   binary packed.
 * @property {Object} CMapReaderFactory - (optional) The factory that will be
 *   used when reading built-in CMap files. Providing a custom factory is useful
 *   for environments without `XMLHttpRequest` support, such as e.g. Node.js.
 *   The default value is {DOMCMapReaderFactory}.
 * @property {boolean} stopAtErrors - (optional) Reject certain promises, e.g.
 *   `getOperatorList`, `getTextContent`, and `RenderTask`, when the associated
 *   PDF data cannot be successfully parsed, instead of attempting to recover
 *   whatever possible of the data. The default value is `false`.
 * @property {number} maxImageSize - (optional) The maximum allowed image size
 *   in total pixels, i.e. width * height. Images above this value will not be
 *   rendered. Use -1 for no limit, which is also the default value.
 * @property {boolean} isEvalSupported - (optional) Determines if we can eval
 *   strings as JS. Primarily used to improve performance of font rendering,
 *   and when parsing PDF functions. The default value is `true`.
 * @property {boolean} disableFontFace - (optional) By default fonts are
 *   converted to OpenType fonts and loaded via font face rules. If disabled,
 *   fonts will be rendered using a built-in font renderer that constructs the
 *   glyphs with primitive path commands. The default value is `false`.
 * @property {boolean} disableRange - (optional) Disable range request loading
 *   of PDF files. When enabled, and if the server supports partial content
 *   requests, then the PDF will be fetched in chunks.
 *   The default value is `false`.
 * @property {boolean} disableStream - (optional) Disable streaming of PDF file
 *   data. By default PDF.js attempts to load PDFs in chunks.
 *   The default value is `false`.
 * @property {boolean} disableAutoFetch - (optional) Disable pre-fetching of PDF
 *   file data. When range requests are enabled PDF.js will automatically keep
 *   fetching more data even if it isn't needed to display the current page.
 *   The default value is `false`.
 *   NOTE: It is also necessary to disable streaming, see above,
 *   in order for disabling of pre-fetching to work correctly.
 * @property {boolean} disableCreateObjectURL - (optional) Disable the use of
 *   `URL.createObjectURL`, for compatibility with older browsers.
 *   The default value is `false`.
 * @property {boolean} pdfBug - (optional) Enables special hooks for debugging
 *   PDF.js (see `web/debugger.js`). The default value is `false`.
 */
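
// An illustrative sketch of a `DocumentInitParameters` object built from the
// properties documented above (the URL below is a placeholder):
//
//   const initParams = {
//     url: 'https://example.org/sample.pdf',
//     withCredentials: false,
//     rangeChunkSize: 65536,
//     disableAutoFetch: false,
//   };
//
// Any property that is omitted simply falls back to the documented default.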

/**
 * @typedef {Object} PDFDocumentStats
 * @property {Object} streamTypes - Used stream types in the document (an item
 *   is set to true if a specific stream ID was used in the document).
 * @property {Object} fontTypes - Used font types in the document (an item
 *   is set to true if a specific font ID was used in the document).
 */

/**
 * This is the main entry point for loading a PDF and interacting with it.
 * NOTE: If a URL is used to fetch the PDF data a standard XMLHttpRequest (XHR)
 * is used, which means it must follow the same origin rules that any XHR does,
 * e.g. no cross-domain requests without CORS.
 *
 * @param {string|TypedArray|DocumentInitParameters|PDFDataRangeTransport} src
 *   Can be a URL where a PDF file is located, a typed array (Uint8Array)
 *   already populated with data, or a parameter object.
 *
 * @return {PDFDocumentLoadingTask}
 */
function getDocument(src) {
  const task = new PDFDocumentLoadingTask();

  let source;
  if (typeof src === 'string') {
    source = { url: src, };
  } else if (isArrayBuffer(src)) {
    source = { data: src, };
  } else if (src instanceof PDFDataRangeTransport) {
    source = { range: src, };
  } else {
    if (typeof src !== 'object') {
      throw new Error('Invalid parameter in getDocument, ' +
                      'need either Uint8Array, string or a parameter object');
    }
    if (!src.url && !src.data && !src.range) {
      throw new Error(
        'Invalid parameter object: need either .data, .range or .url');
    }
    source = src;
  }
  const params = Object.create(null);
  let rangeTransport = null, worker = null;

  for (const key in source) {
    if (key === 'url' && typeof window !== 'undefined') {
      // The full path is required in the 'url' field.
      params[key] = new URL(source[key], window.location).href;
      continue;
    } else if (key === 'range') {
      rangeTransport = source[key];
      continue;
    } else if (key === 'worker') {
      worker = source[key];
      continue;
    } else if (key === 'data' && !(source[key] instanceof Uint8Array)) {
      // Converting string or array-like data to Uint8Array.
      const pdfBytes = source[key];
      if (typeof pdfBytes === 'string') {
        params[key] = stringToBytes(pdfBytes);
      } else if (typeof pdfBytes === 'object' && pdfBytes !== null &&
                 !isNaN(pdfBytes.length)) {
        params[key] = new Uint8Array(pdfBytes);
      } else if (isArrayBuffer(pdfBytes)) {
        params[key] = new Uint8Array(pdfBytes);
      } else {
        throw new Error('Invalid PDF binary data: either typed array, ' +
                        'string or array-like object is expected in the ' +
                        'data property.');
      }
      continue;
    }
    params[key] = source[key];
  }

  params.rangeChunkSize = params.rangeChunkSize || DEFAULT_RANGE_CHUNK_SIZE;
  params.CMapReaderFactory = params.CMapReaderFactory || DOMCMapReaderFactory;
  params.ignoreErrors = params.stopAtErrors !== true;
  params.pdfBug = params.pdfBug === true;

  const NativeImageDecoderValues = Object.values(NativeImageDecoding);
  if (params.nativeImageDecoderSupport === undefined ||
      !NativeImageDecoderValues.includes(params.nativeImageDecoderSupport)) {
    params.nativeImageDecoderSupport =
      (apiCompatibilityParams.nativeImageDecoderSupport ||
       NativeImageDecoding.DECODE);
  }
  if (!Number.isInteger(params.maxImageSize)) {
    params.maxImageSize = -1;
  }
  if (typeof params.isEvalSupported !== 'boolean') {
    params.isEvalSupported = true;
  }
  if (typeof params.disableFontFace !== 'boolean') {
    params.disableFontFace = apiCompatibilityParams.disableFontFace || false;
  }

  if (typeof params.disableRange !== 'boolean') {
    params.disableRange = false;
  }
  if (typeof params.disableStream !== 'boolean') {
    params.disableStream = false;
  }
  if (typeof params.disableAutoFetch !== 'boolean') {
    params.disableAutoFetch = false;
  }
  if (typeof params.disableCreateObjectURL !== 'boolean') {
    params.disableCreateObjectURL =
      apiCompatibilityParams.disableCreateObjectURL || false;
  }

  // Set the main-thread verbosity level.
  setVerbosityLevel(params.verbosity);

  if (!worker) {
    const workerParams = {
      verbosity: params.verbosity,
      port: GlobalWorkerOptions.workerPort,
    };
    // Worker was not provided -- creating and owning our own. If message port
    // is specified in global worker options, using it.
    worker = workerParams.port ? PDFWorker.fromPort(workerParams) :
                                 new PDFWorker(workerParams);
    task._worker = worker;
  }
  const docId = task.docId;
  worker.promise.then(function() {
    if (task.destroyed) {
      throw new Error('Loading aborted');
    }
    return _fetchDocument(worker, params, rangeTransport, docId).then(
        function(workerId) {
      if (task.destroyed) {
        throw new Error('Loading aborted');
      }

      let networkStream;
      if (rangeTransport) {
        networkStream = new PDFDataTransportStream({
          length: params.length,
          initialData: params.initialData,
          progressiveDone: params.progressiveDone,
          disableRange: params.disableRange,
          disableStream: params.disableStream,
        }, rangeTransport);
      } else if (!params.data) {
        networkStream = createPDFNetworkStream({
          url: params.url,
          length: params.length,
          httpHeaders: params.httpHeaders,
          withCredentials: params.withCredentials,
          rangeChunkSize: params.rangeChunkSize,
          disableRange: params.disableRange,
          disableStream: params.disableStream,
        });
      }

      const messageHandler = new MessageHandler(docId, workerId, worker.port);
      messageHandler.postMessageTransfers = worker.postMessageTransfers;
      const transport = new WorkerTransport(messageHandler, task, networkStream,
                                            params);
      task._transport = transport;
      messageHandler.send('Ready', null);
    });
  }).catch(task._capability.reject);

  return task;
}
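
// A minimal, illustrative sketch of how a consumer typically drives
// `getDocument` (the URL is a placeholder; a worker must be configured, e.g.
// via `GlobalWorkerOptions.workerSrc`, before this will run in a browser):
//
//   const loadingTask = getDocument('https://example.org/sample.pdf');
//   loadingTask.promise.then(function(pdfDocument) {
//     console.log('Loaded a PDF with ' + pdfDocument.numPages + ' pages.');
//   }).catch(function(reason) {
//     console.error('Failed to load the PDF: ' + reason);
//   });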

/**
 * Starts fetching of specified PDF document/data.
 * @param {PDFWorker} worker
 * @param {Object} source
 * @param {PDFDataRangeTransport} pdfDataRangeTransport
 * @param {string} docId Unique document id, used as MessageHandler id.
 * @returns {Promise} The promise, which is resolved when the worker id of
 *                    the MessageHandler is known.
 * @private
 */
function _fetchDocument(worker, source, pdfDataRangeTransport, docId) {
  if (worker.destroyed) {
    return Promise.reject(new Error('Worker was destroyed'));
  }

  if (pdfDataRangeTransport) {
    source.length = pdfDataRangeTransport.length;
    source.initialData = pdfDataRangeTransport.initialData;
    source.progressiveDone = pdfDataRangeTransport.progressiveDone;
  }
  return worker.messageHandler.sendWithPromise('GetDocRequest', {
    docId,
    apiVersion: typeof PDFJSDev !== 'undefined' && !PDFJSDev.test('TESTING') ?
                PDFJSDev.eval('BUNDLE_VERSION') : null,
    source: { // Only send the required properties, and *not* the entire object.
      data: source.data,
      url: source.url,
      password: source.password,
      disableAutoFetch: source.disableAutoFetch,
      rangeChunkSize: source.rangeChunkSize,
      length: source.length,
    },
    maxImageSize: source.maxImageSize,
    disableFontFace: source.disableFontFace,
    disableCreateObjectURL: source.disableCreateObjectURL,
    postMessageTransfers: worker.postMessageTransfers,
    docBaseUrl: source.docBaseUrl,
    nativeImageDecoderSupport: source.nativeImageDecoderSupport,
    ignoreErrors: source.ignoreErrors,
    isEvalSupported: source.isEvalSupported,
  }).then(function(workerId) {
    if (worker.destroyed) {
      throw new Error('Worker was destroyed');
    }
    return workerId;
  });
}

/**
 * PDF document loading operation.
 * @class
 * @alias PDFDocumentLoadingTask
 */
const PDFDocumentLoadingTask = (function PDFDocumentLoadingTaskClosure() {
  let nextDocumentId = 0;

  /** @constructs PDFDocumentLoadingTask */
  class PDFDocumentLoadingTask {
    constructor() {
      this._capability = createPromiseCapability();
      this._transport = null;
      this._worker = null;

      /**
       * Unique document loading task id -- used in MessageHandlers.
       * @type {string}
       */
      this.docId = 'd' + (nextDocumentId++);

      /**
       * Shows if the loading task is destroyed.
       * @type {boolean}
       */
      this.destroyed = false;

      /**
       * Callback to request a password if a wrong or no password was provided.
       * The callback receives two parameters: a function that needs to be
       * called with the new password, and a reason (see {PasswordResponses}).
       */
      this.onPassword = null;

      /**
       * Callback to be able to monitor the loading progress of the PDF file
       * (necessary to implement e.g. a loading bar). The callback receives
       * an {Object} with the properties: {number} loaded and {number} total.
       */
      this.onProgress = null;

      /**
       * Callback for when an unsupported feature is used. The callback
       * receives an {UNSUPPORTED_FEATURES} argument.
       */
      this.onUnsupportedFeature = null;
    }

    /**
     * @return {Promise}
     */
    get promise() {
      return this._capability.promise;
    }

    /**
     * Aborts all network requests and destroys the worker.
     * @return {Promise} A promise that is resolved after destruction activity
     *                   is completed.
     */
    destroy() {
      this.destroyed = true;

      const transportDestroyed = !this._transport ? Promise.resolve() :
        this._transport.destroy();
      return transportDestroyed.then(() => {
        this._transport = null;
        if (this._worker) {
          this._worker.destroy();
          this._worker = null;
        }
      });
    }

    /**
     * Registers callbacks to indicate the document loading completion.
     *
     * @param {function} onFulfilled The callback for the loading completion.
     * @param {function} onRejected The callback for the loading failure.
     * @return {Promise} A promise that is resolved after the onFulfilled or
     *                   onRejected callback.
     */
    then(onFulfilled, onRejected) {
      deprecated('PDFDocumentLoadingTask.then method, ' +
                 'use the `promise` getter instead.');
      return this.promise.then.apply(this.promise, arguments);
    }
  }
  return PDFDocumentLoadingTask;
})();
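
// An illustrative sketch of wiring up the optional loading task callbacks
// documented above (placeholder URL; the password would normally come from
// user input):
//
//   const loadingTask = getDocument('https://example.org/protected.pdf');
//   loadingTask.onProgress = function({ loaded, total, }) {
//     console.log('Loaded ' + loaded + ' of ' + total + ' bytes.');
//   };
//   loadingTask.onPassword = function(updatePassword, reason) {
//     updatePassword('password entered by the user');
//   };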

/**
 * Abstract class to support range requests file loading.
 * @param {number} length
 * @param {Uint8Array} initialData
 * @param {boolean} progressiveDone
 */
class PDFDataRangeTransport {
  constructor(length, initialData, progressiveDone = false) {
    this.length = length;
    this.initialData = initialData;
    this.progressiveDone = progressiveDone;

    this._rangeListeners = [];
    this._progressListeners = [];
    this._progressiveReadListeners = [];
    this._progressiveDoneListeners = [];
    this._readyCapability = createPromiseCapability();
  }

  addRangeListener(listener) {
    this._rangeListeners.push(listener);
  }

  addProgressListener(listener) {
    this._progressListeners.push(listener);
  }

  addProgressiveReadListener(listener) {
    this._progressiveReadListeners.push(listener);
  }

  addProgressiveDoneListener(listener) {
    this._progressiveDoneListeners.push(listener);
  }

  onDataRange(begin, chunk) {
    for (const listener of this._rangeListeners) {
      listener(begin, chunk);
    }
  }

  onDataProgress(loaded, total) {
    this._readyCapability.promise.then(() => {
      for (const listener of this._progressListeners) {
        listener(loaded, total);
      }
    });
  }

  onDataProgressiveRead(chunk) {
    this._readyCapability.promise.then(() => {
      for (const listener of this._progressiveReadListeners) {
        listener(chunk);
      }
    });
  }

  onDataProgressiveDone() {
    this._readyCapability.promise.then(() => {
      for (const listener of this._progressiveDoneListeners) {
        listener();
      }
    });
  }

  transportReady() {
    this._readyCapability.resolve();
  }

  requestDataRange(begin, end) {
    unreachable('Abstract method PDFDataRangeTransport.requestDataRange');
  }

  abort() {}
}
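
// A minimal sketch of how an embedder might specialize the abstract class
// above (the `fetchBytesSomehow` helper is hypothetical and stands in for
// whatever custom transport the embedder actually uses):
//
//   class MyDataRangeTransport extends PDFDataRangeTransport {
//     requestDataRange(begin, end) {
//       fetchBytesSomehow(begin, end).then((chunk) => {
//         // Feed the received bytes back into the viewer.
//         this.onDataRange(begin, chunk);
//       });
//     }
//   }
//
// An instance of such a subclass can then be passed to `getDocument` via the
// `range` property of the parameter object.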

/**
 * Proxy to a PDFDocument in the worker thread. Also, contains commonly used
 * properties that can be read synchronously.
 */
class PDFDocumentProxy {
  constructor(pdfInfo, transport) {
    this._pdfInfo = pdfInfo;
    this._transport = transport;
  }

  /**
   * @return {number} Total number of pages the PDF contains.
   */
  get numPages() {
    return this._pdfInfo.numPages;
  }

  /**
   * @return {string} A (not guaranteed to be) unique ID to identify a PDF.
   */
  get fingerprint() {
    return this._pdfInfo.fingerprint;
  }

  /**
   * @param {number} pageNumber - The page number to get. The first page is 1.
   * @return {Promise} A promise that is resolved with a {@link PDFPageProxy}
   *   object.
   */
  getPage(pageNumber) {
    return this._transport.getPage(pageNumber);
  }

  /**
   * @param {{num: number, gen: number}} ref - The page reference. Must have
   *   the `num` and `gen` properties.
   * @return {Promise} A promise that is resolved with the page index that is
   *   associated with the reference.
   */
  getPageIndex(ref) {
    return this._transport.getPageIndex(ref);
  }

  /**
   * @return {Promise} A promise that is resolved with a lookup table for
   *   mapping named destinations to reference numbers.
   *
   *   This can be slow for large documents. Use `getDestination` instead.
   */
  getDestinations() {
    return this._transport.getDestinations();
  }

  /**
   * @param {string} id - The named destination to get.
   * @return {Promise} A promise that is resolved with all information
   *   of the given named destination.
   */
  getDestination(id) {
    return this._transport.getDestination(id);
  }

  /**
   * @return {Promise} A promise that is resolved with an {Array} containing
   *   the page labels that correspond to the page indexes, or `null` when
   *   no page labels are present in the PDF file.
   */
  getPageLabels() {
    return this._transport.getPageLabels();
  }

  /**
   * @return {Promise} A promise that is resolved with a {string} containing
   *   the page layout name.
   */
  getPageLayout() {
    return this._transport.getPageLayout();
  }

  /**
   * @return {Promise} A promise that is resolved with a {string} containing
   *   the page mode name.
   */
  getPageMode() {
    return this._transport.getPageMode();
  }

  /**
   * @return {Promise} A promise that is resolved with an {Object} containing
   *   the viewer preferences.
   */
  getViewerPreferences() {
    return this._transport.getViewerPreferences();
  }

  /**
   * @return {Promise} A promise that is resolved with an {Array} containing
   *   the destination, or `null` when no open action is present in the PDF
   *   file.
   */
  getOpenActionDestination() {
    return this._transport.getOpenActionDestination();
  }

  /**
   * @return {Promise} A promise that is resolved with a lookup table for
   *   mapping named attachments to their content.
   */
  getAttachments() {
    return this._transport.getAttachments();
  }

  /**
   * @return {Promise} A promise that is resolved with an {Array} of all the
   *   JavaScript strings in the name tree, or `null` if no JavaScript exists.
   */
  getJavaScript() {
    return this._transport.getJavaScript();
  }

  /**
   * @return {Promise} A promise that is resolved with an {Array} that is a
   * tree outline (if it has one) of the PDF. The tree is in the format of:
   * [
   *   {
   *     title: string,
   *     bold: boolean,
   *     italic: boolean,
   *     color: rgb Uint8ClampedArray,
   *     count: integer or undefined,
   *     dest: dest obj,
   *     url: string,
   *     items: array of more items like this
   *   },
   *   ...
   * ]
   */
  getOutline() {
    return this._transport.getOutline();
  }

  /**
   * @return {Promise} A promise that is resolved with an {Array} that contains
   *   the permission flags for the PDF document, or `null` when
   *   no permissions are present in the PDF file.
   */
  getPermissions() {
    return this._transport.getPermissions();
  }

  /**
   * @return {Promise} A promise that is resolved with an {Object} that has
   *   `info` and `metadata` properties. `info` is an {Object} filled with
   *   anything available in the information dictionary and similarly
   *   `metadata` is a {Metadata} object with information from the metadata
   *   section of the PDF.
   */
  getMetadata() {
    return this._transport.getMetadata();
  }

  /**
   * @return {Promise} A promise that is resolved with a {TypedArray} that has
   *   the raw data from the PDF.
   */
  getData() {
    return this._transport.getData();
  }

  /**
   * @return {Promise} A promise that is resolved when the document's data
   *   is loaded. It is resolved with an {Object} that contains the `length`
   *   property that indicates size of the PDF data in bytes.
   */
  getDownloadInfo() {
    return this._transport.downloadInfoCapability.promise;
  }

  /**
   * @return {Promise} A promise that is resolved with current statistics about
   *   document structures (see {@link PDFDocumentStats}).
   */
  getStats() {
    return this._transport.getStats();
  }

  /**
   * Cleans up resources allocated by the document, e.g. created `@font-face`.
   */
  cleanup() {
    this._transport.startCleanup();
  }

  /**
   * Destroys the current document instance and terminates the worker.
   */
  destroy() {
    return this.loadingTask.destroy();
  }

  /**
   * @return {Object} A subset of the current {DocumentInitParameters},
   *   which are either needed in the viewer and/or whose default values
   *   may be affected by the `apiCompatibilityParams`.
   */
  get loadingParams() {
    return this._transport.loadingParams;
  }

  /**
   * @return {PDFDocumentLoadingTask} The loadingTask for the current document.
   */
  get loadingTask() {
    return this._transport.loadingTask;
  }
}
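
// An illustrative sketch of reading document-level information once a
// `PDFDocumentProxy` has been obtained from a loading task:
//
//   loadingTask.promise.then(async function(pdfDocument) {
//     const { info, metadata, } = await pdfDocument.getMetadata();
//     console.log('Producer: ' + (info.Producer || 'unknown'));
//
//     const firstPage = await pdfDocument.getPage(1);
//     console.log('First page rotation: ' + firstPage.rotate);
//   });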

/**
 * Page getViewport parameters.
 *
 * @typedef {Object} GetViewportParameters
 * @property {number} scale - The desired scale of the viewport.
 * @property {number} rotation - (optional) The desired rotation, in degrees, of
 *   the viewport. If omitted it defaults to the page rotation.
 * @property {boolean} dontFlip - (optional) If true, the y-axis will not be
 *   flipped. The default value is `false`.
 */

/**
 * Page getTextContent parameters.
 *
 * @typedef {Object} getTextContentParameters
 * @property {boolean} normalizeWhitespace - replaces all occurrences of
 *   whitespace with standard spaces (0x20). The default value is `false`.
 * @property {boolean} disableCombineTextItems - do not attempt to combine
 *   same line {@link TextItem}'s. The default value is `false`.
 */

/**
 * Page text content.
 *
 * @typedef {Object} TextContent
 * @property {array} items - array of {@link TextItem}
 * @property {Object} styles - {@link TextStyle} objects, indexed by font name.
 */

/**
 * Page text content part.
 *
 * @typedef {Object} TextItem
 * @property {string} str - text content.
 * @property {string} dir - text direction: 'ttb', 'ltr' or 'rtl'.
 * @property {array} transform - transformation matrix.
 * @property {number} width - width in device space.
 * @property {number} height - height in device space.
 * @property {string} fontName - font name used by pdf.js for converted font.
 */

/**
 * Text style.
 *
 * @typedef {Object} TextStyle
 * @property {number} ascent - font ascent.
 * @property {number} descent - font descent.
 * @property {boolean} vertical - text is in vertical mode.
 * @property {string} fontFamily - possible font family
 */

/**
 * Page annotation parameters.
 *
 * @typedef {Object} GetAnnotationsParameters
 * @property {string} intent - Determines the annotations that will be fetched,
 *   can be either 'display' (viewable annotations) or 'print'
 *   (printable annotations).
 *   If the parameter is omitted, all annotations are fetched.
 */

/**
 * Page render parameters.
 *
 * @typedef {Object} RenderParameters
 * @property {Object} canvasContext - A 2D context of a DOM Canvas object.
 * @property {PageViewport} viewport - Rendering viewport obtained by
 *   calling the `PDFPageProxy.getViewport` method.
 * @property {string} intent - Rendering intent, can be 'display' or 'print'
 *   (default value is 'display').
 * @property {boolean} enableWebGL - (optional) Enables WebGL accelerated
 *   rendering for some operations. The default value is `false`.
 * @property {boolean} renderInteractiveForms - (optional) Whether or not
 *   interactive form elements are rendered in the display
 *   layer. If so, we do not render them on canvas as well.
 * @property {Array} transform - (optional) Additional transform, applied
 *   just before viewport transform.
 * @property {Object} imageLayer - (optional) An object that has beginLayout,
 *   endLayout and appendImage functions.
 * @property {Object} canvasFactory - (optional) The factory that will be used
 *   when creating canvases. The default value is
 *   {DOMCanvasFactory}.
 * @property {Object} background - (optional) Background to use for the canvas.
 *   Can use any valid canvas.fillStyle: A DOMString parsed as
 *   CSS <color> value, a CanvasGradient object (a linear or
 *   radial gradient) or a CanvasPattern object (a repetitive
 *   image). The default value is 'rgb(255,255,255)'.
 */
|
2015-02-03 00:12:52 +09:00
|
|
|
|
2014-06-17 03:35:38 +09:00
|
|
|
/**
|
|
|
|
* PDF page operator list.
|
|
|
|
*
|
|
|
|
* @typedef {Object} PDFOperatorList
|
|
|
|
* @property {Array} fnArray - Array containing the operator functions.
|
|
|
|
* @property {Array} argsArray - Array containing the arguments of the
|
|
|
|
* functions.
|
|
|
|
*/
|
2014-04-12 00:57:48 +09:00
|
|
|
|
2014-01-22 04:28:18 +09:00
|
|
|
/**
|
|
|
|
* Proxy to a PDFPage in the worker thread.
|
2015-11-13 04:39:58 +09:00
|
|
|
* @alias PDFPageProxy
|
2014-01-22 04:28:18 +09:00
|
|
|
*/
|
2018-11-08 22:13:42 +09:00
|
|
|
class PDFPageProxy {
|
|
|
|
constructor(pageIndex, pageInfo, transport, pdfBug = false) {
|
2014-05-09 05:02:53 +09:00
|
|
|
this.pageIndex = pageIndex;
|
2018-06-06 03:30:06 +09:00
|
|
|
this._pageInfo = pageInfo;
|
2018-11-08 22:13:42 +09:00
|
|
|
this._transport = transport;
|
2018-02-18 07:13:49 +09:00
|
|
|
this._stats = (pdfBug ? new StatTimer() : DummyStatTimer);
|
|
|
|
this._pdfBug = pdfBug;
|
2012-10-29 05:10:34 +09:00
|
|
|
this.commonObjs = transport.commonObjs;
|
|
|
|
this.objs = new PDFObjects();
|
2018-11-08 22:13:42 +09:00
|
|
|
|
2012-10-29 05:10:34 +09:00
|
|
|
this.cleanupAfterRender = false;
|
2015-10-21 07:45:55 +09:00
|
|
|
this.pendingCleanup = false;
|
2016-01-28 02:04:13 +09:00
|
|
|
this.intentStates = Object.create(null);
|
2015-10-21 07:45:55 +09:00
|
|
|
this.destroyed = false;
|
2012-04-13 04:11:22 +09:00
|
|
|
}
|
2018-03-18 01:10:37 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @return {number} Page number of the page. First page is 1.
|
|
|
|
*/
|
|
|
|
get pageNumber() {
|
|
|
|
return this.pageIndex + 1;
|
|
|
|
}
|
2015-11-22 21:56:52 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @return {number} The number of degrees the page is rotated clockwise.
|
|
|
|
*/
|
|
|
|
get rotate() {
|
|
|
|
return this._pageInfo.rotate;
|
|
|
|
}
|
2014-03-07 23:48:42 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @return {Object} The reference that points to this page. It has 'num' and
|
|
|
|
* 'gen' properties.
|
|
|
|
*/
|
|
|
|
get ref() {
|
|
|
|
return this._pageInfo.ref;
|
|
|
|
}
|
2012-04-13 04:11:22 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @return {number} The default size of units in 1/72nds of an inch.
|
|
|
|
*/
|
|
|
|
get userUnit() {
|
|
|
|
return this._pageInfo.userUnit;
|
|
|
|
}
|
2017-05-03 23:39:54 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @return {Array} An array of the visible portion of the PDF page in the
|
|
|
|
* user space units - [x1, y1, x2, y2].
|
|
|
|
*/
|
|
|
|
get view() {
|
|
|
|
return this._pageInfo.view;
|
|
|
|
}
|
2017-05-03 23:39:54 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
2018-12-21 19:47:37 +09:00
|
|
|
* @param {GetViewportParameters} params - Viewport parameters.
|
2018-11-08 22:13:42 +09:00
|
|
|
* @return {PageViewport} Contains 'width' and 'height' properties
|
2018-12-21 19:47:37 +09:00
|
|
|
* along with transforms required for rendering.
|
2018-11-08 22:13:42 +09:00
|
|
|
*/
|
2018-12-21 19:47:37 +09:00
|
|
|
getViewport({ scale, rotation = this.rotate, dontFlip = false, } = {}) {
|
|
|
|
if ((typeof PDFJSDev !== 'undefined' && PDFJSDev.test('GENERIC')) &&
|
|
|
|
(arguments.length > 1 || typeof arguments[0] === 'number')) {
|
|
|
|
deprecated('getViewport is called with obsolete arguments.');
|
|
|
|
scale = arguments[0];
|
|
|
|
rotation = typeof arguments[1] === 'number' ? arguments[1] : this.rotate;
|
|
|
|
dontFlip = typeof arguments[2] === 'boolean' ? arguments[2] : false;
|
|
|
|
}
|
2018-11-08 22:13:42 +09:00
|
|
|
return new PageViewport({
|
|
|
|
viewBox: this.view,
|
|
|
|
scale,
|
2018-12-21 19:47:37 +09:00
|
|
|
rotation,
|
2018-11-08 22:13:42 +09:00
|
|
|
dontFlip,
|
|
|
|
});
|
|
|
|
}
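  // Usage sketch (assuming `page` is a `PDFPageProxy` obtained via
  // `pdfDocument.getPage(pageNumber)`):
  //
  //   const viewport = page.getViewport({ scale: 1.5, });
  //   console.log(viewport.width, viewport.height, viewport.transform);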
|
|
|
|
|
|
|
|
/**
|
|
|
|
* @param {GetAnnotationsParameters} params - Annotation parameters.
|
|
|
|
* @return {Promise} A promise that is resolved with an {Array} of the
|
|
|
|
* annotation objects.
|
|
|
|
*/
|
|
|
|
getAnnotations({ intent = null, } = {}) {
|
|
|
|
if (!this.annotationsPromise || this.annotationsIntent !== intent) {
|
|
|
|
this.annotationsPromise = this._transport.getAnnotations(this.pageIndex,
|
|
|
|
intent);
|
|
|
|
this.annotationsIntent = intent;
|
|
|
|
}
|
|
|
|
return this.annotationsPromise;
|
|
|
|
}
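  // Usage sketch (same `page` assumption as above): fetch only the viewable
  // annotations.
  //
  //   page.getAnnotations({ intent: 'display', }).then(function(annotations) {
  //     console.log('Number of annotations:', annotations.length);
  //   });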
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Begins the process of rendering a page to the desired context.
|
|
|
|
* @param {RenderParameters} params Page render parameters.
|
|
|
|
* @return {RenderTask} An object that contains the promise, which
|
|
|
|
* is resolved when the page finishes rendering.
|
|
|
|
*/
|
|
|
|
render({ canvasContext, viewport, intent = 'display', enableWebGL = false,
|
|
|
|
renderInteractiveForms = false, transform = null, imageLayer = null,
|
|
|
|
canvasFactory = null, background = null, }) {
|
|
|
|
const stats = this._stats;
|
|
|
|
stats.time('Overall');
|
|
|
|
|
2019-06-03 05:18:21 +09:00
|
|
|
// If there was a pending destroy, cancel it so no cleanup happens during
|
2018-11-08 22:13:42 +09:00
|
|
|
// this call to render.
|
|
|
|
this.pendingCleanup = false;
|
|
|
|
|
|
|
|
const renderingIntent = (intent === 'print' ? 'print' : 'display');
|
|
|
|
const canvasFactoryInstance = canvasFactory || new DOMCanvasFactory();
|
|
|
|
const webGLContext = new WebGLContext({
|
|
|
|
enable: enableWebGL,
|
|
|
|
});
|
|
|
|
|
|
|
|
if (!this.intentStates[renderingIntent]) {
|
|
|
|
this.intentStates[renderingIntent] = Object.create(null);
|
|
|
|
}
|
|
|
|
const intentState = this.intentStates[renderingIntent];
|
|
|
|
|
|
|
|
// If there's no displayReadyCapability yet, then the operatorList
|
|
|
|
// was never requested before. Make the request and create the promise.
|
|
|
|
if (!intentState.displayReadyCapability) {
|
|
|
|
intentState.displayReadyCapability = createPromiseCapability();
|
|
|
|
intentState.operatorList = {
|
|
|
|
fnArray: [],
|
|
|
|
argsArray: [],
|
|
|
|
lastChunk: false,
|
2017-05-03 23:39:54 +09:00
|
|
|
};
|
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
stats.time('Page Request');
|
|
|
|
this._pumpOperatorList({
|
2018-11-08 22:13:42 +09:00
|
|
|
pageIndex: this.pageNumber - 1,
|
|
|
|
intent: renderingIntent,
|
|
|
|
renderInteractiveForms: renderInteractiveForms === true,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
const complete = (error) => {
|
|
|
|
const i = intentState.renderTasks.indexOf(internalRenderTask);
|
|
|
|
if (i >= 0) {
|
|
|
|
intentState.renderTasks.splice(i, 1);
|
|
|
|
}
|
|
|
|
|
2019-06-03 05:18:21 +09:00
|
|
|
// Attempt to reduce memory usage during *printing*, by always running
|
|
|
|
// cleanup once rendering has finished (regardless of cleanupAfterRender).
|
|
|
|
if (this.cleanupAfterRender || renderingIntent === 'print') {
|
2018-11-08 22:13:42 +09:00
|
|
|
this.pendingCleanup = true;
|
|
|
|
}
|
|
|
|
this._tryCleanup();
|
|
|
|
|
|
|
|
if (error) {
|
|
|
|
internalRenderTask.capability.reject(error);
|
|
|
|
|
|
|
|
this._abortOperatorList({
|
|
|
|
intentState,
|
|
|
|
reason: error,
|
|
|
|
});
|
2018-11-08 22:13:42 +09:00
|
|
|
} else {
|
|
|
|
internalRenderTask.capability.resolve();
|
|
|
|
}
|
|
|
|
stats.timeEnd('Rendering');
|
|
|
|
stats.timeEnd('Overall');
|
|
|
|
};
|
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
const internalRenderTask = new InternalRenderTask({
|
|
|
|
callback: complete,
|
|
|
|
params: { // Include the required properties, and *not* the entire object.
|
|
|
|
canvasContext,
|
|
|
|
viewport,
|
|
|
|
transform,
|
|
|
|
imageLayer,
|
|
|
|
background,
|
|
|
|
},
|
|
|
|
objs: this.objs,
|
|
|
|
commonObjs: this.commonObjs,
|
|
|
|
operatorList: intentState.operatorList,
|
|
|
|
pageNumber: this.pageNumber,
|
|
|
|
canvasFactory: canvasFactoryInstance,
|
|
|
|
webGLContext,
|
|
|
|
useRequestAnimationFrame: renderingIntent !== 'print',
|
|
|
|
pdfBug: this._pdfBug,
|
|
|
|
});
|
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
if (!intentState.renderTasks) {
|
|
|
|
intentState.renderTasks = [];
|
|
|
|
}
|
|
|
|
intentState.renderTasks.push(internalRenderTask);
|
|
|
|
const renderTask = internalRenderTask.task;
|
|
|
|
|
|
|
|
intentState.displayReadyCapability.promise.then((transparency) => {
|
|
|
|
if (this.pendingCleanup) {
|
|
|
|
complete();
|
|
|
|
return;
|
2014-03-07 23:48:42 +09:00
|
|
|
}
|
2018-11-08 22:13:42 +09:00
|
|
|
stats.time('Rendering');
|
|
|
|
internalRenderTask.initializeGraphics(transparency);
|
|
|
|
internalRenderTask.operatorListChanged();
|
|
|
|
}).catch(complete);
|
2015-01-06 12:45:01 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
return renderTask;
|
|
|
|
}
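  // Rendering sketch (assumptions: a browser environment and a `PDFPageProxy`
  // named `page`; error handling omitted):
  //
  //   const viewport = page.getViewport({ scale: 1, });
  //   const canvas = document.createElement('canvas');
  //   canvas.width = viewport.width;
  //   canvas.height = viewport.height;
  //
  //   const renderTask = page.render({
  //     canvasContext: canvas.getContext('2d'),
  //     viewport,
  //   });
  //   renderTask.promise.then(function() {
  //     console.log('Page rendered to the canvas.');
  //   });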
|
2012-04-13 04:11:22 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @return {Promise} A promise resolved with an {@link PDFOperatorList}
|
|
|
|
 * object that represents the page's operator list.
|
|
|
|
*/
|
|
|
|
getOperatorList() {
|
|
|
|
function operatorListChanged() {
|
|
|
|
if (intentState.operatorList.lastChunk) {
|
|
|
|
intentState.opListReadCapability.resolve(intentState.operatorList);
|
2014-06-17 03:35:38 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
const i = intentState.renderTasks.indexOf(opListTask);
|
|
|
|
if (i >= 0) {
|
|
|
|
intentState.renderTasks.splice(i, 1);
|
2014-06-17 03:35:38 +09:00
|
|
|
}
|
|
|
|
}
|
2018-11-08 22:13:42 +09:00
|
|
|
}
|
2014-06-17 03:35:38 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
const renderingIntent = 'oplist';
|
|
|
|
if (!this.intentStates[renderingIntent]) {
|
|
|
|
this.intentStates[renderingIntent] = Object.create(null);
|
|
|
|
}
|
|
|
|
const intentState = this.intentStates[renderingIntent];
|
|
|
|
let opListTask;
|
|
|
|
|
|
|
|
if (!intentState.opListReadCapability) {
|
|
|
|
opListTask = {};
|
|
|
|
opListTask.operatorListChanged = operatorListChanged;
|
|
|
|
intentState.opListReadCapability = createPromiseCapability();
|
|
|
|
intentState.renderTasks = [];
|
|
|
|
intentState.renderTasks.push(opListTask);
|
|
|
|
intentState.operatorList = {
|
|
|
|
fnArray: [],
|
|
|
|
argsArray: [],
|
|
|
|
lastChunk: false,
|
|
|
|
};
|
2014-06-17 03:35:38 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
this._stats.time('Page Request');
|
|
|
|
this._pumpOperatorList({
|
2018-11-08 22:13:42 +09:00
|
|
|
pageIndex: this.pageIndex,
|
|
|
|
intent: renderingIntent,
|
2017-04-17 21:46:53 +09:00
|
|
|
});
|
2018-11-08 22:13:42 +09:00
|
|
|
}
|
|
|
|
return intentState.opListReadCapability.promise;
|
|
|
|
}
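  // Usage sketch (same `page` assumption as above): inspect the raw operator
  // list instead of rendering it.
  //
  //   page.getOperatorList().then(function(opList) {
  //     console.log('Number of operators:', opList.fnArray.length);
  //   });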
|
2017-04-17 21:46:53 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @param {getTextContentParameters} params - getTextContent parameters.
|
|
|
|
* @return {ReadableStream} ReadableStream to read textContent chunks.
|
|
|
|
*/
|
|
|
|
streamTextContent({ normalizeWhitespace = false,
|
|
|
|
disableCombineTextItems = false, } = {}) {
|
|
|
|
const TEXT_CONTENT_CHUNK_SIZE = 100;
|
|
|
|
|
|
|
|
return this._transport.messageHandler.sendWithStream('GetTextContent', {
|
|
|
|
pageIndex: this.pageNumber - 1,
|
|
|
|
normalizeWhitespace: normalizeWhitespace === true,
|
|
|
|
combineTextItems: disableCombineTextItems !== true,
|
|
|
|
}, {
|
|
|
|
highWaterMark: TEXT_CONTENT_CHUNK_SIZE,
|
|
|
|
size(textContent) {
|
|
|
|
return textContent.items.length;
|
|
|
|
},
|
|
|
|
});
|
|
|
|
}
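  // Streaming sketch (same `page` assumption as above): read the text content
  // chunk by chunk instead of waiting for the whole page.
  //
  //   const reader = page.streamTextContent({
  //     normalizeWhitespace: true,
  //   }).getReader();
  //
  //   const pump = ({ value, done, }) => {
  //     if (done) {
  //       return;
  //     }
  //     value.items.forEach((item) => console.log(item.str));
  //     reader.read().then(pump);
  //   };
  //   reader.read().then(pump);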
|
2017-04-17 21:46:53 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @param {getTextContentParameters} params - getTextContent parameters.
|
|
|
|
 * @return {Promise} A promise that is resolved with a {@link TextContent}
 * object that represents the page text content.
|
|
|
|
*/
|
|
|
|
getTextContent(params = {}) {
|
|
|
|
const readableStream = this.streamTextContent(params);
|
2015-10-21 07:45:55 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
return new Promise(function(resolve, reject) {
|
|
|
|
function pump() {
|
|
|
|
reader.read().then(function({ value, done, }) {
|
|
|
|
if (done) {
|
|
|
|
resolve(textContent);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
Object.assign(textContent.styles, value.styles);
|
|
|
|
textContent.items.push(...value.items);
|
|
|
|
pump();
|
|
|
|
}, reject);
|
|
|
|
}
|
2015-10-21 07:45:55 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
const reader = readableStream.getReader();
|
|
|
|
const textContent = {
|
|
|
|
items: [],
|
|
|
|
styles: Object.create(null),
|
|
|
|
};
|
|
|
|
pump();
|
|
|
|
});
|
|
|
|
}
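  // Usage sketch (same `page` assumption as above): collect the full text of
  // the page as a single string.
  //
  //   page.getTextContent().then(function(textContent) {
  //     const pageText = textContent.items.map((item) => item.str).join(' ');
  //     console.log(pageText);
  //   });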
|
2015-10-21 07:45:55 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* Destroys page object.
|
|
|
|
*/
|
|
|
|
_destroy() {
|
|
|
|
this.destroyed = true;
|
|
|
|
this._transport.pageCache[this.pageIndex] = null;
|
|
|
|
|
|
|
|
const waitOn = [];
|
|
|
|
Object.keys(this.intentStates).forEach((intent) => {
|
|
|
|
const intentState = this.intentStates[intent];
|
|
|
|
this._abortOperatorList({
|
|
|
|
intentState,
|
|
|
|
reason: new Error('Page was destroyed.'),
|
|
|
|
force: true,
|
|
|
|
});
|
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
if (intent === 'oplist') {
|
|
|
|
// Avoid errors below, since the renderTasks are just stubs.
|
2013-08-07 09:35:54 +09:00
|
|
|
return;
|
|
|
|
}
|
2018-11-08 22:13:42 +09:00
|
|
|
intentState.renderTasks.forEach(function(renderTask) {
|
|
|
|
const renderCompleted = renderTask.capability.promise.
|
|
|
|
catch(function() {}); // ignoring failures
|
|
|
|
waitOn.push(renderCompleted);
|
|
|
|
renderTask.cancel();
|
|
|
|
});
|
|
|
|
});
|
2018-11-08 22:13:42 +09:00
|
|
|
this.objs.clear();
|
|
|
|
this.annotationsPromise = null;
|
|
|
|
this.pendingCleanup = false;
|
|
|
|
return Promise.all(waitOn);
|
|
|
|
}
|
2013-08-07 09:35:54 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* Cleans up resources allocated by the page.
|
|
|
|
* @param {boolean} resetStats - (optional) Reset page stats, if enabled.
|
|
|
|
* The default value is `false`.
|
|
|
|
*/
|
|
|
|
cleanup(resetStats = false) {
|
|
|
|
this.pendingCleanup = true;
|
|
|
|
this._tryCleanup(resetStats);
|
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* For internal use only. Attempts to clean up if rendering is in a state
|
|
|
|
* where that's possible.
|
|
|
|
* @ignore
|
|
|
|
*/
|
|
|
|
_tryCleanup(resetStats = false) {
|
|
|
|
if (!this.pendingCleanup ||
|
2019-08-30 18:35:05 +09:00
|
|
|
Object.keys(this.intentStates).some((intent) => {
|
2018-11-08 22:13:42 +09:00
|
|
|
const intentState = this.intentStates[intent];
|
|
|
|
return (intentState.renderTasks.length !== 0 ||
|
2019-07-10 20:24:21 +09:00
|
|
|
!intentState.operatorList.lastChunk);
|
2019-08-30 18:35:05 +09:00
|
|
|
})) {
|
2018-11-08 22:13:42 +09:00
|
|
|
return;
|
|
|
|
}
|
2013-08-07 09:35:54 +09:00
|
|
|
|
2019-08-30 18:35:05 +09:00
|
|
|
Object.keys(this.intentStates).forEach((intent) => {
|
2018-11-08 22:13:42 +09:00
|
|
|
delete this.intentStates[intent];
|
2019-08-30 18:35:05 +09:00
|
|
|
});
|
2018-11-08 22:13:42 +09:00
|
|
|
this.objs.clear();
|
|
|
|
this.annotationsPromise = null;
|
|
|
|
if (resetStats && this._stats instanceof StatTimer) {
|
|
|
|
this._stats = new StatTimer();
|
|
|
|
}
|
|
|
|
this.pendingCleanup = false;
|
|
|
|
}
|
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* For internal use only.
|
|
|
|
* @ignore
|
|
|
|
*/
|
|
|
|
_startRenderPage(transparency, intent) {
|
|
|
|
const intentState = this.intentStates[intent];
|
|
|
|
// TODO Refactor RenderPageRequest to separate rendering
|
|
|
|
// and operator list logic
|
|
|
|
if (intentState.displayReadyCapability) {
|
|
|
|
intentState.displayReadyCapability.resolve(transparency);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* For internal use only.
|
|
|
|
* @ignore
|
|
|
|
*/
|
|
|
|
_renderPageChunk(operatorListChunk, intentState) {
|
2018-11-08 22:13:42 +09:00
|
|
|
// Add the new chunk to the current operator list.
|
|
|
|
for (let i = 0, ii = operatorListChunk.length; i < ii; i++) {
|
|
|
|
intentState.operatorList.fnArray.push(operatorListChunk.fnArray[i]);
|
|
|
|
intentState.operatorList.argsArray.push(
|
|
|
|
operatorListChunk.argsArray[i]);
|
|
|
|
}
|
|
|
|
intentState.operatorList.lastChunk = operatorListChunk.lastChunk;
|
|
|
|
|
|
|
|
// Notify all the rendering tasks there are more operators to be consumed.
|
|
|
|
for (let i = 0; i < intentState.renderTasks.length; i++) {
|
|
|
|
intentState.renderTasks[i].operatorListChanged();
|
|
|
|
}
|
|
|
|
|
|
|
|
if (operatorListChunk.lastChunk) {
|
|
|
|
this._tryCleanup();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* For internal use only.
|
|
|
|
* @ignore
|
|
|
|
*/
|
|
|
|
_pumpOperatorList(args) {
|
|
|
|
assert(args.intent,
|
|
|
|
'PDFPageProxy._pumpOperatorList: Expected "intent" argument.');
|
|
|
|
|
|
|
|
const readableStream =
|
|
|
|
this._transport.messageHandler.sendWithStream('GetOperatorList', args);
|
|
|
|
const reader = readableStream.getReader();
|
|
|
|
|
|
|
|
const intentState = this.intentStates[args.intent];
|
|
|
|
intentState.streamReader = reader;
|
|
|
|
|
|
|
|
const pump = () => {
|
|
|
|
reader.read().then(({ value, done, }) => {
|
|
|
|
if (done) {
|
|
|
|
intentState.streamReader = null;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (this._transport.destroyed) {
|
|
|
|
return; // Ignore any pending requests if the worker was terminated.
|
|
|
|
}
|
2019-09-10 00:35:24 +09:00
|
|
|
this._renderPageChunk(value, intentState);
|
|
|
|
pump();
|
|
|
|
}, (reason) => {
|
|
|
|
intentState.streamReader = null;
|
|
|
|
|
|
|
|
if (this._transport.destroyed) {
|
|
|
|
return; // Ignore any pending requests if the worker was terminated.
|
|
|
|
}
|
|
|
|
if (intentState.operatorList) {
|
|
|
|
// Mark operator list as complete.
|
|
|
|
intentState.operatorList.lastChunk = true;
|
|
|
|
|
|
|
|
for (let i = 0; i < intentState.renderTasks.length; i++) {
|
|
|
|
intentState.renderTasks[i].operatorListChanged();
|
|
|
|
}
|
|
|
|
this._tryCleanup();
|
|
|
|
}
|
|
|
|
|
|
|
|
if (intentState.displayReadyCapability) {
|
|
|
|
intentState.displayReadyCapability.reject(reason);
|
|
|
|
} else if (intentState.opListReadCapability) {
|
|
|
|
intentState.opListReadCapability.reject(reason);
|
|
|
|
} else {
|
|
|
|
throw reason;
|
|
|
|
}
|
|
|
|
});
|
|
|
|
};
|
|
|
|
pump();
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* For internal use only.
|
|
|
|
* @ignore
|
|
|
|
*/
|
|
|
|
_abortOperatorList({ intentState, reason, force = false, }) {
|
|
|
|
assert(reason instanceof Error,
|
|
|
|
'PDFPageProxy._abortOperatorList: Expected "reason" argument.');
|
|
|
|
|
|
|
|
if (!intentState.streamReader) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (!force && intentState.renderTasks.length !== 0) {
|
|
|
|
      // Ensure that an Error occurring in *only* one `InternalRenderTask`, e.g.
|
|
|
|
// multiple render() calls on the same canvas, won't break all rendering.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (reason instanceof RenderingCancelledException) {
|
|
|
|
// Aborting parsing on the worker-thread when rendering is cancelled will
|
|
|
|
// break subsequent rendering operations. TODO: Remove this restriction.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
intentState.streamReader.cancel(
|
|
|
|
new AbortException(reason && reason.message));
|
|
|
|
intentState.streamReader = null;
|
|
|
|
}
|
|
|
|
|
2018-11-08 22:13:42 +09:00
|
|
|
/**
|
|
|
|
* @return {Object} Returns page stats, if enabled.
|
|
|
|
*/
|
|
|
|
get stats() {
|
|
|
|
return (this._stats instanceof StatTimer ? this._stats : null);
|
|
|
|
}
|
|
|
|
}
|
2014-01-22 04:28:18 +09:00
|
|
|
|
2017-05-03 02:20:13 +09:00
|
|
|
class LoopbackPort {
|
2018-06-04 19:38:05 +09:00
|
|
|
constructor(defer = true) {
|
2017-05-03 02:20:13 +09:00
|
|
|
this._listeners = [];
|
|
|
|
this._defer = defer;
|
|
|
|
this._deferred = Promise.resolve(undefined);
|
|
|
|
}
|
|
|
|
|
|
|
|
postMessage(obj, transfers) {
|
|
|
|
function cloneValue(value) {
|
|
|
|
// Trying to perform a structured clone close to the spec, including
|
|
|
|
// transfers.
|
|
|
|
if (typeof value !== 'object' || value === null) {
|
|
|
|
return value;
|
|
|
|
}
|
|
|
|
if (cloned.has(value)) { // already cloned the object
|
|
|
|
return cloned.get(value);
|
|
|
|
}
|
2018-11-09 00:24:20 +09:00
|
|
|
let buffer, result;
|
2017-05-03 02:20:13 +09:00
|
|
|
if ((buffer = value.buffer) && isArrayBuffer(buffer)) {
|
|
|
|
// We found object with ArrayBuffer (typed array).
|
2018-11-09 00:24:20 +09:00
|
|
|
const transferable = transfers && transfers.includes(buffer);
|
2017-05-03 02:20:13 +09:00
|
|
|
if (value === buffer) {
|
|
|
|
// Special case when we are faking typed arrays in compatibility.js.
|
|
|
|
result = value;
|
|
|
|
} else if (transferable) {
|
|
|
|
result = new value.constructor(buffer, value.byteOffset,
|
|
|
|
value.byteLength);
|
|
|
|
} else {
|
|
|
|
result = new value.constructor(value);
|
|
|
|
}
|
|
|
|
cloned.set(value, result);
|
|
|
|
return result;
|
|
|
|
}
|
2017-08-23 07:50:27 +09:00
|
|
|
result = Array.isArray(value) ? [] : {};
|
2017-05-03 02:20:13 +09:00
|
|
|
cloned.set(value, result); // adding to cache now for cyclic references
|
|
|
|
// Cloning all value and object properties, however ignoring properties
|
|
|
|
// defined via getter.
|
2018-11-09 00:24:20 +09:00
|
|
|
for (const i in value) {
|
|
|
|
let desc, p = value;
|
2017-05-03 02:20:13 +09:00
|
|
|
while (!(desc = Object.getOwnPropertyDescriptor(p, i))) {
|
|
|
|
p = Object.getPrototypeOf(p);
|
|
|
|
}
|
|
|
|
if (typeof desc.value === 'undefined' ||
|
|
|
|
typeof desc.value === 'function') {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
result[i] = cloneValue(desc.value);
|
|
|
|
}
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!this._defer) {
|
2019-08-30 18:35:05 +09:00
|
|
|
this._listeners.forEach((listener) => {
|
|
|
|
listener.call(this, { data: obj, });
|
2019-08-30 18:35:05 +09:00
|
|
|
});
|
2017-05-03 02:20:13 +09:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2018-11-09 00:24:20 +09:00
|
|
|
const cloned = new WeakMap();
|
|
|
|
const e = { data: cloneValue(obj), };
|
2017-05-03 23:39:54 +09:00
|
|
|
this._deferred.then(() => {
|
2019-08-30 18:35:05 +09:00
|
|
|
this._listeners.forEach((listener) => {
|
2017-05-03 02:20:13 +09:00
|
|
|
listener.call(this, e);
|
2019-08-30 18:35:05 +09:00
|
|
|
});
|
2017-05-03 23:39:54 +09:00
|
|
|
});
|
2017-05-03 02:20:13 +09:00
|
|
|
}
|
|
|
|
|
|
|
|
addEventListener(name, listener) {
|
|
|
|
this._listeners.push(listener);
|
|
|
|
}
|
|
|
|
|
|
|
|
removeEventListener(name, listener) {
|
2018-11-09 00:24:20 +09:00
|
|
|
const i = this._listeners.indexOf(listener);
|
2017-05-03 02:20:13 +09:00
|
|
|
this._listeners.splice(i, 1);
|
|
|
|
}
|
|
|
|
|
|
|
|
terminate() {
|
2019-05-30 23:25:48 +09:00
|
|
|
this._listeners.length = 0;
|
2017-05-03 02:20:13 +09:00
|
|
|
}
|
|
|
|
}
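// Usage sketch (`LoopbackPort` is primarily used internally, e.g. by the fake
// worker setup): it mimics a `MessagePort` by echoing structurally cloned
// messages back to listeners on the same thread.
//
//   const port = new LoopbackPort();
//   port.addEventListener('message', function(evt) {
//     console.log('Received:', evt.data);
//   });
//   port.postMessage({ hello: 'world', });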
|
|
|
|
|
2018-02-15 00:03:54 +09:00
|
|
|
/**
|
|
|
|
* @typedef {Object} PDFWorkerParameters
|
|
|
|
* @property {string} name - (optional) The name of the worker.
|
|
|
|
* @property {Object} port - (optional) The `workerPort`.
|
2018-02-15 01:35:08 +09:00
|
|
|
* @property {number} verbosity - (optional) Controls the logging level; the
|
|
|
|
* constants from {VerbosityLevel} should be used.
|
2018-02-15 00:03:54 +09:00
|
|
|
*/
|
|
|
|
|
2012-04-14 01:25:08 +09:00
|
|
|
/**
|
2015-10-28 02:55:15 +09:00
|
|
|
 * PDF.js web worker abstraction which controls instantiation of PDF documents and
|
2018-02-15 00:03:54 +09:00
|
|
|
* WorkerTransport for them. If creation of a web worker is not possible,
|
2015-10-28 02:55:15 +09:00
|
|
|
* a "fake" worker will be used instead.
|
|
|
|
* @class
|
2012-04-14 01:25:08 +09:00
|
|
|
*/
|
2018-11-09 00:22:26 +09:00
|
|
|
const PDFWorker = (function PDFWorkerClosure() {
|
|
|
|
const pdfWorkerPorts = new WeakMap();
|
2017-06-10 10:07:51 +09:00
|
|
|
let nextFakeWorkerId = 0;
|
2018-11-09 00:22:26 +09:00
|
|
|
let fakeWorkerFilesLoadedCapability;
|
2015-10-28 02:55:15 +09:00
|
|
|
|
2015-12-24 08:46:08 +09:00
|
|
|
function getWorkerSrc() {
|
2018-02-14 22:49:24 +09:00
|
|
|
if (GlobalWorkerOptions.workerSrc) {
|
|
|
|
return GlobalWorkerOptions.workerSrc;
|
2015-12-24 08:46:08 +09:00
|
|
|
}
|
2018-10-07 21:28:16 +09:00
|
|
|
if (typeof fallbackWorkerSrc !== 'undefined') {
|
|
|
|
return fallbackWorkerSrc;
|
2016-10-15 00:57:53 +09:00
|
|
|
}
|
2018-02-14 22:49:24 +09:00
|
|
|
throw new Error('No "GlobalWorkerOptions.workerSrc" specified.');
|
2015-12-24 08:46:08 +09:00
|
|
|
}
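  // Configuration sketch (assumptions: the library is consumed as `pdfjsLib`
  // and a matching `pdf.worker.js` build is served by the application): the
  // worker source should be set before the first `getDocument` call, otherwise
  // `getWorkerSrc()` may throw.
  //
  //   pdfjsLib.GlobalWorkerOptions.workerSrc = '/build/pdf.worker.js';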
|
|
|
|
|
2018-01-20 02:16:17 +09:00
|
|
|
function getMainThreadWorkerMessageHandler() {
|
2018-10-09 22:35:28 +09:00
|
|
|
try {
|
|
|
|
if (typeof window !== 'undefined') {
|
|
|
|
return (window.pdfjsWorker && window.pdfjsWorker.WorkerMessageHandler);
|
|
|
|
}
|
|
|
|
} catch (ex) { }
|
|
|
|
return null;
|
2018-01-20 02:16:17 +09:00
|
|
|
}
|
|
|
|
|
2015-10-28 02:55:15 +09:00
|
|
|
// Loads worker code into main thread.
|
|
|
|
function setupFakeWorkerGlobal() {
|
2016-10-15 00:57:53 +09:00
|
|
|
if (fakeWorkerFilesLoadedCapability) {
|
|
|
|
return fakeWorkerFilesLoadedCapability.promise;
|
|
|
|
}
|
|
|
|
fakeWorkerFilesLoadedCapability = createPromiseCapability();
|
2018-01-20 02:16:17 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
const mainWorkerMessageHandler = getMainThreadWorkerMessageHandler();
|
2018-01-20 02:16:17 +09:00
|
|
|
if (mainWorkerMessageHandler) {
|
|
|
|
// The worker was already loaded using a `<script>` tag.
|
|
|
|
fakeWorkerFilesLoadedCapability.resolve(mainWorkerMessageHandler);
|
|
|
|
return fakeWorkerFilesLoadedCapability.promise;
|
|
|
|
}
|
2018-01-21 01:15:16 +09:00
|
|
|
// In the developer build load worker_loader.js which in turn loads all the
|
2016-10-15 00:57:53 +09:00
|
|
|
// other files and resolves the promise. In production only the
|
|
|
|
// pdf.worker.js file is needed.
|
|
|
|
if (typeof PDFJSDev === 'undefined' || !PDFJSDev.test('PRODUCTION')) {
|
2017-04-02 21:25:33 +09:00
|
|
|
if (typeof SystemJS === 'object') {
|
2017-07-01 02:59:52 +09:00
|
|
|
SystemJS.import('pdfjs/core/worker').then((worker) => {
|
2018-01-21 01:15:16 +09:00
|
|
|
fakeWorkerFilesLoadedCapability.resolve(worker.WorkerMessageHandler);
|
2018-10-05 20:54:16 +09:00
|
|
|
}).catch(fakeWorkerFilesLoadedCapability.reject);
|
2015-11-24 02:46:40 +09:00
|
|
|
} else if (typeof require === 'function') {
|
2018-10-05 20:54:16 +09:00
|
|
|
try {
|
2018-11-09 00:22:26 +09:00
|
|
|
const worker = require('../core/worker.js');
|
2018-10-05 20:54:16 +09:00
|
|
|
fakeWorkerFilesLoadedCapability.resolve(worker.WorkerMessageHandler);
|
|
|
|
} catch (ex) {
|
|
|
|
fakeWorkerFilesLoadedCapability.reject(ex);
|
|
|
|
}
|
2015-11-24 02:46:40 +09:00
|
|
|
} else {
|
2018-10-05 20:54:16 +09:00
|
|
|
fakeWorkerFilesLoadedCapability.reject(new Error(
|
|
|
|
'SystemJS or CommonJS must be used to load fake worker.'));
|
2015-11-24 02:46:40 +09:00
|
|
|
}
|
2016-10-15 00:57:53 +09:00
|
|
|
} else {
|
2018-10-05 20:54:16 +09:00
|
|
|
const loader = fakeWorkerFilesLoader || function() {
|
|
|
|
return loadScript(getWorkerSrc()).then(function() {
|
|
|
|
return window.pdfjsWorker.WorkerMessageHandler;
|
2016-10-15 00:57:53 +09:00
|
|
|
});
|
|
|
|
};
|
2018-10-05 20:54:16 +09:00
|
|
|
loader().then(fakeWorkerFilesLoadedCapability.resolve,
|
|
|
|
fakeWorkerFilesLoadedCapability.reject);
|
2015-10-28 02:55:15 +09:00
|
|
|
}
|
2016-03-03 10:16:38 +09:00
|
|
|
return fakeWorkerFilesLoadedCapability.promise;
|
2015-10-28 02:55:15 +09:00
|
|
|
}
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2016-01-16 06:05:46 +09:00
|
|
|
function createCDNWrapper(url) {
|
|
|
|
    // We will rely on the blob URL's property to specify the origin.
|
|
|
|
    // We want this function to fail in case createObjectURL or Blob do not
|
|
|
|
// exist or fail for some reason -- our Worker creation will fail anyway.
|
2018-11-09 00:22:26 +09:00
|
|
|
const wrapper = 'importScripts(\'' + url + '\');';
|
2016-01-16 06:05:46 +09:00
|
|
|
return URL.createObjectURL(new Blob([wrapper]));
|
|
|
|
}
|
|
|
|
|
2018-02-15 00:03:54 +09:00
|
|
|
/**
|
|
|
|
* @param {PDFWorkerParameters} params - The worker initialization parameters.
|
|
|
|
*/
|
2018-11-09 00:22:26 +09:00
|
|
|
class PDFWorker {
|
|
|
|
constructor({ name = null, port = null,
|
2018-11-09 00:22:26 +09:00
|
|
|
verbosity = getVerbosityLevel(), } = {}) {
|
|
|
|
if (port && pdfWorkerPorts.has(port)) {
|
|
|
|
throw new Error('Cannot use more than one PDFWorker per port');
|
|
|
|
}
|
2017-06-10 10:07:51 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
this.name = name;
|
|
|
|
this.destroyed = false;
|
|
|
|
this.postMessageTransfers = true;
|
2018-11-09 00:22:26 +09:00
|
|
|
this.verbosity = verbosity;
|
2015-01-06 12:45:01 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
this._readyCapability = createPromiseCapability();
|
|
|
|
this._port = null;
|
|
|
|
this._webWorker = null;
|
|
|
|
this._messageHandler = null;
|
2017-02-25 04:33:18 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
if (port) {
|
|
|
|
pdfWorkerPorts.set(port, this);
|
|
|
|
this._initializeFromPort(port);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
this._initialize();
|
2017-02-25 04:33:18 +09:00
|
|
|
}
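  // Usage sketch (assumptions: the library is consumed as `pdfjsLib` and
  // `GlobalWorkerOptions.workerSrc` has been configured): one `PDFWorker` can
  // be shared by multiple `getDocument` calls via the `worker` parameter.
  //
  //   const worker = new pdfjsLib.PDFWorker({ name: 'shared-worker', });
  //   const task1 = pdfjsLib.getDocument({ url: 'a.pdf', worker, });
  //   const task2 = pdfjsLib.getDocument({ url: 'b.pdf', worker, });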
|
|
|
|
|
2015-10-28 02:55:15 +09:00
|
|
|
get promise() {
|
|
|
|
return this._readyCapability.promise;
|
2018-11-09 00:22:26 +09:00
|
|
|
}
|
2015-10-28 02:55:15 +09:00
|
|
|
|
|
|
|
get port() {
|
|
|
|
return this._port;
|
2018-11-09 00:22:26 +09:00
|
|
|
}
|
2013-05-10 07:35:23 +09:00
|
|
|
|
2015-10-28 02:55:15 +09:00
|
|
|
get messageHandler() {
|
|
|
|
return this._messageHandler;
|
2018-11-09 00:22:26 +09:00
|
|
|
}
|
2015-10-28 02:55:15 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
_initializeFromPort(port) {
|
2017-02-25 04:33:18 +09:00
|
|
|
this._port = port;
|
|
|
|
this._messageHandler = new MessageHandler('main', 'worker', port);
|
2018-11-09 00:22:26 +09:00
|
|
|
this._messageHandler.on('ready', function() {
|
2017-02-25 04:33:18 +09:00
|
|
|
// Ignoring 'ready' event -- MessageHandler shall be already initialized
|
|
|
|
// and ready to accept the messages.
|
|
|
|
});
|
|
|
|
this._readyCapability.resolve();
|
2018-11-09 00:22:26 +09:00
|
|
|
}
|
2017-02-25 04:33:18 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
_initialize() {
|
2015-10-28 02:55:15 +09:00
|
|
|
    // If worker support isn't explicitly disabled and the browser has worker
|
2016-02-11 10:48:56 +09:00
|
|
|
// support, create a new web worker and test if it/the browser fulfills
|
2015-10-28 02:55:15 +09:00
|
|
|
// all requirements to run parts of pdf.js in a web worker.
|
|
|
|
    // Right now, the requirement is that a Uint8Array is still a
|
|
|
|
// Uint8Array as it arrives on the worker. (Chrome added this with v.15.)
|
2018-01-18 02:20:00 +09:00
|
|
|
if (typeof Worker !== 'undefined' && !isWorkerDisabled &&
|
2018-01-20 02:16:17 +09:00
|
|
|
!getMainThreadWorkerMessageHandler()) {
|
2018-10-07 21:28:16 +09:00
|
|
|
let workerSrc = getWorkerSrc();
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2015-10-28 02:55:15 +09:00
|
|
|
try {
|
2016-10-15 00:57:53 +09:00
|
|
|
// Wraps workerSrc path into blob URL, if the former does not belong
|
|
|
|
// to the same origin.
|
|
|
|
if (typeof PDFJSDev !== 'undefined' && PDFJSDev.test('GENERIC') &&
|
|
|
|
!isSameOrigin(window.location.href, workerSrc)) {
|
|
|
|
workerSrc = createCDNWrapper(
|
|
|
|
new URL(workerSrc, window.location).href);
|
|
|
|
}
|
|
|
|
|
2015-10-28 02:55:15 +09:00
|
|
|
// Some versions of FF can't create a worker on localhost, see:
|
|
|
|
// https://bugzilla.mozilla.org/show_bug.cgi?id=683280
|
2018-11-09 00:22:26 +09:00
|
|
|
const worker = new Worker(workerSrc);
|
|
|
|
const messageHandler = new MessageHandler('main', 'worker', worker);
|
|
|
|
const terminateEarly = () => {
|
2015-12-25 21:24:19 +09:00
|
|
|
worker.removeEventListener('error', onWorkerError);
|
2017-10-07 01:55:28 +09:00
|
|
|
messageHandler.destroy();
|
2015-12-25 21:24:19 +09:00
|
|
|
worker.terminate();
|
2015-10-28 02:55:15 +09:00
|
|
|
if (this.destroyed) {
|
|
|
|
this._readyCapability.reject(new Error('Worker was destroyed'));
|
2015-12-25 21:24:19 +09:00
|
|
|
} else {
|
|
|
|
// Fall back to fake worker if the termination is caused by an
|
|
|
|
// error (e.g. NetworkError / SecurityError).
|
|
|
|
this._setupFakeWorker();
|
|
|
|
}
|
2017-05-03 23:39:54 +09:00
|
|
|
};
|
2015-12-25 21:24:19 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
const onWorkerError = () => {
|
2015-12-25 21:24:19 +09:00
|
|
|
if (!this._webWorker) {
|
|
|
|
// Worker failed to initialize due to an error. Clean up and fall
|
|
|
|
// back to the fake worker.
|
|
|
|
terminateEarly();
|
|
|
|
}
|
2017-05-03 23:39:54 +09:00
|
|
|
};
|
2015-12-25 21:24:19 +09:00
|
|
|
worker.addEventListener('error', onWorkerError);
|
|
|
|
|
2017-05-03 23:39:54 +09:00
|
|
|
messageHandler.on('test', (data) => {
|
2015-12-25 21:24:19 +09:00
|
|
|
worker.removeEventListener('error', onWorkerError);
|
|
|
|
if (this.destroyed) {
|
|
|
|
terminateEarly();
|
2015-10-28 02:55:15 +09:00
|
|
|
return; // worker was destroyed
|
2013-11-12 12:30:26 +09:00
|
|
|
}
|
2019-09-05 17:30:09 +09:00
|
|
|
if (data) { // supportTypedArray
|
2015-10-28 02:55:15 +09:00
|
|
|
this._messageHandler = messageHandler;
|
|
|
|
this._port = worker;
|
|
|
|
this._webWorker = worker;
|
|
|
|
if (!data.supportTransfers) {
|
2017-07-01 02:59:52 +09:00
|
|
|
this.postMessageTransfers = false;
|
2015-10-28 02:55:15 +09:00
|
|
|
}
|
|
|
|
this._readyCapability.resolve();
|
2016-03-29 04:49:22 +09:00
|
|
|
// Send global setting, e.g. verbosity level.
|
2016-03-04 01:13:37 +09:00
|
|
|
messageHandler.send('configure', {
|
2018-02-15 01:35:08 +09:00
|
|
|
verbosity: this.verbosity,
|
2016-03-04 01:13:37 +09:00
|
|
|
});
|
2015-10-28 02:55:15 +09:00
|
|
|
} else {
|
|
|
|
this._setupFakeWorker();
|
2017-10-07 01:55:28 +09:00
|
|
|
messageHandler.destroy();
|
2015-10-28 02:55:15 +09:00
|
|
|
worker.terminate();
|
|
|
|
}
|
2017-05-03 23:39:54 +09:00
|
|
|
});
|
2015-10-28 02:55:15 +09:00
|
|
|
|
2017-05-03 23:39:54 +09:00
|
|
|
messageHandler.on('ready', (data) => {
|
2015-12-25 21:24:19 +09:00
|
|
|
worker.removeEventListener('error', onWorkerError);
|
2015-12-17 09:37:43 +09:00
|
|
|
if (this.destroyed) {
|
2015-12-25 21:24:19 +09:00
|
|
|
terminateEarly();
|
2015-12-17 09:37:43 +09:00
|
|
|
return; // worker was destroyed
|
|
|
|
}
|
|
|
|
try {
|
|
|
|
sendTest();
|
2016-12-10 22:28:27 +09:00
|
|
|
} catch (e) {
|
2015-12-17 09:37:43 +09:00
|
|
|
// We need to fall back to a fake worker.
|
|
|
|
this._setupFakeWorker();
|
|
|
|
}
|
2017-05-03 23:39:54 +09:00
|
|
|
});
|
2015-12-17 09:37:43 +09:00
|
|
|
|
2018-02-15 00:53:50 +09:00
|
|
|
const sendTest = () => {
|
|
|
|
let testObj = new Uint8Array([this.postMessageTransfers ? 255 : 0]);
|
2015-12-17 09:37:43 +09:00
|
|
|
// Some versions of Opera throw a DATA_CLONE_ERR on serializing the
|
|
|
|
// typed array. Also, checking if we can use transfers.
|
|
|
|
try {
|
|
|
|
messageHandler.send('test', testObj, [testObj.buffer]);
|
|
|
|
} catch (ex) {
|
|
|
|
warn('Cannot use postMessage transfers.');
|
2015-12-17 09:37:43 +09:00
|
|
|
testObj[0] = 0;
|
|
|
|
messageHandler.send('test', testObj);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
// It might take some time for the worker to initialize (especially when
// an AMD loader is used). We will try to send the test message immediately,
// and then again when the 'ready' message arrives. The worker shall only
// process the first 'test' message that it receives.
|
|
|
|
sendTest();
|
2015-10-28 02:55:15 +09:00
|
|
|
return;
|
|
|
|
} catch (e) {
|
|
|
|
info('The worker has been disabled.');
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Either workers are disabled, not supported, or have thrown an exception.
// Thus, we fall back to a fake worker.
|
|
|
|
this._setupFakeWorker();
|
2018-11-09 00:22:26 +09:00
|
|
|
}
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
_setupFakeWorker() {
|
2018-01-20 02:16:17 +09:00
|
|
|
if (!isWorkerDisabled) {
|
2015-12-22 04:46:50 +09:00
|
|
|
warn('Setting up fake worker.');
|
2016-03-29 04:49:22 +09:00
|
|
|
isWorkerDisabled = true;
|
2015-12-22 04:46:50 +09:00
|
|
|
}
|
2015-10-28 00:07:20 +09:00
|
|
|
|
2017-05-03 23:39:54 +09:00
|
|
|
setupFakeWorkerGlobal().then((WorkerMessageHandler) => {
|
2015-10-28 02:55:15 +09:00
|
|
|
if (this.destroyed) {
|
|
|
|
this._readyCapability.reject(new Error('Worker was destroyed'));
|
|
|
|
return;
|
2013-11-12 12:30:26 +09:00
|
|
|
}
|
2018-11-09 00:22:26 +09:00
|
|
|
const port = new LoopbackPort();
|
2015-10-28 02:55:15 +09:00
|
|
|
this._port = port;
|
|
|
|
|
|
|
|
// All fake workers use the same port, hence the id must be unique.
|
2018-11-09 00:22:26 +09:00
|
|
|
const id = 'fake' + (nextFakeWorkerId++);
|
2015-10-28 02:55:15 +09:00
|
|
|
|
|
|
|
// If the main thread is our worker, set up the handling for the
// messages -- the main thread sends to itself.
|
2018-11-09 00:22:26 +09:00
|
|
|
const workerHandler = new MessageHandler(id + '_worker', id, port);
|
2016-03-03 10:16:38 +09:00
|
|
|
WorkerMessageHandler.setup(workerHandler, port);
|
2015-10-28 02:55:15 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
const messageHandler = new MessageHandler(id, id + '_worker', port);
|
2015-10-28 02:55:15 +09:00
|
|
|
this._messageHandler = messageHandler;
|
|
|
|
this._readyCapability.resolve();
|
2018-10-05 20:54:16 +09:00
|
|
|
}).catch((reason) => {
|
|
|
|
this._readyCapability.reject(
|
|
|
|
new Error(`Setting up fake worker failed: "${reason.message}".`));
|
2017-05-03 23:39:54 +09:00
|
|
|
});
|
2018-11-09 00:22:26 +09:00
|
|
|
}
|
2015-10-28 02:55:15 +09:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Destroys the worker instance.
|
|
|
|
*/
|
2018-11-09 00:22:26 +09:00
|
|
|
destroy() {
|
2015-10-28 02:55:15 +09:00
|
|
|
this.destroyed = true;
|
|
|
|
if (this._webWorker) {
|
|
|
|
// We only need to terminate the resources created by an actual web worker.
|
|
|
|
this._webWorker.terminate();
|
|
|
|
this._webWorker = null;
|
|
|
|
}
|
2017-08-03 05:48:42 +09:00
|
|
|
pdfWorkerPorts.delete(this._port);
|
2015-10-28 02:55:15 +09:00
|
|
|
this._port = null;
|
|
|
|
if (this._messageHandler) {
|
2017-10-07 01:55:28 +09:00
|
|
|
this._messageHandler.destroy();
|
2015-10-28 02:55:15 +09:00
|
|
|
this._messageHandler = null;
|
2012-04-12 07:52:15 +09:00
|
|
|
}
|
2017-06-10 10:07:51 +09:00
|
|
|
}
|
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
/**
|
|
|
|
* @param {PDFWorkerParameters} params - The worker initialization
|
|
|
|
* parameters.
|
|
|
|
*/
|
|
|
|
static fromPort(params) {
|
|
|
|
if (!params || !params.port) {
|
|
|
|
throw new Error('PDFWorker.fromPort - invalid method signature.');
|
|
|
|
}
|
|
|
|
if (pdfWorkerPorts.has(params.port)) {
|
|
|
|
return pdfWorkerPorts.get(params.port);
|
|
|
|
}
|
|
|
|
return new PDFWorker(params);
|
|
|
|
}
|
2018-01-29 23:58:40 +09:00
|
|
|
|
2018-11-09 00:22:26 +09:00
|
|
|
static getWorkerSrc() {
|
|
|
|
return getWorkerSrc();
|
|
|
|
}
|
|
|
|
}
|
2015-10-28 02:55:15 +09:00
|
|
|
return PDFWorker;
|
|
|
|
})();
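// NOTE: A minimal usage sketch, not part of this module, showing how a single
// `PDFWorker` can be shared between multiple `getDocument` calls so that only
// one web worker is spun up; the file URLs below are purely illustrative.
//
//   const pdfWorker = new PDFWorker({ name: 'shared-worker', });
//
//   const taskOne = getDocument({ url: 'one.pdf', worker: pdfWorker, });
//   const taskTwo = getDocument({ url: 'two.pdf', worker: pdfWorker, });
//
//   Promise.all([taskOne.promise, taskTwo.promise]).then(([docOne, docTwo]) => {
//     console.log(docOne.numPages, docTwo.numPages);
//   });
//
// Alternatively, `PDFWorker.fromPort({ port, })` re-uses an existing port and
// returns the `PDFWorker` instance already registered for that port, if any.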
|
|
|
|
|
2012-04-14 01:25:08 +09:00
|
|
|
/**
|
|
|
|
* For internal use only.
|
2014-01-22 04:28:18 +09:00
|
|
|
* @ignore
|
2012-04-14 01:25:08 +09:00
|
|
|
*/
|
2018-08-27 01:32:25 +09:00
|
|
|
class WorkerTransport {
|
|
|
|
constructor(messageHandler, loadingTask, networkStream, params) {
|
2015-10-28 02:55:15 +09:00
|
|
|
this.messageHandler = messageHandler;
|
|
|
|
this.loadingTask = loadingTask;
|
2012-10-29 05:10:34 +09:00
|
|
|
this.commonObjs = new PDFObjects();
|
|
|
|
this.fontLoader = new FontLoader({
|
|
|
|
docId: loadingTask.docId,
|
|
|
|
onUnsupportedFeature: this._onUnsupportedFeature.bind(this),
|
|
|
|
});
|
2018-02-18 00:57:24 +09:00
|
|
|
this._params = params;
|
2018-06-06 03:30:14 +09:00
|
|
|
this.CMapReaderFactory = new params.CMapReaderFactory({
|
2018-02-18 00:57:24 +09:00
|
|
|
baseUrl: params.cMapUrl,
|
|
|
|
isCompressed: params.cMapPacked,
|
2017-02-12 23:54:41 +09:00
|
|
|
});
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2015-10-21 07:45:55 +09:00
|
|
|
this.destroyed = false;
|
|
|
|
this.destroyCapability = null;
|
2016-12-31 21:59:07 +09:00
|
|
|
this._passwordCapability = null;
|
2015-01-06 12:45:01 +09:00
|
|
|
|
2017-07-01 02:59:52 +09:00
|
|
|
this._networkStream = networkStream;
|
|
|
|
this._fullReader = null;
|
|
|
|
this._lastProgress = null;
|
|
|
|
|
2012-04-12 07:52:15 +09:00
|
|
|
this.pageCache = [];
|
2014-05-09 05:02:53 +09:00
|
|
|
this.pagePromises = [];
|
2014-04-30 00:07:05 +09:00
|
|
|
this.downloadInfoCapability = createPromiseCapability();
|
2013-05-10 07:35:23 +09:00
|
|
|
|
2015-10-28 02:55:15 +09:00
|
|
|
this.setupMessageHandler();
|
2012-04-12 07:52:15 +09:00
|
|
|
}
|
2015-10-21 07:45:55 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
destroy() {
|
|
|
|
if (this.destroyCapability) {
|
|
|
|
return this.destroyCapability.promise;
|
|
|
|
}
|
2015-10-21 07:45:55 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
this.destroyed = true;
|
|
|
|
this.destroyCapability = createPromiseCapability();
|
2016-12-31 21:59:07 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
if (this._passwordCapability) {
|
|
|
|
this._passwordCapability.reject(
|
|
|
|
new Error('Worker was destroyed during onPassword callback'));
|
|
|
|
}
|
2017-07-01 02:59:52 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
const waitOn = [];
|
|
|
|
// We need to wait for all renderings to be completed, e.g.
|
|
|
|
// timeout/rAF can take a long time.
|
|
|
|
this.pageCache.forEach(function(page) {
|
|
|
|
if (page) {
|
|
|
|
waitOn.push(page._destroy());
|
|
|
|
}
|
|
|
|
});
|
2019-05-30 23:25:48 +09:00
|
|
|
this.pageCache.length = 0;
|
|
|
|
this.pagePromises.length = 0;
|
2018-08-27 01:32:25 +09:00
|
|
|
// We also need to wait for the worker to finish its long running tasks.
|
|
|
|
const terminated = this.messageHandler.sendWithPromise('Terminate', null);
|
|
|
|
waitOn.push(terminated);
|
|
|
|
Promise.all(waitOn).then(() => {
|
|
|
|
this.fontLoader.clear();
|
|
|
|
if (this._networkStream) {
|
2019-08-01 23:31:32 +09:00
|
|
|
this._networkStream.cancelAllRequests(
|
|
|
|
new AbortException('Worker was terminated.'));
|
2018-08-27 01:32:25 +09:00
|
|
|
}
|
2013-08-13 02:48:06 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
if (this.messageHandler) {
|
|
|
|
this.messageHandler.destroy();
|
|
|
|
this.messageHandler = null;
|
|
|
|
}
|
|
|
|
this.destroyCapability.resolve();
|
|
|
|
}, this.destroyCapability.reject);
|
|
|
|
return this.destroyCapability.promise;
|
|
|
|
}
|
|
|
|
|
|
|
|
setupMessageHandler() {
|
|
|
|
const { messageHandler, loadingTask, } = this;
|
2017-07-01 02:59:52 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('GetReader', (data, sink) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
assert(this._networkStream);
|
|
|
|
this._fullReader = this._networkStream.getFullReader();
|
|
|
|
this._fullReader.onProgress = (evt) => {
|
|
|
|
this._lastProgress = {
|
|
|
|
loaded: evt.loaded,
|
|
|
|
total: evt.total,
|
2017-07-01 02:59:52 +09:00
|
|
|
};
|
2018-08-27 01:32:25 +09:00
|
|
|
};
|
|
|
|
sink.onPull = () => {
|
|
|
|
this._fullReader.read().then(function({ value, done, }) {
|
|
|
|
if (done) {
|
|
|
|
sink.close();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
assert(isArrayBuffer(value));
|
|
|
|
// Enqueue the data chunk into the sink, and transfer it
// to the other side as a `Transferable` object.
|
|
|
|
sink.enqueue(new Uint8Array(value), 1, [value]);
|
|
|
|
}).catch((reason) => {
|
|
|
|
sink.error(reason);
|
|
|
|
});
|
|
|
|
};
|
2017-07-01 02:59:52 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
sink.onCancel = (reason) => {
|
|
|
|
this._fullReader.cancel(reason);
|
|
|
|
};
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('ReaderHeadersReady', (data) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
const headersCapability = createPromiseCapability();
|
|
|
|
const fullReader = this._fullReader;
|
|
|
|
fullReader.headersReady.then(() => {
|
|
|
|
// If streaming and/or range requests are not supported, this is our
// only way to report loading progress.
|
|
|
|
if (!fullReader.isStreamingSupported || !fullReader.isRangeSupported) {
|
|
|
|
if (this._lastProgress && loadingTask.onProgress) {
|
|
|
|
loadingTask.onProgress(this._lastProgress);
|
2017-07-01 02:59:52 +09:00
|
|
|
}
|
2018-08-27 01:32:25 +09:00
|
|
|
fullReader.onProgress = (evt) => {
|
|
|
|
if (loadingTask.onProgress) {
|
|
|
|
loadingTask.onProgress({
|
|
|
|
loaded: evt.loaded,
|
|
|
|
total: evt.total,
|
|
|
|
});
|
2017-07-01 02:59:52 +09:00
|
|
|
}
|
2017-04-25 23:17:18 +09:00
|
|
|
};
|
2013-05-10 07:35:23 +09:00
|
|
|
}
|
2012-10-16 19:10:37 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
headersCapability.resolve({
|
|
|
|
isStreamingSupported: fullReader.isStreamingSupported,
|
|
|
|
isRangeSupported: fullReader.isRangeSupported,
|
|
|
|
contentLength: fullReader.contentLength,
|
|
|
|
});
|
|
|
|
}, headersCapability.reject);
|
2013-01-30 03:13:28 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
return headersCapability.promise;
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2014-09-13 23:47:16 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('GetRangeReader', (data, sink) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
assert(this._networkStream);
|
|
|
|
const rangeReader =
|
|
|
|
this._networkStream.getRangeReader(data.begin, data.end);
|
2012-10-16 19:10:37 +09:00
|
|
|
|
2019-03-28 01:54:05 +09:00
|
|
|
// When streaming is enabled, it's possible that the data requested here
|
|
|
|
// has already been fetched via the `_fullRequestReader` implementation.
|
|
|
|
// However, given that the PDF data is loaded asynchronously on the
|
|
|
|
// main-thread and then sent via `postMessage` to the worker-thread,
|
|
|
|
// it may not have been available during parsing (hence the attempt to
|
|
|
|
// use range requests here).
|
|
|
|
//
|
|
|
|
// To avoid wasting time and resources here, we'll thus *not* dispatch
|
|
|
|
// range requests if the data was already loaded but has not been sent to
|
|
|
|
// the worker-thread yet (which will happen via the `_fullRequestReader`).
|
|
|
|
if (!rangeReader) {
|
|
|
|
sink.close();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
sink.onPull = () => {
|
|
|
|
rangeReader.read().then(function({ value, done, }) {
|
|
|
|
if (done) {
|
|
|
|
sink.close();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
assert(isArrayBuffer(value));
|
|
|
|
sink.enqueue(new Uint8Array(value), 1, [value]);
|
|
|
|
}).catch((reason) => {
|
|
|
|
sink.error(reason);
|
|
|
|
});
|
|
|
|
};
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
sink.onCancel = (reason) => {
|
|
|
|
rangeReader.cancel(reason);
|
|
|
|
};
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('GetDoc', ({ pdfInfo, }) => {
|
2019-03-11 20:43:44 +09:00
|
|
|
this._numPages = pdfInfo.numPages;
|
|
|
|
loadingTask._capability.resolve(new PDFDocumentProxy(pdfInfo, this));
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('PasswordRequest', (exception) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
this._passwordCapability = createPromiseCapability();
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
if (loadingTask.onPassword) {
|
|
|
|
const updatePassword = (password) => {
|
|
|
|
this._passwordCapability.resolve({
|
|
|
|
password,
|
|
|
|
});
|
|
|
|
};
|
|
|
|
try {
|
|
|
|
loadingTask.onPassword(updatePassword, exception.code);
|
|
|
|
} catch (ex) {
|
|
|
|
this._passwordCapability.reject(ex);
|
2015-10-30 03:06:22 +09:00
|
|
|
}
|
2018-08-27 01:32:25 +09:00
|
|
|
} else {
|
|
|
|
this._passwordCapability.reject(
|
|
|
|
new PasswordException(exception.message, exception.code));
|
|
|
|
}
|
|
|
|
return this._passwordCapability.promise;
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
|
|
|
messageHandler.on('PasswordException', function(exception) {
|
|
|
|
loadingTask._capability.reject(
|
|
|
|
new PasswordException(exception.message, exception.code));
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
|
|
|
messageHandler.on('InvalidPDF', function(exception) {
|
|
|
|
loadingTask._capability.reject(
|
|
|
|
new InvalidPDFException(exception.message));
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
|
|
|
messageHandler.on('MissingPDF', function(exception) {
|
|
|
|
loadingTask._capability.reject(
|
|
|
|
new MissingPDFException(exception.message));
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
|
|
|
messageHandler.on('UnexpectedResponse', function(exception) {
|
|
|
|
loadingTask._capability.reject(
|
|
|
|
new UnexpectedResponseException(exception.message, exception.status));
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
|
|
|
messageHandler.on('UnknownError', function(exception) {
|
|
|
|
loadingTask._capability.reject(
|
|
|
|
new UnknownErrorException(exception.message, exception.details));
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('DataLoaded', (data) => {
|
|
|
|
// For consistency: Ensure that progress is always reported when the
|
|
|
|
// entire PDF file has been loaded, regardless of how it was fetched.
|
|
|
|
if (loadingTask.onProgress) {
|
|
|
|
loadingTask.onProgress({
|
|
|
|
loaded: data.length,
|
|
|
|
total: data.length,
|
|
|
|
});
|
|
|
|
}
|
2018-08-27 01:32:25 +09:00
|
|
|
this.downloadInfoCapability.resolve(data);
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('StartRenderPage', (data) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
if (this.destroyed) {
|
|
|
|
return; // Ignore any pending requests if the worker was terminated.
|
|
|
|
}
|
2015-10-30 03:06:22 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
const page = this.pageCache[data.pageIndex];
|
|
|
|
page._stats.timeEnd('Page Request');
|
|
|
|
page._startRenderPage(data.transparency, data.intent);
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('commonobj', (data) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
if (this.destroyed) {
|
|
|
|
return; // Ignore any pending requests if the worker was terminated.
|
|
|
|
}
|
2012-10-29 05:10:34 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
const [id, type, exportedData] = data;
|
2018-11-07 22:36:29 +09:00
|
|
|
if (this.commonObjs.has(id)) {
|
2018-08-27 01:32:25 +09:00
|
|
|
return;
|
|
|
|
}
|
2015-10-30 03:06:22 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
switch (type) {
|
|
|
|
case 'Font':
|
|
|
|
const params = this._params;
|
2012-10-29 05:10:34 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
if ('error' in exportedData) {
|
|
|
|
const exportedError = exportedData.error;
|
|
|
|
warn(`Error during font loading: ${exportedError}`);
|
|
|
|
this.commonObjs.resolve(id, exportedError);
|
2012-04-12 07:52:15 +09:00
|
|
|
break;
|
2018-08-27 01:32:25 +09:00
|
|
|
}
|
2015-10-30 03:06:22 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
let fontRegistry = null;
|
|
|
|
if (params.pdfBug && globalScope.FontInspector &&
|
|
|
|
globalScope.FontInspector.enabled) {
|
|
|
|
fontRegistry = {
|
|
|
|
registerFont(font, url) {
|
|
|
|
globalScope['FontInspector'].fontAdded(font, url);
|
|
|
|
},
|
|
|
|
};
|
|
|
|
}
|
|
|
|
const font = new FontFaceObject(exportedData, {
|
|
|
|
isEvalSupported: params.isEvalSupported,
|
|
|
|
disableFontFace: params.disableFontFace,
|
|
|
|
ignoreErrors: params.ignoreErrors,
|
|
|
|
onUnsupportedFeature: this._onUnsupportedFeature.bind(this),
|
|
|
|
fontRegistry,
|
2013-06-15 23:04:54 +09:00
|
|
|
});
|
2012-06-24 04:48:33 +09:00
|
|
|
|
2019-02-10 22:11:23 +09:00
|
|
|
this.fontLoader.bind(font).then(() => {
|
2019-02-10 22:01:49 +09:00
|
|
|
this.commonObjs.resolve(id, font);
|
|
|
|
}, (reason) => {
|
|
|
|
messageHandler.sendWithPromise('FontFallback', {
|
|
|
|
id,
|
|
|
|
}).finally(() => {
|
|
|
|
this.commonObjs.resolve(id, font);
|
|
|
|
});
|
2019-02-10 22:01:49 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
break;
|
|
|
|
case 'FontPath':
|
2019-04-11 19:26:15 +09:00
|
|
|
case 'FontType3Res':
|
2018-08-27 01:32:25 +09:00
|
|
|
this.commonObjs.resolve(id, exportedData);
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
throw new Error(`Got unknown common object type ${type}`);
|
|
|
|
}
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2015-10-30 03:06:22 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('obj', (data) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
if (this.destroyed) {
|
2019-05-10 19:54:06 +09:00
|
|
|
// Ignore any pending requests if the worker was terminated.
|
|
|
|
return undefined;
|
2018-08-27 01:32:25 +09:00
|
|
|
}
|
2016-03-11 22:59:09 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
const [id, pageIndex, type, imageData] = data;
|
|
|
|
const pageProxy = this.pageCache[pageIndex];
|
2018-11-07 22:36:29 +09:00
|
|
|
if (pageProxy.objs.has(id)) {
|
2019-05-10 19:54:06 +09:00
|
|
|
return undefined;
|
2018-08-27 01:32:25 +09:00
|
|
|
}
|
2016-03-11 22:59:09 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
switch (type) {
|
|
|
|
case 'JpegStream':
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
const img = new Image();
|
|
|
|
img.onload = function() {
|
|
|
|
resolve(img);
|
|
|
|
};
|
|
|
|
img.onerror = function() {
|
|
|
|
// Note that when the browser image loading/decoding fails,
// we'll fall back to the built-in PDF.js JPEG decoder; see
// `PartialEvaluator.buildPaintImageXObject` in the
// `src/core/evaluator.js` file.
|
2019-03-14 22:01:55 +09:00
|
|
|
reject(new Error('Error during JPEG image loading'));
|
|
|
|
|
|
|
|
// Always remember to release the image data if errors occurred.
|
|
|
|
releaseImageResources(img);
|
2018-08-27 01:32:25 +09:00
|
|
|
};
|
|
|
|
img.src = imageData;
|
|
|
|
}).then((img) => {
|
|
|
|
pageProxy.objs.resolve(id, img);
|
|
|
|
});
|
|
|
|
case 'Image':
|
|
|
|
pageProxy.objs.resolve(id, imageData);
|
|
|
|
|
|
|
|
// Heuristic that allows us to avoid storing large amounts of image data.
|
|
|
|
const MAX_IMAGE_SIZE_TO_STORE = 8000000;
|
|
|
|
if (imageData && 'data' in imageData &&
|
|
|
|
imageData.data.length > MAX_IMAGE_SIZE_TO_STORE) {
|
|
|
|
pageProxy.cleanupAfterRender = true;
|
2016-03-11 22:59:09 +09:00
|
|
|
}
|
2018-08-27 01:32:25 +09:00
|
|
|
break;
|
|
|
|
default:
|
|
|
|
throw new Error(`Got unknown object type ${type}`);
|
|
|
|
}
|
2019-05-10 19:54:06 +09:00
|
|
|
return undefined;
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('DocProgress', (data) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
if (this.destroyed) {
|
|
|
|
return; // Ignore any pending requests if the worker was terminated.
|
|
|
|
}
|
2015-12-01 05:42:47 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
if (loadingTask.onProgress) {
|
|
|
|
loadingTask.onProgress({
|
|
|
|
loaded: data.loaded,
|
|
|
|
total: data.total,
|
|
|
|
});
|
|
|
|
}
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2015-10-30 03:06:22 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('UnsupportedFeature',
|
|
|
|
this._onUnsupportedFeature.bind(this));
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2019-08-31 21:16:06 +09:00
|
|
|
messageHandler.on('JpegDecode', (data) => {
|
2018-06-13 18:02:10 +09:00
|
|
|
if (this.destroyed) {
|
2018-08-27 01:32:25 +09:00
|
|
|
return Promise.reject(new Error('Worker was destroyed'));
|
2018-06-13 18:02:10 +09:00
|
|
|
}
|
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
if (typeof document === 'undefined') {
|
|
|
|
// Make sure that this code is not executing in Node.js, since it uses
// the DOM `Image` element, which is not available there.
|
|
|
|
return Promise.reject(new Error('"document" is not defined.'));
|
2014-02-24 03:16:14 +09:00
|
|
|
}
|
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
const [imageUrl, components] = data;
|
|
|
|
if (components !== 3 && components !== 1) {
|
|
|
|
return Promise.reject(
|
|
|
|
new Error('Only 3 components or 1 component can be returned'));
|
2014-03-14 21:24:04 +09:00
|
|
|
}
|
2012-04-15 05:54:31 +09:00
|
|
|
|
2018-11-09 00:24:20 +09:00
|
|
|
return new Promise(function(resolve, reject) {
|
2018-08-27 01:32:25 +09:00
|
|
|
const img = new Image();
|
2018-11-09 00:24:20 +09:00
|
|
|
img.onload = function() {
|
2019-02-28 20:05:11 +09:00
|
|
|
const { width, height, } = img;
|
2018-08-27 01:32:25 +09:00
|
|
|
const size = width * height;
|
|
|
|
const rgbaLength = size * 4;
|
|
|
|
const buf = new Uint8ClampedArray(size * components);
|
2019-02-28 20:05:11 +09:00
|
|
|
let tmpCanvas = document.createElement('canvas');
|
2018-08-27 01:32:25 +09:00
|
|
|
tmpCanvas.width = width;
|
|
|
|
tmpCanvas.height = height;
|
2019-02-28 20:05:11 +09:00
|
|
|
let tmpCtx = tmpCanvas.getContext('2d');
|
2018-08-27 01:32:25 +09:00
|
|
|
tmpCtx.drawImage(img, 0, 0);
|
|
|
|
const data = tmpCtx.getImageData(0, 0, width, height).data;
|
|
|
|
|
|
|
|
if (components === 3) {
|
|
|
|
for (let i = 0, j = 0; i < rgbaLength; i += 4, j += 3) {
|
|
|
|
buf[j] = data[i];
|
|
|
|
buf[j + 1] = data[i + 1];
|
|
|
|
buf[j + 2] = data[i + 2];
|
|
|
|
}
|
|
|
|
} else if (components === 1) {
|
|
|
|
for (let i = 0, j = 0; i < rgbaLength; i += 4, j++) {
|
|
|
|
buf[j] = data[i];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
resolve({ data: buf, width, height, });
|
2019-02-28 20:05:11 +09:00
|
|
|
|
2019-03-14 22:01:55 +09:00
|
|
|
// Immediately release the image data once decoding has finished.
|
|
|
|
releaseImageResources(img);
|
2019-02-28 20:05:11 +09:00
|
|
|
// Zeroing the width and height causes Firefox to release graphics
// resources immediately, which can greatly reduce memory consumption.
|
|
|
|
tmpCanvas.width = 0;
|
|
|
|
tmpCanvas.height = 0;
|
|
|
|
tmpCanvas = null;
|
|
|
|
tmpCtx = null;
|
2018-08-27 01:32:25 +09:00
|
|
|
};
|
2018-11-09 00:24:20 +09:00
|
|
|
img.onerror = function() {
|
2018-08-27 01:32:25 +09:00
|
|
|
reject(new Error('JpegDecode failed to load image'));
|
2019-03-14 22:01:55 +09:00
|
|
|
|
|
|
|
// Always remember to release the image data if errors occurred.
|
|
|
|
releaseImageResources(img);
|
2018-08-27 01:32:25 +09:00
|
|
|
};
|
|
|
|
img.src = imageUrl;
|
2016-09-05 21:43:16 +09:00
|
|
|
});
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2013-11-14 08:27:46 +09:00
|
|
|
|
|
|
|
messageHandler.on('FetchBuiltInCMap', (data, sink) => {
|
2018-08-27 01:32:25 +09:00
|
|
|
if (this.destroyed) {
|
|
|
|
sink.error(new Error('Worker was destroyed'));
|
|
|
|
return;
|
2018-08-27 01:32:25 +09:00
|
|
|
}
|
|
|
|
let fetched = false;
|
|
|
|
|
|
|
|
sink.onPull = () => {
|
|
|
|
if (fetched) {
|
|
|
|
sink.close();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
fetched = true;
|
|
|
|
|
|
|
|
this.CMapReaderFactory.fetch(data).then(function(builtInCMap) {
|
|
|
|
sink.enqueue(builtInCMap, 1, [builtInCMap.cMapData.buffer]);
|
|
|
|
}).catch(function(reason) {
|
|
|
|
sink.error(reason);
|
|
|
|
});
|
|
|
|
};
|
2019-08-31 21:16:06 +09:00
|
|
|
});
|
2018-08-27 01:32:25 +09:00
|
|
|
}
|
2013-02-07 08:19:29 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
_onUnsupportedFeature({ featureId, }) {
|
|
|
|
if (this.destroyed) {
|
|
|
|
return; // Ignore any pending requests if the worker was terminated.
|
|
|
|
}
|
|
|
|
if (this.loadingTask.onUnsupportedFeature) {
|
|
|
|
this.loadingTask.onUnsupportedFeature(featureId);
|
|
|
|
}
|
|
|
|
}
|
2013-11-15 06:43:38 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getData() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetData', null);
|
|
|
|
}
|
|
|
|
|
|
|
|
getPage(pageNumber) {
|
|
|
|
if (!Number.isInteger(pageNumber) ||
|
2019-03-11 20:43:44 +09:00
|
|
|
pageNumber <= 0 || pageNumber > this._numPages) {
|
2018-08-27 01:32:25 +09:00
|
|
|
return Promise.reject(new Error('Invalid page request'));
|
|
|
|
}
|
|
|
|
|
|
|
|
const pageIndex = pageNumber - 1;
|
|
|
|
if (pageIndex in this.pagePromises) {
|
|
|
|
return this.pagePromises[pageIndex];
|
|
|
|
}
|
|
|
|
const promise = this.messageHandler.sendWithPromise('GetPage', {
|
|
|
|
pageIndex,
|
|
|
|
}).then((pageInfo) => {
|
|
|
|
if (this.destroyed) {
|
|
|
|
throw new Error('Transport destroyed');
|
2018-08-11 23:00:48 +09:00
|
|
|
}
|
2018-08-27 01:32:25 +09:00
|
|
|
const page = new PDFPageProxy(pageIndex, pageInfo, this,
|
|
|
|
this._params.pdfBug);
|
|
|
|
this.pageCache[pageIndex] = page;
|
|
|
|
return page;
|
|
|
|
});
|
|
|
|
this.pagePromises[pageIndex] = promise;
|
|
|
|
return promise;
|
|
|
|
}
|
2014-10-05 22:56:40 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getPageIndex(ref) {
|
|
|
|
return this.messageHandler.sendWithPromise('GetPageIndex', {
|
|
|
|
ref,
|
|
|
|
}).catch(function(reason) {
|
|
|
|
return Promise.reject(new Error(reason));
|
|
|
|
});
|
|
|
|
}
|
2015-12-26 05:57:08 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getAnnotations(pageIndex, intent) {
|
|
|
|
return this.messageHandler.sendWithPromise('GetAnnotations', {
|
|
|
|
pageIndex,
|
|
|
|
intent,
|
|
|
|
});
|
|
|
|
}
|
2017-07-18 20:08:02 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getDestinations() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetDestinations', null);
|
|
|
|
}
|
2014-03-19 05:32:47 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getDestination(id) {
|
|
|
|
if (typeof id !== 'string') {
|
|
|
|
return Promise.reject(new Error('Invalid destination request.'));
|
|
|
|
}
|
|
|
|
return this.messageHandler.sendWithPromise('GetDestination', {
|
|
|
|
id,
|
|
|
|
});
|
|
|
|
}
|
2014-05-08 04:15:34 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getPageLabels() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetPageLabels', null);
|
|
|
|
}
|
2014-05-08 04:06:44 +09:00
|
|
|
|
2019-04-03 20:48:18 +09:00
|
|
|
getPageLayout() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetPageLayout', null);
|
|
|
|
}
|
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getPageMode() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetPageMode', null);
|
|
|
|
}
|
2014-05-08 04:38:40 +09:00
|
|
|
|
2019-04-14 20:13:59 +09:00
|
|
|
getViewerPreferences() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetViewerPreferences', null);
|
|
|
|
}
|
|
|
|
|
2018-12-06 04:09:15 +09:00
|
|
|
getOpenActionDestination() {
|
2019-04-14 20:13:59 +09:00
|
|
|
return this.messageHandler.sendWithPromise('GetOpenActionDestination',
|
2018-12-06 04:09:15 +09:00
|
|
|
null);
|
|
|
|
}
|
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getAttachments() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetAttachments', null);
|
|
|
|
}
|
2014-06-16 23:52:04 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getJavaScript() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetJavaScript', null);
|
|
|
|
}
|
2018-02-18 06:08:45 +09:00
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getOutline() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetOutline', null);
|
|
|
|
}
|
2012-04-12 07:52:15 +09:00
|
|
|
|
2018-08-27 04:37:05 +09:00
|
|
|
getPermissions() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetPermissions', null);
|
|
|
|
}
|
|
|
|
|
2018-08-27 01:32:25 +09:00
|
|
|
getMetadata() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetMetadata', null).
|
|
|
|
then((results) => {
|
|
|
|
return {
|
|
|
|
info: results[0],
|
|
|
|
metadata: (results[1] ? new Metadata(results[1]) : null),
|
|
|
|
contentDispositionFilename: (this._fullReader ?
|
|
|
|
this._fullReader.filename : null),
|
|
|
|
};
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
getStats() {
|
|
|
|
return this.messageHandler.sendWithPromise('GetStats', null);
|
|
|
|
}
|
|
|
|
|
|
|
|
startCleanup() {
|
|
|
|
this.messageHandler.sendWithPromise('Cleanup', null).then(() => {
|
|
|
|
for (let i = 0, ii = this.pageCache.length; i < ii; i++) {
|
|
|
|
const page = this.pageCache[i];
|
|
|
|
if (page) {
|
|
|
|
page.cleanup();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
this.commonObjs.clear();
|
|
|
|
this.fontLoader.clear();
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
get loadingParams() {
|
|
|
|
const params = this._params;
|
|
|
|
return shadow(this, 'loadingParams', {
|
|
|
|
disableAutoFetch: params.disableAutoFetch,
|
|
|
|
disableCreateObjectURL: params.disableCreateObjectURL,
|
|
|
|
disableFontFace: params.disableFontFace,
|
|
|
|
nativeImageDecoderSupport: params.nativeImageDecoderSupport,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
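// NOTE: `WorkerTransport` is internal, but its message handlers drive the
// public callbacks of a loading task. A minimal sketch, not part of this
// module, with an illustrative URL:
//
//   const loadingTask = getDocument('protected.pdf');
//
//   loadingTask.onPassword = (updatePassword, reason) => {
//     // Invoked via the 'PasswordRequest' handler above.
//     updatePassword(window.prompt('Enter the PDF password:'));
//   };
//   loadingTask.onProgress = ({ loaded, total, }) => {
//     // Driven by the 'DocProgress'/'DataLoaded' handlers above.
//     console.log(`Loaded ${loaded} of ${total} bytes.`);
//   };
//   loadingTask.onUnsupportedFeature = (featureId) => {
//     console.warn(`Unsupported feature: ${featureId}`);
//   };
//
//   loadingTask.promise.then((pdfDocument) => { /* ... */ });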
|
2013-08-01 03:17:36 +09:00
|
|
|
|
|
|
|
/**
|
2018-11-07 22:17:14 +09:00
|
|
|
* A PDF document and page are built of many objects, e.g. there are objects
* for fonts, images, and rendering code. These objects may get processed
* inside a worker. This class implements some basic methods to manage them.
|
2014-01-22 04:28:18 +09:00
|
|
|
* @ignore
|
2013-08-13 02:48:06 +09:00
|
|
|
*/
|
2018-11-07 22:17:14 +09:00
|
|
|
class PDFObjects {
|
|
|
|
constructor() {
|
|
|
|
this._objs = Object.create(null);
|
2013-08-13 02:48:06 +09:00
|
|
|
}
|
|
|
|
|
2018-11-07 22:17:14 +09:00
|
|
|
/**
|
|
|
|
* Ensures there is an object defined for `objId`.
|
|
|
|
* @private
|
|
|
|
*/
|
|
|
|
_ensureObj(objId) {
|
|
|
|
if (this._objs[objId]) {
|
|
|
|
return this._objs[objId];
|
|
|
|
}
|
|
|
|
return this._objs[objId] = {
|
|
|
|
capability: createPromiseCapability(),
|
|
|
|
data: null,
|
|
|
|
resolved: false,
|
|
|
|
};
|
|
|
|
}
|
2013-08-13 02:48:06 +09:00
|
|
|
|
2018-11-07 22:17:14 +09:00
|
|
|
/**
|
|
|
|
* If called *without* a callback, this returns the data of `objId`, which
* must already be resolved; if it isn't, this method throws.
*
* If called *with* a callback, the callback is called with the data of the
* object once the object is resolved. That means, if you call this method
* and the object is already resolved, the callback gets called right away.
|
|
|
|
*/
|
|
|
|
get(objId, callback = null) {
|
|
|
|
// If there is a callback, then the get can be async and the object is
|
|
|
|
// not required to be resolved right now.
|
|
|
|
if (callback) {
|
|
|
|
this._ensureObj(objId).capability.promise.then(callback);
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
// If there isn't a callback, the user expects to get the resolved data
|
|
|
|
// directly.
|
|
|
|
const obj = this._objs[objId];
|
|
|
|
// If there isn't an object yet or the object isn't resolved, then the
|
|
|
|
// data isn't ready yet!
|
|
|
|
if (!obj || !obj.resolved) {
|
|
|
|
throw new Error(`Requesting object that isn't resolved yet ${objId}.`);
|
|
|
|
}
|
|
|
|
return obj.data;
|
|
|
|
}
|
2013-08-13 02:48:06 +09:00
|
|
|
|
2018-11-07 22:36:29 +09:00
|
|
|
has(objId) {
|
|
|
|
const obj = this._objs[objId];
|
|
|
|
return (obj ? obj.resolved : false);
|
|
|
|
}
|
|
|
|
|
2018-11-07 22:17:14 +09:00
|
|
|
/**
|
|
|
|
* Resolves the object `objId` with optional `data`.
|
|
|
|
*/
|
|
|
|
resolve(objId, data) {
|
|
|
|
const obj = this._ensureObj(objId);
|
2013-08-13 02:48:06 +09:00
|
|
|
|
2018-11-07 22:17:14 +09:00
|
|
|
obj.resolved = true;
|
|
|
|
obj.data = data;
|
|
|
|
obj.capability.resolve(data);
|
|
|
|
}
|
2013-08-13 02:48:06 +09:00
|
|
|
|
2018-11-07 22:17:14 +09:00
|
|
|
clear() {
|
2019-03-14 22:01:55 +09:00
|
|
|
for (const objId in this._objs) {
|
|
|
|
const { data, } = this._objs[objId];
|
|
|
|
|
|
|
|
if (typeof Image !== 'undefined' && data instanceof Image) {
|
|
|
|
// Always release the image data when clearing out the cached objects.
|
|
|
|
releaseImageResources(data);
|
|
|
|
}
|
|
|
|
}
|
2018-11-07 22:17:14 +09:00
|
|
|
this._objs = Object.create(null);
|
|
|
|
}
|
|
|
|
}
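// NOTE: A minimal sketch, not part of this module, of the resolve/get contract
// implemented by `PDFObjects`; the object id below is purely illustrative.
//
//   const objs = new PDFObjects();
//
//   // Asynchronous consumer: the callback fires once the object is resolved.
//   objs.get('img_p0_1', (data) => {
//     console.log('Image data is ready:', data);
//   });
//
//   // Producer side (normally the 'obj'/'commonobj' handlers above).
//   objs.resolve('img_p0_1', { width: 100, height: 50, });
//
//   // Synchronous consumer: only valid once the object has been resolved.
//   if (objs.has('img_p0_1')) {
//     const data = objs.get('img_p0_1');
//   }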
|
2014-01-04 09:17:05 +09:00
|
|
|
|
2014-01-22 04:28:18 +09:00
|
|
|
/**
|
|
|
|
* Allows controlling of the rendering tasks.
|
2015-11-13 04:39:58 +09:00
|
|
|
* @alias RenderTask
|
2014-01-22 04:28:18 +09:00
|
|
|
*/
|
2018-11-08 21:46:02 +09:00
|
|
|
class RenderTask {
|
|
|
|
constructor(internalRenderTask) {
|
2015-01-06 12:45:01 +09:00
|
|
|
this._internalRenderTask = internalRenderTask;
|
|
|
|
|
2014-01-22 04:28:18 +09:00
|
|
|
/**
|
2015-01-06 12:45:01 +09:00
|
|
|
* Callback for incremental rendering -- a function that will be called
|
|
|
|
* each time the rendering is paused. To continue rendering call the
|
|
|
|
* function that is the first argument to the callback.
|
|
|
|
* @type {function}
|
2014-01-22 04:28:18 +09:00
|
|
|
*/
|
2015-01-06 12:45:01 +09:00
|
|
|
this.onContinue = null;
|
2013-08-01 03:17:36 +09:00
|
|
|
}
|
|
|
|
|
2018-11-08 21:46:02 +09:00
|
|
|
/**
|
|
|
|
* Promise for rendering task completion.
|
|
|
|
* @return {Promise}
|
|
|
|
*/
|
|
|
|
get promise() {
|
|
|
|
return this._internalRenderTask.capability.promise;
|
|
|
|
}
|
2014-04-12 02:10:42 +09:00
|
|
|
|
2018-11-08 21:46:02 +09:00
|
|
|
/**
|
|
|
|
* Cancels the rendering task. If the task is currently rendering, it will
* not be cancelled until graphics pauses with a timeout. The promise that
* this object exposes will be rejected when cancelled.
|
|
|
|
*/
|
|
|
|
cancel() {
|
|
|
|
this._internalRenderTask.cancel();
|
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2018-11-08 21:46:02 +09:00
|
|
|
/**
|
|
|
|
* Registers callbacks to indicate the rendering task completion.
|
|
|
|
*
|
|
|
|
* @param {function} onFulfilled The callback for the rendering completion.
|
|
|
|
* @param {function} onRejected The callback for the rendering failure.
|
|
|
|
* @return {Promise} A promise that is resolved after the onFulfilled or
|
|
|
|
* onRejected callback.
|
|
|
|
*/
|
|
|
|
then(onFulfilled, onRejected) {
|
|
|
|
deprecated('RenderTask.then method, use the `promise` getter instead.');
|
|
|
|
return this.promise.then.apply(this.promise, arguments);
|
|
|
|
}
|
|
|
|
}
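// NOTE: A minimal sketch, not part of this module, of driving the `RenderTask`
// returned by `PDFPageProxy.render`; the canvas element is assumed to exist.
//
//   const viewport = page.getViewport({ scale: 1.0, });
//   const canvasContext = canvas.getContext('2d');
//
//   const renderTask = page.render({ canvasContext, viewport, });
//   renderTask.promise.then(() => {
//     console.log('Rendering finished.');
//   }, (reason) => {
//     // A `RenderingCancelledException` ends up here after `cancel()`.
//     console.warn(`Rendering stopped: ${reason}`);
//   });
//
//   // To stop rendering, e.g. when the page scrolls out of view:
//   // renderTask.cancel();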
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2014-01-22 04:28:18 +09:00
|
|
|
/**
|
|
|
|
* For internal use only.
|
|
|
|
* @ignore
|
|
|
|
*/
|
2018-11-08 22:33:56 +09:00
|
|
|
const InternalRenderTask = (function InternalRenderTaskClosure() {
|
|
|
|
const canvasInRendering = new WeakSet();
|
|
|
|
|
|
|
|
class InternalRenderTask {
|
|
|
|
constructor({ callback, params, objs, commonObjs, operatorList, pageNumber,
|
|
|
|
canvasFactory, webGLContext, useRequestAnimationFrame = false,
|
|
|
|
pdfBug = false, }) {
|
|
|
|
this.callback = callback;
|
|
|
|
this.params = params;
|
|
|
|
this.objs = objs;
|
|
|
|
this.commonObjs = commonObjs;
|
|
|
|
this.operatorListIdx = null;
|
|
|
|
this.operatorList = operatorList;
|
|
|
|
this.pageNumber = pageNumber;
|
|
|
|
this.canvasFactory = canvasFactory;
|
|
|
|
this.webGLContext = webGLContext;
|
|
|
|
this._pdfBug = pdfBug;
|
2017-11-02 00:32:22 +09:00
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
this.running = false;
|
|
|
|
this.graphicsReadyCallback = null;
|
|
|
|
this.graphicsReady = false;
|
|
|
|
this._useRequestAnimationFrame = (useRequestAnimationFrame === true &&
|
|
|
|
typeof window !== 'undefined');
|
|
|
|
this.cancelled = false;
|
|
|
|
this.capability = createPromiseCapability();
|
|
|
|
this.task = new RenderTask(this);
|
|
|
|
// caching this-bound methods
|
|
|
|
this._continueBound = this._continue.bind(this);
|
|
|
|
this._scheduleNextBound = this._scheduleNext.bind(this);
|
|
|
|
this._nextBound = this._next.bind(this);
|
|
|
|
this._canvas = params.canvasContext.canvas;
|
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
initializeGraphics(transparency = false) {
|
2018-06-29 05:38:09 +09:00
|
|
|
if (this.cancelled) {
|
|
|
|
return;
|
|
|
|
}
|
2017-06-13 06:04:35 +09:00
|
|
|
if (this._canvas) {
|
|
|
|
if (canvasInRendering.has(this._canvas)) {
|
|
|
|
throw new Error(
|
|
|
|
'Cannot use the same canvas during multiple render() operations. ' +
|
|
|
|
'Use different canvas or ensure previous operations were ' +
|
|
|
|
'cancelled or completed.');
|
|
|
|
}
|
2018-11-01 02:15:23 +09:00
|
|
|
canvasInRendering.add(this._canvas);
|
2017-06-13 06:04:35 +09:00
|
|
|
}
|
|
|
|
|
2018-02-18 07:13:49 +09:00
|
|
|
if (this._pdfBug && globalScope.StepperManager &&
|
2013-08-01 03:17:36 +09:00
|
|
|
globalScope.StepperManager.enabled) {
|
|
|
|
this.stepper = globalScope.StepperManager.create(this.pageNumber - 1);
|
|
|
|
this.stepper.init(this.operatorList);
|
|
|
|
this.stepper.nextBreakPoint = this.stepper.getNextBreakPoint();
|
|
|
|
}
|
2018-11-08 22:33:56 +09:00
|
|
|
const {
|
|
|
|
canvasContext, viewport, transform, imageLayer, background,
|
|
|
|
} = this.params;
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
this.gfx = new CanvasGraphics(canvasContext, this.commonObjs, this.objs,
|
|
|
|
this.canvasFactory, this.webGLContext,
|
|
|
|
imageLayer);
|
2017-05-16 20:01:03 +09:00
|
|
|
this.gfx.beginDrawing({
|
2018-11-08 22:33:56 +09:00
|
|
|
transform,
|
|
|
|
viewport,
|
2017-05-16 20:01:03 +09:00
|
|
|
transparency,
|
2018-11-08 22:33:56 +09:00
|
|
|
background,
|
2017-05-16 20:01:03 +09:00
|
|
|
});
|
2013-08-01 03:17:36 +09:00
|
|
|
this.operatorListIdx = 0;
|
|
|
|
this.graphicsReady = true;
|
|
|
|
if (this.graphicsReadyCallback) {
|
|
|
|
this.graphicsReadyCallback();
|
|
|
|
}
|
2018-11-08 22:33:56 +09:00
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2019-01-26 18:12:32 +09:00
|
|
|
cancel(error = null) {
|
2013-08-01 03:17:36 +09:00
|
|
|
this.running = false;
|
|
|
|
this.cancelled = true;
|
2018-11-01 00:22:17 +09:00
|
|
|
if (this.gfx) {
|
|
|
|
this.gfx.endDrawing();
|
|
|
|
}
|
2017-06-13 06:04:35 +09:00
|
|
|
if (this._canvas) {
|
|
|
|
canvasInRendering.delete(this._canvas);
|
|
|
|
}
|
2019-01-26 18:12:32 +09:00
|
|
|
this.callback(error || new RenderingCancelledException(
|
|
|
|
`Rendering cancelled, page ${this.pageNumber}`, 'canvas'));
|
2018-11-08 22:33:56 +09:00
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
operatorListChanged() {
|
2013-08-01 03:17:36 +09:00
|
|
|
if (!this.graphicsReady) {
|
|
|
|
if (!this.graphicsReadyCallback) {
|
2014-05-09 21:00:47 +09:00
|
|
|
this.graphicsReadyCallback = this._continueBound;
|
2013-08-01 03:17:36 +09:00
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (this.stepper) {
|
|
|
|
this.stepper.updateOperatorList(this.operatorList);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (this.running) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
this._continue();
|
2018-11-08 22:33:56 +09:00
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
_continue() {
|
2013-08-01 03:17:36 +09:00
|
|
|
this.running = true;
|
|
|
|
if (this.cancelled) {
|
|
|
|
return;
|
|
|
|
}
|
2015-01-06 12:45:01 +09:00
|
|
|
if (this.task.onContinue) {
|
2017-01-22 01:08:25 +09:00
|
|
|
this.task.onContinue(this._scheduleNextBound);
|
2013-08-01 03:17:36 +09:00
|
|
|
} else {
|
2014-05-09 21:00:47 +09:00
|
|
|
this._scheduleNext();
|
2013-08-01 03:17:36 +09:00
|
|
|
}
|
2018-11-08 22:33:56 +09:00
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
_scheduleNext() {
|
|
|
|
if (this._useRequestAnimationFrame) {
|
2018-06-13 18:01:58 +09:00
|
|
|
window.requestAnimationFrame(() => {
|
2019-01-26 18:12:32 +09:00
|
|
|
this._nextBound().catch(this.cancel.bind(this));
|
2018-06-13 18:01:58 +09:00
|
|
|
});
|
2015-05-12 22:44:42 +09:00
|
|
|
} else {
|
2019-01-26 18:12:32 +09:00
|
|
|
Promise.resolve().then(this._nextBound).catch(this.cancel.bind(this));
|
2015-05-12 22:44:42 +09:00
|
|
|
}
|
2018-11-08 22:33:56 +09:00
|
|
|
}
|
2014-05-09 21:00:47 +09:00
|
|
|
|
2018-11-08 22:33:56 +09:00
|
|
|
async _next() {
|
|
|
|
if (this.cancelled) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
this.operatorListIdx = this.gfx.executeOperatorList(this.operatorList,
|
|
|
|
this.operatorListIdx,
|
|
|
|
this._continueBound,
|
|
|
|
this.stepper);
|
|
|
|
if (this.operatorListIdx === this.operatorList.argsArray.length) {
|
|
|
|
this.running = false;
|
|
|
|
if (this.operatorList.lastChunk) {
|
|
|
|
this.gfx.endDrawing();
|
|
|
|
if (this._canvas) {
|
|
|
|
canvasInRendering.delete(this._canvas);
|
2017-06-13 06:04:35 +09:00
|
|
|
}
|
2018-11-08 22:33:56 +09:00
|
|
|
this.callback();
|
2013-08-01 03:17:36 +09:00
|
|
|
}
|
2018-11-08 22:33:56 +09:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2013-08-01 03:17:36 +09:00
|
|
|
return InternalRenderTask;
|
|
|
|
})();
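// NOTE: A minimal sketch, not part of this module, of how `RenderTask.onContinue`
// cooperates with `InternalRenderTask._continue` above: the callback receives a
// continuation and decides when rendering may proceed. The `isPageVisible`
// helper is purely illustrative.
//
//   const renderTask = page.render({ canvasContext, viewport, });
//   renderTask.onContinue = (continueRendering) => {
//     if (isPageVisible()) {
//       continueRendering();
//     } else {
//       // Postpone the next operator-list chunk until the page is visible.
//       setTimeout(continueRendering, 250);
//     }
//   };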
|
2015-12-01 05:42:47 +09:00
|
|
|
|
2018-11-09 00:24:20 +09:00
|
|
|
const version = (typeof PDFJSDev !== 'undefined' ?
|
|
|
|
PDFJSDev.eval('BUNDLE_VERSION') : null);
|
|
|
|
const build = (typeof PDFJSDev !== 'undefined' ?
|
|
|
|
PDFJSDev.eval('BUNDLE_BUILD') : null);
|
2016-03-29 06:44:27 +09:00
|
|
|
|
2017-04-02 21:25:33 +09:00
|
|
|
export {
|
|
|
|
getDocument,
|
2017-05-03 02:20:13 +09:00
|
|
|
LoopbackPort,
|
2017-04-02 21:25:33 +09:00
|
|
|
PDFDataRangeTransport,
|
|
|
|
PDFWorker,
|
|
|
|
PDFDocumentProxy,
|
|
|
|
PDFPageProxy,
|
2018-01-14 08:34:46 +09:00
|
|
|
setPDFNetworkStreamFactory,
|
2017-04-02 21:25:33 +09:00
|
|
|
version,
|
|
|
|
build,
|
|
|
|
};
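// NOTE: A minimal consumption sketch, not part of this module; the module
// specifier and worker path depend on the build/bundler being used and are
// purely illustrative.
//
//   import { getDocument, GlobalWorkerOptions, version } from 'pdfjs-dist';
//
//   GlobalWorkerOptions.workerSrc = 'pdf.worker.js';
//   getDocument('file.pdf').promise.then((pdfDocument) => {
//     console.log(`PDF.js ${version}: ${pdfDocument.numPages} page(s).`);
//   });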
|