[api-minor] Remove the `forceClamped`-functionality in the Streams (issue 14849)

As it turns out, most of the code-paths in the `PDFImage`-class won't actually pass the TypedArray (containing the image-data) to the `ColorSpace`-code. Hence we *generally* don't need to force the image-data to be a `Uint8ClampedArray`, and can just as well directly use a `Uint8Array` instead.

In the following cases we're returning the data without any `ColorSpace`-parsing, and the exact TypedArray used shouldn't matter:
 - b72a448327/src/core/image.js (L714)
 - b72a448327/src/core/image.js (L751)

In the following cases the image-data is only used *internally*, and again the exact TypedArray used shouldn't matter:
 - b72a448327/src/core/image.js (L762), with the actual image-data being defined (as a `Uint8ClampedArray`) further below
 - b72a448327/src/core/image.js (L837)
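For background: the only behavioural difference between the two TypedArrays is on *writes*, where a `Uint8ClampedArray` clamps out-of-range values to `[0, 255]` while a `Uint8Array` wraps them modulo 256; as long as no `ColorSpace`-conversion writes such values, the choice is unobservable. A minimal, purely illustrative sketch:

```js
// Illustrative only: clamping vs. wrapping semantics of the two TypedArrays.
const clamped = new Uint8ClampedArray(1);
const plain = new Uint8Array(1);

clamped[0] = 300; // stored as 255 (clamped to the valid range)
plain[0] = 300; // stored as 44 (300 modulo 256, i.e. wrapped)

console.log(clamped[0], plain[0]); // 255 44
```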
*Please note:* This is tagged `api-minor` because it's API-observable, given that *some* image/mask-data will now be returned as `Uint8Array` rather than using `Uint8ClampedArray` unconditionally. However, that seems like a small price to pay to (slightly) reduce memory usage during image-conversion.
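To make the API-observable part concrete, here's a hedged, illustrative sketch (not part of this patch) of how a consumer that previously assumed `Uint8ClampedArray` data can accept either type; the `imgData` shape and the `toClampedData` helper are hypothetical:

```js
// Hypothetical helper: normalize image data on the consumer side.
// `imgData` stands in for an image-data object received from the worker;
// only the type of its `data` property matters here.
function toClampedData(imgData) {
  const { data } = imgData;
  if (data instanceof Uint8ClampedArray) {
    return data;
  }
  // A Uint8Array can be re-wrapped without copying, by creating a clamped
  // view onto the same underlying buffer.
  return new Uint8ClampedArray(data.buffer, data.byteOffset, data.length);
}
```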
parent b72a448327
commit fbf6dee8ee
@@ -40,7 +40,7 @@ class BaseStream {
     unreachable("Abstract method `getByte` called");
   }
 
-  getBytes(length, forceClamped = false) {
+  getBytes(length) {
     unreachable("Abstract method `getBytes` called");
   }
 
@@ -52,8 +52,8 @@ class BaseStream {
     return peekedByte;
   }
 
-  peekBytes(length, forceClamped = false) {
-    const bytes = this.getBytes(length, forceClamped);
+  peekBytes(length) {
+    const bytes = this.getBytes(length);
     this.pos -= bytes.length;
     return bytes;
   }
@@ -80,7 +80,7 @@ class BaseStream {
   }
 
   getString(length) {
-    return bytesToString(this.getBytes(length, /* forceClamped = */ false));
+    return bytesToString(this.getBytes(length));
   }
 
   skip(n) {
@@ -169,7 +169,7 @@ class ChunkedStream extends Stream {
     return this.bytes[this.pos++];
   }
 
-  getBytes(length, forceClamped = false) {
+  getBytes(length) {
     const bytes = this.bytes;
     const pos = this.pos;
     const strEnd = this.end;
@@ -178,9 +178,7 @@ class ChunkedStream extends Stream {
       if (strEnd > this.progressiveDataLength) {
         this.ensureRange(pos, strEnd);
       }
-      const subarray = bytes.subarray(pos, strEnd);
-      // `this.bytes` is always a `Uint8Array` here.
-      return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
+      return bytes.subarray(pos, strEnd);
     }
 
     let end = pos + length;
@@ -192,9 +190,7 @@ class ChunkedStream extends Stream {
     }
 
     this.pos = end;
-    const subarray = bytes.subarray(pos, end);
-    // `this.bytes` is always a `Uint8Array` here.
-    return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
+    return bytes.subarray(pos, end);
   }
 
   getByteRange(begin, end) {
@@ -73,7 +73,7 @@ class DecodeStream extends BaseStream {
     return this.buffer[this.pos++];
   }
 
-  getBytes(length, forceClamped = false) {
+  getBytes(length) {
     const pos = this.pos;
     let end;
 
@@ -96,11 +96,7 @@ class DecodeStream extends BaseStream {
     }
 
     this.pos = end;
-    const subarray = this.buffer.subarray(pos, end);
-    // `this.buffer` is either a `Uint8Array` or `Uint8ClampedArray` here.
-    return forceClamped && !(subarray instanceof Uint8ClampedArray)
-      ? new Uint8ClampedArray(subarray)
-      : subarray;
+    return this.buffer.subarray(pos, end);
   }
 
   reset() {
@@ -613,10 +613,7 @@ class PartialEvaluator {
     // for later.
     const interpolate = dict.get("I", "Interpolate");
     const bitStrideLength = (w + 7) >> 3;
-    const imgArray = image.getBytes(
-      bitStrideLength * h,
-      /* forceClamped = */ true
-    );
+    const imgArray = image.getBytes(bitStrideLength * h);
     const decode = dict.getArray("D", "Decode");
 
     if (this.parsingType3Font) {
@@ -308,15 +308,6 @@ class PDFImage {
     inverseDecode,
     interpolate,
   }) {
-    if (
-      typeof PDFJSDev === "undefined" ||
-      PDFJSDev.test("!PRODUCTION || TESTING")
-    ) {
-      assert(
-        imgArray instanceof Uint8ClampedArray,
-        'PDFImage.createRawMask: Unsupported "imgArray" type.'
-      );
-    }
     // |imgArray| might not contain full data for every pixel of the mask, so
     // we need to distinguish between |computedLength| and |actualLength|.
     // In particular, if inverseDecode is true, then the array we return must
@@ -332,14 +323,11 @@ class PDFImage {
       // form, so we can just transfer it.
       data = imgArray;
     } else if (!inverseDecode) {
-      data = new Uint8ClampedArray(actualLength);
-      data.set(imgArray);
+      data = new Uint8Array(imgArray);
     } else {
-      data = new Uint8ClampedArray(computedLength);
+      data = new Uint8Array(computedLength);
       data.set(imgArray);
-      for (i = actualLength; i < computedLength; i++) {
-        data[i] = 0xff;
-      }
+      data.fill(0xff, actualLength);
     }
 
     // If necessary, invert the original mask data (but not any extra we might
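The padding rewrite in the hunk above relies on `TypedArray.prototype.fill` accepting a start index; a small self-contained sketch of the equivalence (the lengths are arbitrary values chosen purely for illustration):

```js
// Equivalence of the rewritten padding: fill() with a start index replaces
// the explicit loop over the trailing bytes.
const actualLength = 3;
const computedLength = 6;

const viaLoop = new Uint8Array(computedLength);
for (let i = actualLength; i < computedLength; i++) {
  viaLoop[i] = 0xff;
}

const viaFill = new Uint8Array(computedLength).fill(0xff, actualLength);

console.log(viaLoop); // Uint8Array [0, 0, 0, 255, 255, 255]
console.log(viaFill); // Uint8Array [0, 0, 0, 255, 255, 255]
```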
@@ -363,16 +351,6 @@ class PDFImage {
     inverseDecode,
     interpolate,
   }) {
-    if (
-      typeof PDFJSDev === "undefined" ||
-      PDFJSDev.test("!PRODUCTION || TESTING")
-    ) {
-      assert(
-        imgArray instanceof Uint8ClampedArray,
-        'PDFImage.createMask: Unsupported "imgArray" type.'
-      );
-    }
-
     const isSingleOpaquePixel =
       width === 1 &&
       height === 1 &&
@@ -682,7 +660,6 @@ class PDFImage {
 
     // Rows start at byte boundary.
     const rowBytes = (originalWidth * numComps * bpc + 7) >> 3;
-    let imgArray;
 
     if (!forceRGBA) {
       // If it is a 1-bit-per-pixel grayscale (i.e. black-and-white) image
@@ -710,20 +687,8 @@ class PDFImage {
         drawHeight === originalHeight
       ) {
         imgData.kind = kind;
+        imgData.data = this.getImageBytes(originalHeight * rowBytes, {});
 
-        imgArray = this.getImageBytes(originalHeight * rowBytes);
-        // If imgArray came from a DecodeStream, we're safe to transfer it
-        // (and thus detach its underlying buffer) because it will constitute
-        // the entire DecodeStream's data. But if it came from a Stream, we
-        // need to copy it because it'll only be a portion of the Stream's
-        // data, and the rest will be read later on.
-        if (this.image instanceof DecodeStream) {
-          imgData.data = imgArray;
-        } else {
-          const newArray = new Uint8ClampedArray(imgArray.length);
-          newArray.set(imgArray);
-          imgData.data = newArray;
-        }
         if (this.needsDecode) {
           // Invert the buffer (which must be grayscale if we reached here).
           assert(
@@ -748,18 +713,19 @@ class PDFImage {
           case "DeviceRGB":
           case "DeviceCMYK":
             imgData.kind = ImageKind.RGB_24BPP;
-            imgData.data = this.getImageBytes(
-              imageLength,
+            imgData.data = this.getImageBytes(imageLength, {
               drawWidth,
               drawHeight,
-              /* forceRGB = */ true
-            );
+              forceRGB: true,
+            });
             return imgData;
         }
       }
     }
 
-    imgArray = this.getImageBytes(originalHeight * rowBytes);
+    const imgArray = this.getImageBytes(originalHeight * rowBytes, {
+      internal: true,
+    });
     // imgArray can be incomplete (e.g. after CCITT fax encoding).
     const actualHeight =
       0 | (((imgArray.length / rowBytes) * drawHeight) / originalHeight);
@@ -834,7 +800,7 @@ class PDFImage {
 
     // rows start at byte boundary
     const rowBytes = (width * numComps * bpc + 7) >> 3;
-    const imgArray = this.getImageBytes(height * rowBytes);
+    const imgArray = this.getImageBytes(height * rowBytes, { internal: true });
 
     const comps = this.getComponents(imgArray);
     let i, length;
@@ -867,12 +833,29 @@ class PDFImage {
     }
   }
 
-  getImageBytes(length, drawWidth, drawHeight, forceRGB = false) {
+  getImageBytes(
+    length,
+    { drawWidth, drawHeight, forceRGB = false, internal = false }
+  ) {
     this.image.reset();
     this.image.drawWidth = drawWidth || this.width;
     this.image.drawHeight = drawHeight || this.height;
     this.image.forceRGB = !!forceRGB;
-    return this.image.getBytes(length, /* forceClamped = */ true);
+    const imageBytes = this.image.getBytes(length);
+
+    // If imageBytes came from a DecodeStream, we're safe to transfer it
+    // (and thus detach its underlying buffer) because it will constitute
+    // the entire DecodeStream's data. But if it came from a Stream, we
+    // need to copy it because it'll only be a portion of the Stream's
+    // data, and the rest will be read later on.
+    if (internal || this.image instanceof DecodeStream) {
+      return imageBytes;
+    }
+    assert(
+      imageBytes instanceof Uint8Array,
+      'PDFImage.getImageBytes: Unsupported "imageBytes" type.'
+    );
+    return new Uint8Array(imageBytes);
   }
 }
 
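The comment block that moves into `getImageBytes` above is about transferring buffers to the main thread: a `DecodeStream`'s result makes up its entire buffer and can be transferred, while a `Stream`'s result is a subarray whose backing buffer is still needed later, so it must be copied first. A self-contained sketch of the underlying issue (the `postMessage` transfer is only referenced in a comment):

```js
// Why a subarray must be copied before a transfer: transferring an
// ArrayBuffer detaches it for *every* view onto it.
const backing = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]);
const portion = backing.subarray(0, 4); // shares backing.buffer

// In a worker, a transfer would look like:
//   postMessage({ data: portion }, [portion.buffer]);
// After that, `backing` is detached too, and the bytes the Stream still has
// to read later would be gone. Copying first gives an independent buffer
// that is safe to transfer:
const safeCopy = new Uint8Array(portion);
console.log(safeCopy.buffer !== backing.buffer); // true
```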
@@ -14,7 +14,6 @@
  */
 
 import {
-  assert,
   ImageKind,
   OPS,
   RenderingIntentFlag,
@@ -109,7 +108,7 @@ addState(
     }
     const imgWidth = Math.max(maxX, currentX) + IMAGE_PADDING;
     const imgHeight = currentY + maxLineHeight + IMAGE_PADDING;
-    const imgData = new Uint8ClampedArray(imgWidth * imgHeight * 4);
+    const imgData = new Uint8Array(imgWidth * imgHeight * 4);
     const imgRowSize = imgWidth << 2;
     for (let q = 0; q < count; q++) {
       const data = argsArray[iFirstPIIXO + (q << 2)][0].data;
@@ -678,17 +677,6 @@ class OperatorList {
         case OPS.paintInlineImageXObjectGroup:
         case OPS.paintImageMaskXObject:
           const arg = argsArray[i][0]; // First parameter in imgData.
-
-          if (
-            typeof PDFJSDev === "undefined" ||
-            PDFJSDev.test("!PRODUCTION || TESTING")
-          ) {
-            assert(
-              arg.data instanceof Uint8ClampedArray ||
-                typeof arg.data === "string",
-              'OperatorList._transfers: Unsupported "arg.data" type.'
-            );
-          }
           if (
             !arg.cached &&
             arg.data &&
@@ -45,24 +45,20 @@ class Stream extends BaseStream {
     return this.bytes[this.pos++];
   }
 
-  getBytes(length, forceClamped = false) {
+  getBytes(length) {
     const bytes = this.bytes;
     const pos = this.pos;
     const strEnd = this.end;
 
     if (!length) {
-      const subarray = bytes.subarray(pos, strEnd);
-      // `this.bytes` is always a `Uint8Array` here.
-      return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
+      return bytes.subarray(pos, strEnd);
     }
     let end = pos + length;
     if (end > strEnd) {
       end = strEnd;
     }
     this.pos = end;
-    const subarray = bytes.subarray(pos, end);
-    // `this.bytes` is always a `Uint8Array` here.
-    return forceClamped ? new Uint8ClampedArray(subarray) : subarray;
+    return bytes.subarray(pos, end);
   }
 
   getByteRange(begin, end) {
@@ -36,12 +36,6 @@ describe("stream", function () {
       const result = predictor.getBytes(6);
 
       expect(result).toEqual(new Uint8Array([100, 3, 101, 2, 102, 1]));
-
-      predictor.reset();
-      const clampedResult = predictor.getBytes(6, /* forceClamped = */ true);
-      expect(clampedResult).toEqual(
-        new Uint8ClampedArray([100, 3, 101, 2, 102, 1])
-      );
     });
   });
 });
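For any caller that still needs clamped bytes from a stream (the case the removed unit-test exercised), the replacement is to clamp explicitly at the call-site; a minimal sketch, with `stream` as a stand-in for a `BaseStream`-like object:

```js
// Minimal sketch: explicit clamping replaces the old `forceClamped` flag.
// `stream` is a toy stand-in, not an actual pdf.js Stream instance.
const stream = {
  bytes: new Uint8Array([100, 3, 101, 2, 102, 1]),
  pos: 0,
  getBytes(length) {
    const end = this.pos + length;
    const sub = this.bytes.subarray(this.pos, end);
    this.pos = end;
    return sub;
  },
};

// Before: stream.getBytes(6, /* forceClamped = */ true)
// After:  clamp at the call-site, only where it is actually needed.
const clampedResult = new Uint8ClampedArray(stream.getBytes(6));
console.log(clampedResult); // Uint8ClampedArray [100, 3, 101, 2, 102, 1]
```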