Merge pull request #12665 from Snuffleupagus/update-packages

Update packages and translations

commit 43550be484

external/builder/builder.js (vendored) | 16
@@ -202,14 +202,14 @@ function preprocessCSS(mode, source, destination) {
}

function expandImports(content, baseUrl) {
return content.replace(/^\s*@import\s+url\(([^)]+)\);\s*$/gm, function (
all,
url
) {
var file = path.join(path.dirname(baseUrl), url);
var imported = fs.readFileSync(file, "utf8").toString();
return expandImports(imported, file);
});
return content.replace(
/^\s*@import\s+url\(([^)]+)\);\s*$/gm,
function (all, url) {
var file = path.join(path.dirname(baseUrl), url);
var imported = fs.readFileSync(file, "utf8").toString();
return expandImports(imported, file);
}
);
}

function removePrefixed(content, hasPrefixedFilter) {
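Note: the only change in this hunk is how Prettier 2.2 wraps the replace() callback; the behaviour is unchanged. As a rough standalone sketch of the same recursive @import inlining (the file names used here are hypothetical, not part of the commit):

const fs = require("fs");
const path = require("path");

function expandImports(content, baseUrl) {
  return content.replace(
    /^\s*@import\s+url\(([^)]+)\);\s*$/gm,
    function (all, url) {
      // Resolve the imported file relative to the importing file.
      const file = path.join(path.dirname(baseUrl), url);
      const imported = fs.readFileSync(file, "utf8").toString();
      // Imported files may themselves contain @import rules, hence the recursion.
      return expandImports(imported, file);
    }
  );
}

// Hypothetical usage: inline every @import reachable from viewer.css.
console.log(expandImports(fs.readFileSync("web/viewer.css", "utf8"), "web/viewer.css"));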
gulpfile.js | 95
@@ -577,45 +577,46 @@ gulp.task("buildnumber", function (done) {
console.log();
console.log("### Getting extension build number");

exec("git log --format=oneline " + config.baseVersion + "..", function (
err,
stdout,
stderr
) {
var buildNumber = 0;
if (!err) {
// Build number is the number of commits since base version
buildNumber = stdout ? stdout.match(/\n/g).length : 0;
} else {
console.log("This is not a Git repository; using default build number.");
}

console.log("Extension build number: " + buildNumber);

var version = config.versionPrefix + buildNumber;

exec('git log --format="%h" -n 1', function (err2, stdout2, stderr2) {
var buildCommit = "";
if (!err2) {
buildCommit = stdout2.replace("\n", "");
exec(
"git log --format=oneline " + config.baseVersion + "..",
function (err, stdout, stderr) {
var buildNumber = 0;
if (!err) {
// Build number is the number of commits since base version
buildNumber = stdout ? stdout.match(/\n/g).length : 0;
} else {
console.log(
"This is not a Git repository; using default build number."
);
}

createStringSource(
"version.json",
JSON.stringify(
{
version: version,
build: buildNumber,
commit: buildCommit,
},
null,
2
console.log("Extension build number: " + buildNumber);

var version = config.versionPrefix + buildNumber;

exec('git log --format="%h" -n 1', function (err2, stdout2, stderr2) {
var buildCommit = "";
if (!err2) {
buildCommit = stdout2.replace("\n", "");
}

createStringSource(
"version.json",
JSON.stringify(
{
version: version,
build: buildNumber,
commit: buildCommit,
},
null,
2
)
)
)
.pipe(gulp.dest(BUILD_DIR))
.on("end", done);
});
});
.pipe(gulp.dest(BUILD_DIR))
.on("end", done);
});
}
);
});

gulp.task("default_preferences-pre", function () {
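Note: the buildnumber task is only re-wrapped by Prettier here; it still derives the build number by counting commits since config.baseVersion and takes the short hash from git log. A rough standalone sketch of that logic (the "v2.7.0" base tag is a placeholder, not from this commit):

const { exec } = require("child_process");

exec("git log --format=oneline v2.7.0..", function (err, stdout) {
  // Build number = number of commits since the base version.
  const buildNumber = !err && stdout ? stdout.match(/\n/g).length : 0;

  exec('git log --format="%h" -n 1', function (err2, stdout2) {
    const buildCommit = !err2 ? stdout2.replace("\n", "") : "";
    // This mirrors the version.json payload written by the gulp task.
    console.log(JSON.stringify({ build: buildNumber, commit: buildCommit }, null, 2));
  });
});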
@@ -1619,17 +1620,19 @@ gulp.task("baseline", function (done) {
return;
}

exec("git checkout " + baselineCommit, { cwd: workingDirectory }, function (
error2
) {
if (error2) {
done(new Error("Baseline commit checkout failed."));
return;
}
exec(
"git checkout " + baselineCommit,
{ cwd: workingDirectory },
function (error2) {
if (error2) {
done(new Error("Baseline commit checkout failed."));
return;
}

console.log('Baseline commit "' + baselineCommit + '" checked out.');
done();
});
console.log('Baseline commit "' + baselineCommit + '" checked out.');
done();
}
);
});
});
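Note: same mechanical re-wrap as above; exec() still receives the command, an options object with cwd, and the callback. A minimal sketch of that call shape (directory and commit hash are placeholders):

const { exec } = require("child_process");

exec("git checkout abc1234", { cwd: "build/baseline" }, function (error) {
  if (error) {
    console.error("Baseline commit checkout failed.");
    return;
  }
  console.log('Baseline commit "abc1234" checked out.');
});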
@@ -243,7 +243,7 @@ annotation_date_string={{date}}, {{time}}
text_annotation_type.alt=[{{type}} Σχόλιο]
password_label=Εισαγωγή κωδικού για το άνοιγμα του PDF αρχείου.
password_invalid=Μη έγκυρος κωδικός. Προσπαθείστε ξανά.
password_ok=ΟΚ
password_ok=OK
password_cancel=Ακύρωση

printing_not_supported=Προειδοποίηση: Η εκτύπωση δεν υποστηρίζεται πλήρως από αυτόν τον περιηγητή.
@@ -143,11 +143,13 @@ document_outline.title=Erakutsi dokumentuaren eskema (klik bikoitza elementu guz
document_outline_label=Dokumentuaren eskema
attachments.title=Erakutsi eranskinak
attachments_label=Eranskinak
layers_label=Geruzak
thumbs.title=Erakutsi koadro txikiak
thumbs_label=Koadro txikiak
findbar.title=Bilatu dokumentuan
findbar_label=Bilatu

additional_layers=Geruza gehigarriak
# LOCALIZATION NOTE (page_canvas): "{{page}}" will be replaced by the page number.
page_canvas={{page}}. orria
# Thumbnails panel item (tooltip and alt text for images)
@@ -214,7 +214,7 @@ error_stack=Pila : {{stack}}
error_file=Fichièr : {{file}}
# LOCALIZATION NOTE (error_line): "{{line}}" will be replaced with a line number
error_line=Linha : {{line}}
rendering_error=Una error s'es produita pendent l'afichatge de la pagina.
rendering_error=Una error s'es producha pendent l'afichatge de la pagina.

# Predefined zoom values
page_scale_width=Largor plena
@@ -227,7 +227,7 @@ page_scale_percent={{scale}}%

# Loading indicator messages
loading_error_indicator=Error
loading_error=Una error s'es produita pendent lo cargament del fichièr PDF.
loading_error=Una error s'es producha pendent lo cargament del fichièr PDF.
invalid_file_error=Fichièr PDF invalid o corromput.
missing_file_error=Fichièr PDF mancant.
unexpected_response_error=Responsa de servidor imprevista.
@@ -136,10 +136,10 @@ print_progress_close=Cancelar
# Tooltips and alt text for side panel toolbar buttons
# (the _label strings are alt text for the buttons, the .title strings are
# tooltips)
toggle_sidebar.title=Alternar painel
toggle_sidebar.title=Exibir/ocultar painel
toggle_sidebar_notification.title=Alternar o painel (documento contém marcadores e anexos)
toggle_sidebar_notification2.title=Exibir/ocultar painel lateral (documento contém estrutura/anexos/camadas)
toggle_sidebar_label=Alternar painel
toggle_sidebar_label=Exibir/ocultar painel
document_outline.title=Mostrar a estrutura do documento (dê um duplo-clique para expandir/recolher todos os itens)
document_outline_label=Estrutura do documento
attachments.title=Mostrar anexos
package-lock.json (generated) | 1683
File diff suppressed because it is too large.

package.json | 26
@@ -2,26 +2,26 @@
"name": "pdf.js",
"version": "2.0.0",
"devDependencies": {
"@babel/core": "^7.12.3",
"@babel/core": "^7.12.9",
"@babel/plugin-proposal-logical-assignment-operators": "^7.12.1",
"@babel/plugin-transform-modules-commonjs": "^7.12.1",
"@babel/plugin-transform-runtime": "^7.12.1",
"@babel/preset-env": "^7.12.1",
"@babel/preset-env": "^7.12.7",
"@babel/runtime": "^7.12.5",
"acorn": "^8.0.4",
"autoprefixer": "^10.0.2",
"babel-loader": "^8.2.1",
"autoprefixer": "^10.0.4",
"babel-loader": "^8.2.2",
"canvas": "^2.6.1",
"core-js": "^3.7.0",
"core-js": "^3.8.0",
"cross-env": "^7.0.2",
"es-module-shims": "^0.6.0",
"escodegen": "^2.0.0",
"eslint": "^7.13.0",
"eslint": "^7.14.0",
"eslint-config-prettier": "^6.15.0",
"eslint-plugin-fetch-options": "^0.0.5",
"eslint-plugin-html": "^6.1.1",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-mozilla": "^2.8.0",
"eslint-plugin-mozilla": "^2.9.1",
"eslint-plugin-no-unsanitized": "^3.1.4",
"eslint-plugin-prettier": "^3.1.4",
"eslint-plugin-unicorn": "^22.0.0",
@@ -38,27 +38,27 @@
"merge-stream": "^2.0.0",
"mkdirp": "^1.0.4",
"needle": "^2.5.2",
"postcss": "^8.1.7",
"postcss": "^8.1.10",
"postcss-calc": "^7.0.5",
"postcss-css-variables": "^0.17.0",
"prettier": "^2.1.2",
"prettier": "^2.2.1",
"puppeteer": "^5.5.0",
"rimraf": "^3.0.2",
"streamqueue": "^1.1.2",
"stylelint": "^13.7.2",
"stylelint": "^13.8.0",
"stylelint-config-prettier": "^8.0.2",
"stylelint-prettier": "^1.1.2",
"systemjs": "^0.21.6",
"systemjs-plugin-babel": "^0.0.25",
"terser": "^5.3.8",
"terser": "^5.5.1",
"through2": "^4.0.2",
"ttest": "^3.0.0",
"typescript": "^4.0.5",
"typescript": "^4.1.2",
"typogr": "^0.6.8",
"vinyl": "^2.2.1",
"vinyl-fs": "^3.0.3",
"web-streams-polyfill": "^3.0.1",
"webpack": "^5.4.0",
"webpack": "^5.9.0",
"webpack-stream": "~6.1.1",
"wintersmith": "^2.5.0",
"yargs": "^11.1.1"
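Note: every version bump in this file appears to stay inside the caret range that was already declared, so the ranges are simply re-pinned to a newer minimum rather than jumping a major version. A quick way to check that, assuming the semver package is available locally:

const semver = require("semver");

console.log(semver.satisfies("7.12.9", "^7.12.3")); // @babel/core: true
console.log(semver.satisfies("5.9.0", "^5.4.0"));   // webpack: true
console.log(semver.satisfies("8.0.0", "^7.13.0"));  // eslint: false (a major bump, not what this commit does)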
@@ -994,11 +994,13 @@ var CMapFactory = (function CMapFactoryClosure() {
var cMap = new CMap(true);

if (compressionType === CMapCompressionType.BINARY) {
return new BinaryCMapReader().process(cMapData, cMap, function (
useCMap
) {
return extendCMap(cMap, fetchBuiltInCMap, useCMap);
});
return new BinaryCMapReader().process(
cMapData,
cMap,
function (useCMap) {
return extendCMap(cMap, fetchBuiltInCMap, useCMap);
}
);
}
if (compressionType === CMapCompressionType.NONE) {
var lexer = new Lexer(new Stream(cMapData));
@@ -533,101 +533,100 @@ class WorkerMessageHandler {
return pdfManager.ensureDoc("calculationOrderIds");
});

handler.on("SaveDocument", function ({
numPages,
annotationStorage,
filename,
}) {
pdfManager.requestLoadedStream();
const promises = [
pdfManager.onLoadedStream(),
pdfManager.ensureCatalog("acroForm"),
pdfManager.ensureDoc("xref"),
pdfManager.ensureDoc("startXRef"),
];
handler.on(
"SaveDocument",
function ({ numPages, annotationStorage, filename }) {
pdfManager.requestLoadedStream();
const promises = [
pdfManager.onLoadedStream(),
pdfManager.ensureCatalog("acroForm"),
pdfManager.ensureDoc("xref"),
pdfManager.ensureDoc("startXRef"),
];

for (let pageIndex = 0; pageIndex < numPages; pageIndex++) {
promises.push(
pdfManager.getPage(pageIndex).then(function (page) {
const task = new WorkerTask(`Save: page ${pageIndex}`);
startWorkerTask(task);
for (let pageIndex = 0; pageIndex < numPages; pageIndex++) {
promises.push(
pdfManager.getPage(pageIndex).then(function (page) {
const task = new WorkerTask(`Save: page ${pageIndex}`);
startWorkerTask(task);

return page
.save(handler, task, annotationStorage)
.finally(function () {
finishWorkerTask(task);
});
})
);
}

return Promise.all(promises).then(function ([
stream,
acroForm,
xref,
startXRef,
...refs
]) {
let newRefs = [];
for (const ref of refs) {
newRefs = ref
.filter(x => x !== null)
.reduce((a, b) => a.concat(b), newRefs);
return page
.save(handler, task, annotationStorage)
.finally(function () {
finishWorkerTask(task);
});
})
);
}

if (newRefs.length === 0) {
// No new refs so just return the initial bytes
return stream.bytes;
}

const xfa = (acroForm instanceof Dict && acroForm.get("XFA")) || [];
let xfaDatasets = null;
if (Array.isArray(xfa)) {
for (let i = 0, ii = xfa.length; i < ii; i += 2) {
if (xfa[i] === "datasets") {
xfaDatasets = xfa[i + 1];
}
}
} else {
// TODO: Support XFA streams.
warn("Unsupported XFA type.");
}

let newXrefInfo = Object.create(null);
if (xref.trailer) {
// Get string info from Info in order to compute fileId.
const infoObj = Object.create(null);
const xrefInfo = xref.trailer.get("Info") || null;
if (xrefInfo instanceof Dict) {
xrefInfo.forEach((key, value) => {
if (isString(key) && isString(value)) {
infoObj[key] = stringToPDFString(value);
}
});
}

newXrefInfo = {
rootRef: xref.trailer.getRaw("Root") || null,
encrypt: xref.trailer.getRaw("Encrypt") || null,
newRef: xref.getNewRef(),
infoRef: xref.trailer.getRaw("Info") || null,
info: infoObj,
fileIds: xref.trailer.getRaw("ID") || null,
startXRef,
filename,
};
}
xref.resetNewRef();

return incrementalUpdate({
originalData: stream.bytes,
xrefInfo: newXrefInfo,
newRefs,
return Promise.all(promises).then(function ([
stream,
acroForm,
xref,
datasetsRef: xfaDatasets,
startXRef,
...refs
]) {
let newRefs = [];
for (const ref of refs) {
newRefs = ref
.filter(x => x !== null)
.reduce((a, b) => a.concat(b), newRefs);
}

if (newRefs.length === 0) {
// No new refs so just return the initial bytes
return stream.bytes;
}

const xfa = (acroForm instanceof Dict && acroForm.get("XFA")) || [];
let xfaDatasets = null;
if (Array.isArray(xfa)) {
for (let i = 0, ii = xfa.length; i < ii; i += 2) {
if (xfa[i] === "datasets") {
xfaDatasets = xfa[i + 1];
}
}
} else {
// TODO: Support XFA streams.
warn("Unsupported XFA type.");
}

let newXrefInfo = Object.create(null);
if (xref.trailer) {
// Get string info from Info in order to compute fileId.
const infoObj = Object.create(null);
const xrefInfo = xref.trailer.get("Info") || null;
if (xrefInfo instanceof Dict) {
xrefInfo.forEach((key, value) => {
if (isString(key) && isString(value)) {
infoObj[key] = stringToPDFString(value);
}
});
}

newXrefInfo = {
rootRef: xref.trailer.getRaw("Root") || null,
encrypt: xref.trailer.getRaw("Encrypt") || null,
newRef: xref.getNewRef(),
infoRef: xref.trailer.getRaw("Info") || null,
info: infoObj,
fileIds: xref.trailer.getRaw("ID") || null,
startXRef,
filename,
};
}
xref.resetNewRef();

return incrementalUpdate({
originalData: stream.bytes,
xrefInfo: newXrefInfo,
newRefs,
xref,
datasetsRef: xfaDatasets,
});
});
});
});
}
);

handler.on("GetOperatorList", function wphSetupRenderPage(data, sink) {
var pageIndex = data.pageIndex;
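Note: apart from the handler.on() re-wrap, the "SaveDocument" flow is unchanged. A heavily simplified sketch of its shape, reusing the pdf.js internals named in the diff (pdfManager, WorkerTask, startWorkerTask, finishWorkerTask, incrementalUpdate) purely for illustration:

function saveDocument(pdfManager, handler, { numPages, annotationStorage }) {
  pdfManager.requestLoadedStream();
  const promises = [
    pdfManager.onLoadedStream(),
    pdfManager.ensureCatalog("acroForm"),
    pdfManager.ensureDoc("xref"),
    pdfManager.ensureDoc("startXRef"),
  ];
  // One save promise per page, each wrapped in its own worker task.
  for (let pageIndex = 0; pageIndex < numPages; pageIndex++) {
    promises.push(
      pdfManager.getPage(pageIndex).then(function (page) {
        const task = new WorkerTask(`Save: page ${pageIndex}`);
        startWorkerTask(task);
        return page
          .save(handler, task, annotationStorage)
          .finally(() => finishWorkerTask(task));
      })
    );
  }
  return Promise.all(promises).then(function ([stream, acroForm, xref, startXRef, ...refs]) {
    // Collect the per-page refs, dropping pages that produced none.
    let newRefs = [];
    for (const ref of refs) {
      newRefs = ref.filter(x => x !== null).reduce((a, b) => a.concat(b), newRefs);
    }
    if (newRefs.length === 0) {
      // No new refs, so just return the initial bytes.
      return stream.bytes;
    }
    // The real handler also extracts XFA datasets and xref trailer info before this call.
    return incrementalUpdate({ originalData: stream.bytes, newRefs, xref });
  });
}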
@@ -70,140 +70,136 @@ class Util extends PDFObject {
const ZERO = 4;
const HASH = 8;
let i = 0;
return args[0].replace(pattern, function (
match,
nDecSep,
cFlags,
nWidth,
nPrecision,
cConvChar
) {
// cConvChar must be one of d, f, s, x
if (
cConvChar !== "d" &&
cConvChar !== "f" &&
cConvChar !== "s" &&
cConvChar !== "x"
) {
const buf = ["%"];
for (const str of [nDecSep, cFlags, nWidth, nPrecision, cConvChar]) {
if (str) {
buf.push(str);
return args[0].replace(
pattern,
function (match, nDecSep, cFlags, nWidth, nPrecision, cConvChar) {
// cConvChar must be one of d, f, s, x
if (
cConvChar !== "d" &&
cConvChar !== "f" &&
cConvChar !== "s" &&
cConvChar !== "x"
) {
const buf = ["%"];
for (const str of [nDecSep, cFlags, nWidth, nPrecision, cConvChar]) {
if (str) {
buf.push(str);
}
}
return buf.join("");
}

i++;
if (i === args.length) {
throw new Error("Not enough arguments in printf");
}
const arg = args[i];

if (cConvChar === "s") {
return arg.toString();
}

let flags = 0;
if (cFlags) {
for (const flag of cFlags) {
switch (flag) {
case "+":
flags |= PLUS;
break;
case " ":
flags |= SPACE;
break;
case "0":
flags |= ZERO;
break;
case "#":
flags |= HASH;
break;
}
}
}
return buf.join("");
}
cFlags = flags;

i++;
if (i === args.length) {
throw new Error("Not enough arguments in printf");
}
const arg = args[i];
if (nWidth) {
nWidth = parseInt(nWidth);
}

if (cConvChar === "s") {
return arg.toString();
}
let intPart = Math.trunc(arg);

let flags = 0;
if (cFlags) {
for (const flag of cFlags) {
switch (flag) {
case "+":
flags |= PLUS;
break;
case " ":
flags |= SPACE;
break;
case "0":
flags |= ZERO;
break;
case "#":
flags |= HASH;
break;
if (cConvChar === "x") {
let hex = Math.abs(intPart).toString(16).toUpperCase();
if (nWidth !== undefined) {
hex = hex.padStart(nWidth, cFlags & ZERO ? "0" : " ");
}
if (cFlags & HASH) {
hex = `0x${hex}`;
}
return hex;
}

if (nPrecision) {
nPrecision = parseInt(nPrecision.substring(1));
}

nDecSep = nDecSep ? nDecSep.substring(1) : "0";
const separators = {
0: [",", "."],
1: ["", "."],
2: [".", ","],
3: ["", ","],
4: ["'", "."],
};
const [thousandSep, decimalSep] = separators[nDecSep];

let decPart = "";
if (cConvChar === "f") {
if (nPrecision !== undefined) {
decPart = (arg - intPart).toFixed(nPrecision);
} else {
decPart = (arg - intPart).toString();
}
if (decPart.length > 2) {
decPart = `${decimalSep}${decPart.substring(2)}`;
} else if (cFlags & HASH) {
decPart = ".";
} else {
decPart = "";
}
}
}
cFlags = flags;

if (nWidth) {
nWidth = parseInt(nWidth);
}
let sign = "";
if (intPart < 0) {
sign = "-";
intPart = -intPart;
} else if (cFlags & PLUS) {
sign = "+";
} else if (cFlags & SPACE) {
sign = " ";
}

let intPart = Math.trunc(arg);
if (thousandSep && intPart >= 1000) {
const buf = [];
while (true) {
buf.push((intPart % 1000).toString().padStart(3, "0"));
intPart = Math.trunc(intPart / 1000);
if (intPart < 1000) {
buf.push(intPart.toString());
break;
}
}
intPart = buf.reverse().join(thousandSep);
} else {
intPart = intPart.toString();
}

if (cConvChar === "x") {
let hex = Math.abs(intPart).toString(16).toUpperCase();
let n = `${intPart}${decPart}`;
if (nWidth !== undefined) {
hex = hex.padStart(nWidth, cFlags & ZERO ? "0" : " ");
n = n.padStart(nWidth - sign.length, cFlags & ZERO ? "0" : " ");
}
if (cFlags & HASH) {
hex = `0x${hex}`;
}
return hex;

return `${sign}${n}`;
}

if (nPrecision) {
nPrecision = parseInt(nPrecision.substring(1));
}

nDecSep = nDecSep ? nDecSep.substring(1) : "0";
const separators = {
0: [",", "."],
1: ["", "."],
2: [".", ","],
3: ["", ","],
4: ["'", "."],
};
const [thousandSep, decimalSep] = separators[nDecSep];

let decPart = "";
if (cConvChar === "f") {
if (nPrecision !== undefined) {
decPart = (arg - intPart).toFixed(nPrecision);
} else {
decPart = (arg - intPart).toString();
}
if (decPart.length > 2) {
decPart = `${decimalSep}${decPart.substring(2)}`;
} else if (cFlags & HASH) {
decPart = ".";
} else {
decPart = "";
}
}

let sign = "";
if (intPart < 0) {
sign = "-";
intPart = -intPart;
} else if (cFlags & PLUS) {
sign = "+";
} else if (cFlags & SPACE) {
sign = " ";
}

if (thousandSep && intPart >= 1000) {
const buf = [];
while (true) {
buf.push((intPart % 1000).toString().padStart(3, "0"));
intPart = Math.trunc(intPart / 1000);
if (intPart < 1000) {
buf.push(intPart.toString());
break;
}
}
intPart = buf.reverse().join(thousandSep);
} else {
intPart = intPart.toString();
}

let n = `${intPart}${decPart}`;
if (nWidth !== undefined) {
n = n.padStart(nWidth - sign.length, cFlags & ZERO ? "0" : " ");
}

return `${sign}${n}`;
});
);
}

iconStreamFromIcon() {
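Note: the printf re-wrap above leaves the formatting logic intact. The least obvious part is the thousand-separator grouping; a self-contained sketch of just that step (the helper name is made up, the loop mirrors the diff):

function groupThousands(value, thousandSep) {
  // Split the integer part into groups of three digits, least-significant first.
  let intPart = Math.trunc(value);
  if (!thousandSep || intPart < 1000) {
    return intPart.toString();
  }
  const buf = [];
  while (true) {
    buf.push((intPart % 1000).toString().padStart(3, "0"));
    intPart = Math.trunc(intPart / 1000);
    if (intPart < 1000) {
      buf.push(intPart.toString());
      break;
    }
  }
  return buf.reverse().join(thousandSep);
}

console.log(groupThousands(1234567, ",")); // "1,234,567"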
@@ -525,14 +521,14 @@ class Util extends PDFObject {
const patterns = /(mmmm|mmm|mm|m|dddd|ddd|dd|d|yyyy|yy|HH|H|hh|h|MM|M|ss|s|tt|t)/g;
const actions = [];

const re = escapedFormat.replace(patterns, function (
match,
patternElement
) {
const { pattern, action } = handlers[patternElement];
actions.push(action);
return pattern;
});
const re = escapedFormat.replace(
patterns,
function (match, patternElement) {
const { pattern, action } = handlers[patternElement];
actions.push(action);
return pattern;
}
);

this._scandCache.set(cFormat, [new RegExp(re, "g"), actions]);
}
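Note: scand's format compilation is likewise only re-wrapped. The idea is to turn a date format into a capturing regexp while recording which action handles each capture group; a toy sketch with a deliberately tiny, hypothetical handlers table (the real table covers many more patterns and escapes the format string first):

const handlers = {
  mm: { pattern: "(\\d{2})", action: "month" },
  dd: { pattern: "(\\d{2})", action: "day" },
  yyyy: { pattern: "(\\d{4})", action: "year" },
};
const patterns = /(yyyy|mm|dd)/g;

function compileFormat(cFormat) {
  const actions = [];
  const re = cFormat.replace(patterns, function (match, patternElement) {
    const { pattern, action } = handlers[patternElement];
    actions.push(action);
    return pattern;
  });
  return [new RegExp(re, "g"), actions];
}

console.log(compileFormat("mm/dd/yyyy")); // [ /(\d{2})\/(\d{2})\/(\d{4})/g, [ 'month', 'day', 'year' ] ]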
@@ -121,19 +121,22 @@ describe("evaluator", function () {
resources.XObject = xObject;

const stream = new StringStream("/Res1 DoQ");
runOperatorListCheck(partialEvaluator, stream, resources, function (
result
) {
expect(result.fnArray.length).toEqual(3);
expect(result.fnArray[0]).toEqual(OPS.dependency);
expect(result.fnArray[1]).toEqual(OPS.paintImageXObject);
expect(result.fnArray[2]).toEqual(OPS.restore);
expect(result.argsArray.length).toEqual(3);
expect(result.argsArray[0]).toEqual(["img_p0_1"]);
expect(result.argsArray[1]).toEqual(["img_p0_1", 1, 1]);
expect(result.argsArray[2]).toEqual(null);
done();
});
runOperatorListCheck(
partialEvaluator,
stream,
resources,
function (result) {
expect(result.fnArray.length).toEqual(3);
expect(result.fnArray[0]).toEqual(OPS.dependency);
expect(result.fnArray[1]).toEqual(OPS.paintImageXObject);
expect(result.fnArray[2]).toEqual(OPS.restore);
expect(result.argsArray.length).toEqual(3);
expect(result.argsArray[0]).toEqual(["img_p0_1"]);
expect(result.argsArray[1]).toEqual(["img_p0_1", 1, 1]);
expect(result.argsArray[2]).toEqual(null);
done();
}
);
});

it("should handle three glued operations", function (done) {
@@ -157,16 +160,19 @@ describe("evaluator", function () {
const resources = new ResourcesMock();
resources.Res1 = {};
const stream = new StringStream("B*Bf*");
runOperatorListCheck(partialEvaluator, stream, resources, function (
result
) {
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
expect(result.fnArray.length).toEqual(3);
expect(result.fnArray[0]).toEqual(OPS.eoFillStroke);
expect(result.fnArray[1]).toEqual(OPS.fillStroke);
expect(result.fnArray[2]).toEqual(OPS.eoFill);
done();
});
runOperatorListCheck(
partialEvaluator,
stream,
resources,
function (result) {
expect(!!result.fnArray && !!result.argsArray).toEqual(true);
expect(result.fnArray.length).toEqual(3);
expect(result.fnArray[0]).toEqual(OPS.eoFillStroke);
expect(result.fnArray[1]).toEqual(OPS.fillStroke);
expect(result.fnArray[2]).toEqual(OPS.eoFill);
done();
}
);
});

it("should handle glued operations and operands", function (done) {
@@ -253,24 +259,27 @@ describe("evaluator", function () {
resources.ExtGState = extGState;

const stream = new StringStream("/F2 /GS2 gs 5.711 Tf");
runOperatorListCheck(partialEvaluator, stream, resources, function (
result
) {
expect(result.fnArray.length).toEqual(3);
expect(result.fnArray[0]).toEqual(OPS.setGState);
expect(result.fnArray[1]).toEqual(OPS.dependency);
expect(result.fnArray[2]).toEqual(OPS.setFont);
expect(result.argsArray.length).toEqual(3);
expect(result.argsArray[0]).toEqual([
[
["LW", 2],
["CA", 0.5],
],
]);
expect(result.argsArray[1]).toEqual(["g_font_error"]);
expect(result.argsArray[2]).toEqual(["g_font_error", 5.711]);
done();
});
runOperatorListCheck(
partialEvaluator,
stream,
resources,
function (result) {
expect(result.fnArray.length).toEqual(3);
expect(result.fnArray[0]).toEqual(OPS.setGState);
expect(result.fnArray[1]).toEqual(OPS.dependency);
expect(result.fnArray[2]).toEqual(OPS.setFont);
expect(result.argsArray.length).toEqual(3);
expect(result.argsArray[0]).toEqual([
[
["LW", 2],
["CA", 0.5],
],
]);
expect(result.argsArray[1]).toEqual(["g_font_error"]);
expect(result.argsArray[2]).toEqual(["g_font_error", 5.711]);
done();
}
);
});
it("should skip if too few arguments", function (done) {
const stream = new StringStream("5 d0");
@@ -369,13 +378,16 @@ describe("evaluator", function () {
resources.set("XObject", xobjs);

const stream = new StringStream("/Res1 Do");
runOperatorListCheck(partialEvaluator, stream, resources, function (
result
) {
expect(result.argsArray).toEqual([]);
expect(result.fnArray).toEqual([]);
done();
});
runOperatorListCheck(
partialEvaluator,
stream,
resources,
function (result) {
expect(result.argsArray).toEqual([]);
expect(result.fnArray).toEqual([]);
done();
}
);
});
});
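Note: all of the evaluator test changes are the same mechanical re-wrap of runOperatorListCheck calls. The underlying pattern is an async Jasmine spec that only finishes once the callback has run; stripped down (the helpers and the spec title here are illustrative, taken from or modeled on the spec file):

it("should handle an operator stream", function (done) {
  const stream = new StringStream("B*Bf*");
  runOperatorListCheck(
    partialEvaluator,
    stream,
    new ResourcesMock(),
    function (result) {
      expect(result.fnArray.length).toEqual(3);
      done(); // tell Jasmine the async check completed
    }
  );
});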
@@ -98,29 +98,30 @@ describe("pdf_find_controller", function () {
return a + b;
});

eventBus.on("updatefindmatchescount", function onUpdateFindMatchesCount(
evt
) {
if (pdfFindController.pageMatches.length !== totalPages) {
return;
}
eventBus.off("updatefindmatchescount", onUpdateFindMatchesCount);
eventBus.on(
"updatefindmatchescount",
function onUpdateFindMatchesCount(evt) {
if (pdfFindController.pageMatches.length !== totalPages) {
return;
}
eventBus.off("updatefindmatchescount", onUpdateFindMatchesCount);

expect(evt.matchesCount.total).toBe(totalMatches);
for (let i = 0; i < totalPages; i++) {
expect(pdfFindController.pageMatches[i].length).toEqual(
matchesPerPage[i]
expect(evt.matchesCount.total).toBe(totalMatches);
for (let i = 0; i < totalPages; i++) {
expect(pdfFindController.pageMatches[i].length).toEqual(
matchesPerPage[i]
);
}
expect(pdfFindController.selected.pageIdx).toEqual(
selectedMatch.pageIndex
);
expect(pdfFindController.selected.matchIdx).toEqual(
selectedMatch.matchIndex
);
}
expect(pdfFindController.selected.pageIdx).toEqual(
selectedMatch.pageIndex
);
expect(pdfFindController.selected.matchIdx).toEqual(
selectedMatch.matchIndex
);

resolve();
});
resolve();
}
);
});
}
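Note: the find-controller test registers a named listener that unregisters itself once every page has reported its matches. Generalized into a small helper (a sketch against the on/off EventBus API used by the spec, not something added by this commit):

function onceWhen(eventBus, eventName, predicate, callback) {
  eventBus.on(eventName, function listener(evt) {
    if (!predicate(evt)) {
      return; // keep listening until the condition is met
    }
    eventBus.off(eventName, listener);
    callback(evt);
  });
}

// Hypothetical usage mirroring the spec above:
// onceWhen(eventBus, "updatefindmatchescount",
//   () => pdfFindController.pageMatches.length === totalPages,
//   evt => { /* assertions on evt.matchesCount, then */ resolve(); });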