Feature/multiple patch document exports (#2497)
* Turn patch document into options object

  Add outputType to options

* Set keep styles to true by default
* Simplify method
* Rename variable
* #2267 Multiple patches of same key
* Remove path which won't be visited
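For reference, a minimal usage sketch of the new options-object API, pieced together from the demo and test changes below (the template path is taken from the demos; the patch key and replacement text are illustrative only):

import * as fs from "fs";
import { patchDocument, PatchType, TextRun } from "docx";

patchDocument({
    outputType: "nodebuffer",
    data: fs.readFileSync("demo/assets/simple-template.docx"),
    patches: {
        name: {
            type: PatchType.PARAGRAPH,
            children: [new TextRun("John Doe")], // illustrative replacement content
        },
    },
    keepOriginalStyles: true, // this now defaults to true
}).then((doc) => {
    // with outputType: "nodebuffer" the promise resolves to a Buffer
    fs.writeFileSync("My Document.docx", doc);
});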
@@ -16,7 +16,9 @@ import {
     VerticalAlign,
 } from "docx";

-patchDocument(fs.readFileSync("demo/assets/simple-template.docx"), {
+patchDocument({
+    outputType: "nodebuffer",
+    data: fs.readFileSync("demo/assets/simple-template.docx"),
     patches: {
         name: {
             type: PatchType.PARAGRAPH,
@@ -56,7 +58,11 @@ patchDocument(fs.readFileSync("demo/assets/simple-template.docx"), {
                             ],
                             link: "https://www.google.co.uk",
                         }),
-                        new ImageRun({ data: fs.readFileSync("./demo/images/dog.png"), transformation: { width: 100, height: 100 } }),
+                        new ImageRun({
+                            type: "png",
+                            data: fs.readFileSync("./demo/images/dog.png"),
+                            transformation: { width: 100, height: 100 },
+                        }),
                     ],
                 }),
             ],
@@ -82,7 +88,13 @@ patchDocument(fs.readFileSync("demo/assets/simple-template.docx"), {
         },
         image_test: {
             type: PatchType.PARAGRAPH,
-            children: [new ImageRun({ data: fs.readFileSync("./demo/images/image1.jpeg"), transformation: { width: 100, height: 100 } })],
+            children: [
+                new ImageRun({
+                    type: "jpg",
+                    data: fs.readFileSync("./demo/images/image1.jpeg"),
+                    transformation: { width: 100, height: 100 },
+                }),
+            ],
         },
         table: {
             type: PatchType.DOCUMENT,
@@ -3,7 +3,9 @@
 import * as fs from "fs";
 import { patchDocument, PatchType, TextRun } from "docx";

-patchDocument(fs.readFileSync("demo/assets/simple-template-2.docx"), {
+patchDocument({
+    outputType: "nodebuffer",
+    data: fs.readFileSync("demo/assets/simple-template-2.docx"),
     patches: {
         name: {
             type: PatchType.PARAGRAPH,
@@ -24,7 +24,9 @@ const patches = getPatches({
     paragraph_replace: "Lorem ipsum paragraph",
 });

-patchDocument(fs.readFileSync("demo/assets/simple-template.docx"), {
+patchDocument({
+    outputType: "nodebuffer",
+    data: fs.readFileSync("demo/assets/simple-template.docx"),
     patches,
 }).then((doc) => {
     fs.writeFileSync("My Document.docx", doc);
@@ -22,8 +22,11 @@ const patches = getPatches({
     "first-name": "John",
 });

-patchDocument(fs.readFileSync("demo/assets/simple-template-3.docx"), {
+patchDocument({
+    outputType: "nodebuffer",
+    data: fs.readFileSync("demo/assets/simple-template-3.docx"),
     patches,
+    keepOriginalStyles: true,
 }).then((doc) => {
     fs.writeFileSync("My Document.docx", doc);
 });
Binary file not shown.
@@ -218,7 +218,9 @@ describe("from-docx", () => {
         });

         it("should patch the document", async () => {
-            const output = await patchDocument(Buffer.from(""), {
+            const output = await patchDocument({
+                outputType: "uint8array",
+                data: Buffer.from(""),
                 patches: {
                     name: {
                         type: PatchType.PARAGRAPH,
@@ -279,7 +281,9 @@ describe("from-docx", () => {
         });

         it("should patch the document", async () => {
-            const output = await patchDocument(Buffer.from(""), {
+            const output = await patchDocument({
+                outputType: "uint8array",
+                data: Buffer.from(""),
                 patches: {},
             });
             expect(output).to.not.be.undefined;
@@ -305,7 +309,9 @@ describe("from-docx", () => {
         });

         it("should use the relationships file rather than create one", async () => {
-            const output = await patchDocument(Buffer.from(""), {
+            const output = await patchDocument({
+                outputType: "uint8array",
+                data: Buffer.from(""),
                 patches: {
                     // eslint-disable-next-line @typescript-eslint/naming-convention
                     image_test: {
@@ -350,7 +356,9 @@ describe("from-docx", () => {

         it("should throw an error if the content types is not found", () =>
             expect(
-                patchDocument(Buffer.from(""), {
+                patchDocument({
+                    outputType: "uint8array",
+                    data: Buffer.from(""),
                     patches: {
                         // eslint-disable-next-line @typescript-eslint/naming-convention
                         image_test: {
@@ -388,7 +396,9 @@ describe("from-docx", () => {

         it("should throw an error if the content types is not found", () =>
             expect(
-                patchDocument(Buffer.from(""), {
+                patchDocument({
+                    outputType: "uint8array",
+                    data: Buffer.from(""),
                     patches: {
                         // eslint-disable-next-line @typescript-eslint/naming-convention
                         image_test: {
@@ -12,7 +12,6 @@ import { TargetModeType } from "@file/relationships/relationship/relationship";
 import { uniqueId } from "@util/convenience-functions";

 import { replacer } from "./replacer";
-import { findLocationOfText } from "./traverser";
 import { toJson } from "./util";
 import { appendRelationship, getNextRelationshipIndex } from "./relationship-manager";
 import { appendContentType } from "./content-types-manager";
@@ -47,14 +46,37 @@ interface IHyperlinkRelationshipAddition {

 export type IPatch = ParagraphPatch | FilePatch;

-export interface PatchDocumentOptions {
+// From JSZip
+type OutputByType = {
+    readonly base64: string;
+    // eslint-disable-next-line id-denylist
+    readonly string: string;
+    readonly text: string;
+    readonly binarystring: string;
+    readonly array: readonly number[];
+    readonly uint8array: Uint8Array;
+    readonly arraybuffer: ArrayBuffer;
+    readonly blob: Blob;
+    readonly nodebuffer: Buffer;
+};
+
+export type PatchDocumentOutputType = keyof OutputByType;
+
+export type PatchDocumentOptions<T extends PatchDocumentOutputType = PatchDocumentOutputType> = {
+    readonly outputType: T;
+    readonly data: InputDataType;
     readonly patches: { readonly [key: string]: IPatch };
     readonly keepOriginalStyles?: boolean;
-}
+};

 const imageReplacer = new ImageReplacer();

-export const patchDocument = async (data: InputDataType, options: PatchDocumentOptions): Promise<Uint8Array> => {
+export const patchDocument = async <T extends PatchDocumentOutputType = PatchDocumentOutputType>({
+    outputType,
+    data,
+    patches,
+    keepOriginalStyles,
+}: PatchDocumentOptions<T>): Promise<OutputByType[T]> => {
     const zipContent = await JSZip.loadAsync(data);
     const contexts = new Map<string, IContext>();
     const file = {
@@ -104,13 +126,20 @@ export const patchDocument = async (data: InputDataType, options: PatchDocumentO
         };
         contexts.set(key, context);

-        for (const [patchKey, patchValue] of Object.entries(options.patches)) {
+        for (const [patchKey, patchValue] of Object.entries(patches)) {
             const patchText = `{{${patchKey}}}`;
-            const renderedParagraphs = findLocationOfText(json, patchText);
             // TODO: mutates json. Make it immutable
-            replacer(
-                json,
-                {
+            // We need to loop through to catch every occurrence of the patch text
+            // It is possible that the patch text is in the same run
+            // This algorithm is limited to one patch per text run
+            // Once it cannot find any more occurrences, it will throw an error, and then we break out of the loop
+            // https://github.com/dolanmiu/docx/issues/2267
+            // eslint-disable-next-line no-constant-condition
+            while (true) {
+                try {
+                    replacer({
+                        json,
+                        patch: {
                             ...patchValue,
                             children: patchValue.children.map((element) => {
                                 // We need to replace external hyperlinks with concrete hyperlinks
@@ -132,10 +161,13 @@ export const patchDocument = async (data: InputDataType, options: PatchDocumentO
                         // eslint-disable-next-line @typescript-eslint/no-explicit-any
                         } as any,
                         patchText,
-                renderedParagraphs,
                         context,
-                options.keepOriginalStyles,
-            );
+                        keepOriginalStyles,
+                    });
+                } catch {
+                    break;
+                }
+            }
         }

         const mediaDatas = imageReplacer.getMediaData(JSON.stringify(json), context.file.Media);
@@ -201,6 +233,7 @@ export const patchDocument = async (data: InputDataType, options: PatchDocumentO
         appendContentType(contentTypesJson, "image/jpeg", "jpg");
         appendContentType(contentTypesJson, "image/bmp", "bmp");
         appendContentType(contentTypesJson, "image/gif", "gif");
+        appendContentType(contentTypesJson, "image/svg+xml", "svg");
     }

     const zip = new JSZip();
@@ -220,7 +253,7 @@ export const patchDocument = async (data: InputDataType, options: PatchDocumentO
     }

     return zip.generateAsync({
-        type: "uint8array",
+        type: outputType,
         mimeType: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
         compression: "DEFLATE",
     });
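The OutputByType lookup above ties the generic outputType parameter to the type the returned promise resolves to. A hedged sketch of what that buys callers, not part of the commit itself (the template path is borrowed from the demos above, and the empty patches object mirrors the test cases):

import * as fs from "fs";
import { patchDocument } from "docx";

const run = async (): Promise<void> => {
    const data = fs.readFileSync("demo/assets/simple-template.docx");

    // outputType "nodebuffer": the promise resolves to a Node.js Buffer
    const buffer = await patchDocument({ outputType: "nodebuffer", data, patches: {} });
    fs.writeFileSync("My Document.docx", buffer);

    // outputType "base64": the promise resolves to a base64-encoded string
    const base64 = await patchDocument({ outputType: "base64", data, patches: {} });
    console.log(base64.length);
};

run();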
@ -27,7 +27,7 @@ describe("paragraph-token-replacer", () => {
|
|||||||
},
|
},
|
||||||
renderedParagraph: {
|
renderedParagraph: {
|
||||||
index: 0,
|
index: 0,
|
||||||
path: [0],
|
pathToParagraph: [0],
|
||||||
runs: [
|
runs: [
|
||||||
{
|
{
|
||||||
end: 4,
|
end: 4,
|
||||||
@ -128,7 +128,7 @@ describe("paragraph-token-replacer", () => {
|
|||||||
{ text: "World", parts: [{ text: "World", index: 0, start: 15, end: 19 }], index: 3, start: 15, end: 19 },
|
{ text: "World", parts: [{ text: "World", index: 0, start: 15, end: 19 }], index: 3, start: 15, end: 19 },
|
||||||
],
|
],
|
||||||
index: 0,
|
index: 0,
|
||||||
path: [0, 1, 0, 0],
|
pathToParagraph: [0, 1, 0, 0],
|
||||||
},
|
},
|
||||||
originalText: "{{name}}",
|
originalText: "{{name}}",
|
||||||
replacementText: "John",
|
replacementText: "John",
|
||||||
|
@@ -8,7 +8,7 @@ import { PatchType } from "./from-docx";

 import { replacer } from "./replacer";

-const MOCK_JSON = {
+export const MOCK_JSON = {
     elements: [
         {
             type: "element",
@@ -73,103 +73,60 @@ const MOCK_JSON = {

 describe("replacer", () => {
     describe("replacer", () => {
-        it("should return the same object if nothing is added", () => {
-            const output = replacer(
-                {
+        it("should throw an error if nothing is added", () => {
+            expect(() =>
+                replacer({
+                    json: {
                         elements: [],
                     },
-                {
+                    patch: {
                         type: PatchType.PARAGRAPH,
                         children: [],
                     },
-                "hello",
-                [],
+                    patchText: "hello",
                     // eslint-disable-next-line functional/prefer-readonly-type
-                vi.fn<[], IContext>()(),
-            );
-
-            expect(output).to.deep.equal({
-                elements: [],
-            });
+                    context: vi.fn<[], IContext>()(),
+                }),
+            ).toThrow();
         });

         it("should replace paragraph type", () => {
-            const output = replacer(
-                MOCK_JSON,
-                {
+            const output = replacer({
+                json: JSON.parse(JSON.stringify(MOCK_JSON)),
+                patch: {
                     type: PatchType.PARAGRAPH,
                     children: [new TextRun("Delightful Header")],
                 },
-                "{{header_adjective}}",
-                [
-                    {
-                        text: "This is a {{header_adjective}} don’t you think?",
-                        runs: [
-                            {
-                                text: "This is a {{head",
-                                parts: [{ text: "This is a {{head", index: 0, start: 0, end: 15 }],
-                                index: 1,
-                                start: 0,
-                                end: 15,
-                            },
-                            { text: "er", parts: [{ text: "er", index: 0, start: 16, end: 17 }], index: 2, start: 16, end: 17 },
-                            {
-                                text: "_adjective}} don’t you think?",
-                                parts: [{ text: "_adjective}} don’t you think?", index: 0, start: 18, end: 46 }],
-                                index: 3,
-                                start: 18,
-                                end: 46,
-                            },
-                        ],
-                        index: 0,
-                        path: [0, 0, 0],
-                    },
-                ],
-                {
+                patchText: "{{header_adjective}}",
+                context: {
                     file: {} as unknown as File,
                     viewWrapper: {
                         Relationships: {},
                     } as unknown as IViewWrapper,
                     stack: [],
                 },
-            );
+            });

             expect(JSON.stringify(output)).to.contain("Delightful Header");
         });

         it("should replace paragraph type keeping original styling if keepOriginalStyles is true", () => {
-            const output = replacer(
-                MOCK_JSON,
-                {
+            const output = replacer({
+                json: JSON.parse(JSON.stringify(MOCK_JSON)),
+                patch: {
                     type: PatchType.PARAGRAPH,
                     children: [new TextRun("sweet")],
                 },
-                "{{bold}}",
-                [
-                    {
-                        text: "What a {{bold}} text!",
-                        runs: [
-                            {
-                                text: "What a {{bold}} text!",
-                                parts: [{ text: "What a {{bold}} text!", index: 1, start: 0, end: 21 }],
-                                index: 0,
-                                start: 0,
-                                end: 21,
-                            },
-                        ],
-                        index: 0,
-                        path: [0, 0, 1],
-                    },
-                ],
-                {
+                patchText: "{{bold}}",
+                context: {
                     file: {} as unknown as File,
                     viewWrapper: {
                         Relationships: {},
                     } as unknown as IViewWrapper,
                     stack: [],
                 },
-                true,
-            );
+                keepOriginalStyles: true,
+            });

             expect(JSON.stringify(output)).to.contain("sweet");
             expect(output.elements![0].elements![1].elements).toMatchObject([
@@ -225,91 +182,23 @@ describe("replacer", () => {
         });

         it("should replace document type", () => {
-            const output = replacer(
-                MOCK_JSON,
-                {
+            const output = replacer({
+                json: JSON.parse(JSON.stringify(MOCK_JSON)),
+                patch: {
                     type: PatchType.DOCUMENT,
                     children: [new Paragraph("Lorem ipsum paragraph")],
                 },
-                "{{header_adjective}}",
-                [
-                    {
-                        text: "This is a {{header_adjective}} don’t you think?",
-                        runs: [
-                            {
-                                text: "This is a {{head",
-                                parts: [{ text: "This is a {{head", index: 0, start: 0, end: 15 }],
-                                index: 1,
-                                start: 0,
-                                end: 15,
-                            },
-                            { text: "er", parts: [{ text: "er", index: 0, start: 16, end: 17 }], index: 2, start: 16, end: 17 },
-                            {
-                                text: "_adjective}} don’t you think?",
-                                parts: [{ text: "_adjective}} don’t you think?", index: 0, start: 18, end: 46 }],
-                                index: 3,
-                                start: 18,
-                                end: 46,
-                            },
-                        ],
-                        index: 0,
-                        path: [0, 0, 0],
-                    },
-                ],
-                {
+                patchText: "{{header_adjective}}",
+                context: {
                     file: {} as unknown as File,
                     viewWrapper: {
                         Relationships: {},
                     } as unknown as IViewWrapper,
                     stack: [],
                 },
-            );
+            });

             expect(JSON.stringify(output)).to.contain("Lorem ipsum paragraph");
         });
-
-        it("should throw an error if the type is not supported", () => {
-            expect(() =>
-                replacer(
-                    {},
-                    {
-                        type: PatchType.DOCUMENT,
-                        children: [new Paragraph("Lorem ipsum paragraph")],
-                    },
-                    "{{header_adjective}}",
-                    [
-                        {
-                            text: "This is a {{header_adjective}} don’t you think?",
-                            runs: [
-                                {
-                                    text: "This is a {{head",
-                                    parts: [{ text: "This is a {{head", index: 0, start: 0, end: 15 }],
-                                    index: 1,
-                                    start: 0,
-                                    end: 15,
-                                },
-                                { text: "er", parts: [{ text: "er", index: 0, start: 16, end: 17 }], index: 2, start: 16, end: 17 },
-                                {
-                                    text: "_adjective}} don’t you think?",
-                                    parts: [{ text: "_adjective}} don’t you think?", index: 0, start: 18, end: 46 }],
-                                    index: 3,
-                                    start: 18,
-                                    end: 46,
-                                },
-                            ],
-                            index: 0,
-                            path: [0, 0, 0],
-                        },
-                    ],
-                    {
-                        file: {} as unknown as File,
-                        viewWrapper: {
-                            Relationships: {},
-                        } as unknown as IViewWrapper,
-                        stack: [],
-                    },
-                ),
-            ).to.throw();
-        });
     });
 });
@@ -6,22 +6,33 @@ import { IContext, XmlComponent } from "@file/xml-components";

 import { IPatch, PatchType } from "./from-docx";
 import { toJson } from "./util";
-import { IRenderedParagraphNode } from "./run-renderer";
 import { replaceTokenInParagraphElement } from "./paragraph-token-replacer";
 import { findRunElementIndexWithToken, splitRunElement } from "./paragraph-split-inject";
+import { findLocationOfText } from "./traverser";

 const formatter = new Formatter();

 const SPLIT_TOKEN = "ɵ";

-export const replacer = (
-    json: Element,
-    patch: IPatch,
-    patchText: string,
-    renderedParagraphs: readonly IRenderedParagraphNode[],
-    context: IContext,
-    keepOriginalStyles: boolean = false,
-): Element => {
+export const replacer = ({
+    json,
+    patch,
+    patchText,
+    context,
+    keepOriginalStyles = true,
+}: {
+    readonly json: Element;
+    readonly patch: IPatch;
+    readonly patchText: string;
+    readonly context: IContext;
+    readonly keepOriginalStyles?: boolean;
+}): Element => {
+    const renderedParagraphs = findLocationOfText(json, patchText);
+
+    if (renderedParagraphs.length === 0) {
+        throw new Error(`Could not find text ${patchText}`);
+    }
+
     for (const renderedParagraph of renderedParagraphs) {
         const textJson = patch.children
             // eslint-disable-next-line no-loop-func
@@ -30,15 +41,15 @@ export const replacer = (

         switch (patch.type) {
             case PatchType.DOCUMENT: {
-                const parentElement = goToParentElementFromPath(json, renderedParagraph.path);
-                const elementIndex = getLastElementIndexFromPath(renderedParagraph.path);
+                const parentElement = goToParentElementFromPath(json, renderedParagraph.pathToParagraph);
+                const elementIndex = getLastElementIndexFromPath(renderedParagraph.pathToParagraph);
                 // eslint-disable-next-line functional/immutable-data, prefer-destructuring
                 parentElement.elements!.splice(elementIndex, 1, ...textJson);
                 break;
             }
             case PatchType.PARAGRAPH:
             default: {
-                const paragraphElement = goToElementFromPath(json, renderedParagraph.path);
+                const paragraphElement = goToElementFromPath(json, renderedParagraph.pathToParagraph);
                 replaceTokenInParagraphElement({
                     paragraphElement,
                     renderedParagraph,
@@ -87,11 +98,7 @@ const goToElementFromPath = (json: Element, path: readonly number[]): Element =>
     // Which we do not want to double count
     for (let i = 1; i < path.length; i++) {
         const index = path[i];
-        const nextElements = element.elements;
+        const nextElements = element.elements!;

-        if (!nextElements) {
-            throw new Error("Could not find element");
-        }
-
         element = nextElements[index];
     }
@ -7,7 +7,7 @@ describe("run-renderer", () => {
|
|||||||
const output = renderParagraphNode({ element: { name: "w:p" }, index: 0, parent: undefined });
|
const output = renderParagraphNode({ element: { name: "w:p" }, index: 0, parent: undefined });
|
||||||
expect(output).to.deep.equal({
|
expect(output).to.deep.equal({
|
||||||
index: -1,
|
index: -1,
|
||||||
path: [],
|
pathToParagraph: [],
|
||||||
runs: [],
|
runs: [],
|
||||||
text: "",
|
text: "",
|
||||||
});
|
});
|
||||||
@ -39,7 +39,7 @@ describe("run-renderer", () => {
|
|||||||
});
|
});
|
||||||
expect(output).to.deep.equal({
|
expect(output).to.deep.equal({
|
||||||
index: 0,
|
index: 0,
|
||||||
path: [0],
|
pathToParagraph: [0],
|
||||||
runs: [
|
runs: [
|
||||||
{
|
{
|
||||||
end: 4,
|
end: 4,
|
||||||
@ -79,7 +79,7 @@ describe("run-renderer", () => {
|
|||||||
});
|
});
|
||||||
expect(output).to.deep.equal({
|
expect(output).to.deep.equal({
|
||||||
index: 0,
|
index: 0,
|
||||||
path: [0],
|
pathToParagraph: [0],
|
||||||
runs: [
|
runs: [
|
||||||
{
|
{
|
||||||
end: 0,
|
end: 0,
|
||||||
|
@@ -6,7 +6,7 @@ export interface IRenderedParagraphNode {
     readonly text: string;
     readonly runs: readonly IRenderedRunNode[];
     readonly index: number;
-    readonly path: readonly number[];
+    readonly pathToParagraph: readonly number[];
 }

 interface StartAndEnd {
@@ -35,7 +35,7 @@ export const renderParagraphNode = (node: ElementWrapper): IRenderedParagraphNod
             text: "",
             runs: [],
             index: -1,
-            path: [],
+            pathToParagraph: [],
         };
     }

@@ -50,8 +50,7 @@ export const renderParagraphNode = (node: ElementWrapper): IRenderedParagraphNod

             return renderedRunNode;
         })
-        .filter((e) => !!e)
-        .map((e) => e as IRenderedRunNode);
+        .filter((e) => !!e);

     const text = runs.reduce((acc, curr) => acc + curr.text, "");

@@ -59,7 +58,7 @@ export const renderParagraphNode = (node: ElementWrapper): IRenderedParagraphNod
         text,
         runs,
         index: node.index,
-        path: buildNodePath(node),
+        pathToParagraph: buildNodePath(node),
     };
 };

@@ -139,6 +139,28 @@ const MOCK_JSON = {
                 },
             ],
         },
+        {
+            type: "element",
+            name: "w:p",
+            elements: [
+                {
+                    type: "element",
+                    name: "w:r",
+                    elements: [
+                        {
+                            type: "element",
+                            name: "w:rPr",
+                            elements: [{ type: "element", name: "w:b", attributes: { "w:val": "1" } }],
+                        },
+                        {
+                            type: "element",
+                            name: "w:t",
+                            elements: [{ type: "text", text: "What a {{bold}} text!" }],
+                        },
+                    ],
+                },
+            ],
+        },
         {
             type: "element",
             name: "w:p",
@@ -535,6 +557,45 @@ const MOCK_JSON = {
                 },
             ],
         },
+        {
+            type: "element",
+            name: "w:p",
+            attributes: {
+                "w14:paraId": "3BE1A671",
+                "w14:textId": "74E856C4",
+                "w:rsidR": "000D38A7",
+                "w:rsidRDefault": "000D38A7",
+            },
+            elements: [
+                {
+                    type: "element",
+                    name: "w:pPr",
+                    elements: [{ type: "element", name: "w:pStyle", attributes: { "w:val": "Header" } }],
+                },
+                {
+                    type: "element",
+                    name: "w:r",
+                    elements: [{ type: "element", name: "w:t", elements: [{ type: "text", text: "This is a {{head" }] }],
+                },
+                {
+                    type: "element",
+                    name: "w:r",
+                    attributes: { "w:rsidR": "004A3A99" },
+                    elements: [{ type: "element", name: "w:t", elements: [{ type: "text", text: "er" }] }],
+                },
+                {
+                    type: "element",
+                    name: "w:r",
+                    elements: [
+                        {
+                            type: "element",
+                            name: "w:t",
+                            elements: [{ type: "text", text: "_adjective}} don’t you think?" }],
+                        },
+                    ],
+                },
+            ],
+        },
         {
             type: "element",
             name: "w:sectPr",
@@ -574,7 +635,7 @@ describe("traverser", () => {
             expect(output).to.deep.equal([
                 {
                     index: 1,
-                    path: [0, 0, 0, 8, 2, 0, 1],
+                    pathToParagraph: [0, 0, 0, 9, 2, 0, 1],
                     runs: [
                         {
                             end: 18,
@@ -595,5 +656,76 @@ describe("traverser", () => {
                 },
             ]);
         });
+
+        it("should find the location of text", () => {
+            const output = findLocationOfText(MOCK_JSON, "{{bold}}");
+
+            expect(output).to.deep.equal([
+                {
+                    text: "What a {{bold}} text!",
+                    runs: [
+                        {
+                            text: "What a {{bold}} text!",
+                            parts: [{ text: "What a {{bold}} text!", index: 1, start: 0, end: 20 }],
+                            index: 0,
+                            start: 0,
+                            end: 20,
+                        },
+                    ],
+                    index: 5,
+                    pathToParagraph: [0, 0, 0, 5],
+                },
+            ]);
+        });
+
+        it("should find the location of text", () => {
+            const output = findLocationOfText(MOCK_JSON, "{{bold}}");
+
+            expect(output).to.deep.equal([
+                {
+                    text: "What a {{bold}} text!",
+                    runs: [
+                        {
+                            text: "What a {{bold}} text!",
+                            parts: [{ text: "What a {{bold}} text!", index: 1, start: 0, end: 20 }],
+                            index: 0,
+                            start: 0,
+                            end: 20,
+                        },
+                    ],
+                    index: 5,
+                    pathToParagraph: [0, 0, 0, 5],
+                },
+            ]);
+        });
+
+        it("should find the location of text", () => {
+            const output = findLocationOfText(MOCK_JSON, "{{header_adjective}}");
+
+            expect(output).to.deep.equal([
+                {
+                    text: "This is a {{header_adjective}} don’t you think?",
+                    runs: [
+                        {
+                            text: "This is a {{head",
+                            parts: [{ text: "This is a {{head", index: 0, start: 0, end: 15 }],
+                            index: 1,
+                            start: 0,
+                            end: 15,
+                        },
+                        { text: "er", parts: [{ text: "er", index: 0, start: 16, end: 17 }], index: 2, start: 16, end: 17 },
+                        {
+                            text: "_adjective}} don’t you think?",
+                            parts: [{ text: "_adjective}} don’t you think?", index: 0, start: 18, end: 46 }],
+                            index: 3,
+                            start: 18,
+                            end: 46,
+                        },
+                    ],
+                    index: 14,
+                    pathToParagraph: [0, 0, 0, 14],
+                },
+            ]);
+        });
     });
 });