Rearchitect import (#6141)

Tom Moor
2023-11-13 20:15:38 -05:00
committed by GitHub
parent 2143269bcd
commit 1898a34418
10 changed files with 302 additions and 244 deletions

View File

@@ -222,6 +222,7 @@
"tmp": "^0.2.1",
"turndown": "^7.1.2",
"umzug": "^3.2.1",
"unzipper": "0.10.11",
"utf8": "^3.0.0",
"utility-types": "^3.10.0",
"uuid": "^8.3.2",
@@ -301,6 +302,7 @@
"@types/throng": "^5.0.4",
"@types/tmp": "^0.2.3",
"@types/turndown": "^5.0.1",
"@types/unzipper": "^0.10.9",
"@types/utf8": "^3.0.1",
"@types/validator": "^13.7.17",
"@typescript-eslint/eslint-plugin": "^5.62.0",

View File

@@ -1,4 +1,5 @@
import JSZip from "jszip";
import path from "path";
import fs from "fs-extra";
import escapeRegExp from "lodash/escapeRegExp";
import find from "lodash/find";
import mime from "mime-types";
@@ -13,18 +14,19 @@ import {
DocumentJSONExport,
JSONExportMetadata,
} from "@server/types";
import ZipHelper, { FileTreeNode } from "@server/utils/ZipHelper";
import ImportHelper, { FileTreeNode } from "@server/utils/ImportHelper";
import ImportTask, { StructuredImportData } from "./ImportTask";
export default class ImportJSONTask extends ImportTask {
public async parseData(
buffer: Buffer,
fileOperation: FileOperation
dirPath: string,
_: FileOperation
): Promise<StructuredImportData> {
const zip = await JSZip.loadAsync(buffer);
const tree = ZipHelper.toFileTree(zip);
return this.parseFileTree({ fileOperation, zip, tree });
const tree = await ImportHelper.toFileTree(dirPath);
if (!tree) {
throw new Error("Could not find valid content in zip file");
}
return this.parseFileTree(tree.children);
}
/**
@@ -34,14 +36,10 @@ export default class ImportJSONTask extends ImportTask {
* @param tree An array of FileTreeNode representing root files in the zip
* @returns A StructuredImportData object
*/
private async parseFileTree({
zip,
tree,
}: {
zip: JSZip;
fileOperation: FileOperation;
tree: FileTreeNode[];
}): Promise<StructuredImportData> {
private async parseFileTree(
tree: FileTreeNode[]
): Promise<StructuredImportData> {
let rootPath = "";
const output: StructuredImportData = {
collections: [],
documents: [],
@@ -51,10 +49,16 @@ export default class ImportJSONTask extends ImportTask {
// Load metadata
let metadata: JSONExportMetadata | undefined = undefined;
for (const node of tree) {
if (node.path === "metadata.json") {
const zipObject = zip.files["metadata.json"];
metadata = JSON.parse(await zipObject.async("string"));
if (!rootPath) {
rootPath = path.dirname(node.path);
}
if (node.path === "metadata.json") {
metadata = JSON.parse(await fs.readFile(node.path, "utf8"));
}
}
if (!rootPath) {
throw new Error("Could not find root path");
}
Logger.debug("task", "Importing JSON metadata", { metadata });
@@ -93,13 +97,12 @@ export default class ImportJSONTask extends ImportTask {
}) {
Object.values(attachments).forEach((node) => {
const id = uuidv4();
const zipObject = zip.files[node.key];
const mimeType = mime.lookup(node.key) || "application/octet-stream";
output.attachments.push({
id,
name: node.name,
buffer: () => zipObject.async("nodebuffer"),
buffer: () => fs.readFile(path.join(rootPath, node.key)),
mimeType,
path: node.key,
externalId: node.id,
@@ -109,17 +112,12 @@ export default class ImportJSONTask extends ImportTask {
// All nodes in the root level should be collections as JSON + metadata
for (const node of tree) {
if (
node.path.endsWith("/") ||
node.path === ".DS_Store" ||
node.path === "metadata.json"
) {
if (node.children.length > 0 || node.path.endsWith("metadata.json")) {
continue;
}
const zipObject = zip.files[node.path];
const item: CollectionJSONExport = JSON.parse(
await zipObject.async("string")
await fs.readFile(node.path, "utf8")
);
const collectionId = uuidv4();
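
To make the new read path concrete, the sketch below walks the extracted directory with ImportHelper.toFileTree and loads metadata.json via the filesystem instead of a JSZip entry. readExportMetadata is a hypothetical helper used for illustration only, and the basename comparison is an assumption; the task itself matches on node.path.

import path from "path";
import fs from "fs-extra";
import ImportHelper from "@server/utils/ImportHelper";
import type { JSONExportMetadata } from "@server/types";

// Hypothetical helper: locate and parse metadata.json in an extracted JSON export
async function readExportMetadata(
  dirPath: string
): Promise<JSONExportMetadata | undefined> {
  const tree = await ImportHelper.toFileTree(dirPath);
  if (!tree) {
    throw new Error("Could not find valid content in zip file");
  }

  for (const node of tree.children) {
    // metadata.json sits at the root of the export, next to the collection JSON files
    if (path.basename(node.path) === "metadata.json") {
      return JSON.parse(await fs.readFile(node.path, "utf8"));
    }
  }
  return undefined;
}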

View File

@@ -77,8 +77,8 @@ describe("ImportMarkdownZipTask", () => {
error = err;
}
expect(error && error.message).toBe(
"Uploaded file does not contain any valid documents"
expect(error && error.message).toContain(
"Uploaded file does not contain any valid collections"
);
});
});

View File

@@ -1,40 +1,38 @@
import JSZip from "jszip";
import fs from "fs-extra";
import escapeRegExp from "lodash/escapeRegExp";
import mime from "mime-types";
import { v4 as uuidv4 } from "uuid";
import documentImporter from "@server/commands/documentImporter";
import Logger from "@server/logging/Logger";
import { FileOperation, User } from "@server/models";
import ZipHelper, { FileTreeNode } from "@server/utils/ZipHelper";
import ImportHelper, { FileTreeNode } from "@server/utils/ImportHelper";
import ImportTask, { StructuredImportData } from "./ImportTask";
export default class ImportMarkdownZipTask extends ImportTask {
public async parseData(
stream: NodeJS.ReadableStream,
dirPath: string,
fileOperation: FileOperation
): Promise<StructuredImportData> {
const zip = await JSZip.loadAsync(stream);
const tree = ZipHelper.toFileTree(zip);
const tree = await ImportHelper.toFileTree(dirPath);
if (!tree) {
throw new Error("Could not find valid content in zip file");
}
return this.parseFileTree({ fileOperation, zip, tree });
return this.parseFileTree(fileOperation, tree.children);
}
/**
* Converts the file structure from zipAsFileTree into documents,
* collections, and attachments.
*
* @param fileOperation The file operation
* @param tree An array of FileTreeNode representing root files in the zip
* @returns A StructuredImportData object
*/
private async parseFileTree({
zip,
tree,
fileOperation,
}: {
zip: JSZip;
fileOperation: FileOperation;
tree: FileTreeNode[];
}): Promise<StructuredImportData> {
private async parseFileTree(
fileOperation: FileOperation,
tree: FileTreeNode[]
): Promise<StructuredImportData> {
const user = await User.findByPk(fileOperation.userId, {
rejectOnEmpty: true,
});
@@ -59,14 +57,6 @@ export default class ImportMarkdownZipTask extends ImportTask {
return parseNodeChildren(child.children, collectionId);
}
const zipObject = zip.files[child.path];
if (!zipObject) {
Logger.info("task", "Zip file referenced path that doesn't exist", {
path: child.path,
});
return;
}
const id = uuidv4();
// this is an attachment
@@ -76,7 +66,7 @@ export default class ImportMarkdownZipTask extends ImportTask {
name: child.name,
path: child.path,
mimeType: mime.lookup(child.path) || "application/octet-stream",
buffer: () => zipObject.async("nodebuffer"),
buffer: () => fs.readFile(child.path),
});
return;
}
@@ -84,29 +74,14 @@ export default class ImportMarkdownZipTask extends ImportTask {
const { title, emoji, text } = await documentImporter({
mimeType: "text/markdown",
fileName: child.name,
content: await zipObject.async("string"),
content:
child.children.length > 0
? ""
: await fs.readFile(child.path, "utf8"),
user,
ip: user.lastActiveIp || undefined,
});
let metadata;
try {
metadata = zipObject.comment ? JSON.parse(zipObject.comment) : {};
} catch (err) {
Logger.debug(
"task",
`ZIP comment found for ${child.name}, but could not be parsed as metadata: ${zipObject.comment}`
);
}
const createdAt = metadata.createdAt
? new Date(metadata.createdAt)
: zipObject.date;
const updatedAt = metadata.updatedAt
? new Date(metadata.updatedAt)
: zipObject.date;
const existingDocumentIndex = output.documents.findIndex(
(doc) =>
doc.title === title &&
@@ -134,8 +109,6 @@ export default class ImportMarkdownZipTask extends ImportTask {
title,
emoji,
text,
updatedAt,
createdAt,
collectionId,
parentDocumentId,
path: child.path,
@@ -150,7 +123,7 @@ export default class ImportMarkdownZipTask extends ImportTask {
// All nodes in the root level should be collections
for (const node of tree) {
if (node.path.endsWith("/")) {
if (node.children.length > 0) {
const collectionId = uuidv4();
output.collections.push({
id: collectionId,
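
A condensed sketch of the two read paths after this change, assuming the FileTreeNode values come from ImportHelper.toFileTree: attachment buffers are deferred to upload time, document text is read eagerly as UTF-8, and folder nodes become empty parent documents. attachmentFor and documentContentFor are illustrative names, not functions in the task.

import fs from "fs-extra";
import mime from "mime-types";
import type { FileTreeNode } from "@server/utils/ImportHelper";

// Illustrative only: how a file node maps to an attachment record on disk
function attachmentFor(node: FileTreeNode) {
  return {
    name: node.name,
    path: node.path,
    mimeType: mime.lookup(node.path) || "application/octet-stream",
    // defer the read; the buffer is only needed when the attachment is uploaded
    buffer: () => fs.readFile(node.path),
  };
}

// Illustrative only: folder nodes have no file body of their own
async function documentContentFor(node: FileTreeNode): Promise<string> {
  return node.children.length > 0 ? "" : fs.readFile(node.path, "utf8");
}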

View File

@@ -37,7 +37,9 @@ describe("ImportNotionTask", () => {
// Check that the image url was replaced in the text with a redirect
const attachments = Array.from(response.attachments.values());
const documents = Array.from(response.documents.values());
expect(documents[2].text).toContain(attachments[0].redirectUrl);
expect(documents.map((d) => d.text).join("")).toContain(
attachments[0].redirectUrl
);
});
it("should import successfully from a HTML export", async () => {
@@ -76,6 +78,8 @@ describe("ImportNotionTask", () => {
);
const documents = Array.from(response.documents.values());
expect(documents[1].text).toContain(attachment?.redirectUrl);
expect(documents.map((d) => d.text).join("")).toContain(
attachment?.redirectUrl
);
});
});

View File

@@ -1,5 +1,5 @@
import path from "path";
import JSZip from "jszip";
import fs from "fs-extra";
import compact from "lodash/compact";
import escapeRegExp from "lodash/escapeRegExp";
import mime from "mime-types";
@@ -7,35 +7,33 @@ import { v4 as uuidv4 } from "uuid";
import documentImporter from "@server/commands/documentImporter";
import Logger from "@server/logging/Logger";
import { FileOperation, User } from "@server/models";
import ZipHelper, { FileTreeNode } from "@server/utils/ZipHelper";
import ImportHelper, { FileTreeNode } from "@server/utils/ImportHelper";
import ImportTask, { StructuredImportData } from "./ImportTask";
export default class ImportNotionTask extends ImportTask {
public async parseData(
stream: NodeJS.ReadableStream,
dirPath: string,
fileOperation: FileOperation
): Promise<StructuredImportData> {
const zip = await JSZip.loadAsync(stream);
const tree = ZipHelper.toFileTree(zip);
return this.parseFileTree({ fileOperation, zip, tree });
const tree = await ImportHelper.toFileTree(dirPath);
if (!tree) {
throw new Error("Could not find valid content in zip file");
}
return this.parseFileTree(fileOperation, tree.children);
}
/**
* Converts the file structure from zipAsFileTree into documents,
* collections, and attachments.
*
* @param fileOperation The file operation
* @param tree An array of FileTreeNode representing root files in the zip
* @returns A StructuredImportData object
*/
private async parseFileTree({
zip,
tree,
fileOperation,
}: {
zip: JSZip;
fileOperation: FileOperation;
tree: FileTreeNode[];
}): Promise<StructuredImportData> {
private async parseFileTree(
fileOperation: FileOperation,
tree: FileTreeNode[]
): Promise<StructuredImportData> {
const user = await User.findByPk(fileOperation.userId, {
rejectOnEmpty: true,
});
@@ -58,7 +56,6 @@ export default class ImportNotionTask extends ImportTask {
return;
}
const zipObject = zip.files[child.path];
const id = uuidv4();
const match = child.title.match(this.NotionUUIDRegex);
const name = child.title.replace(this.NotionUUIDRegex, "");
@@ -78,7 +75,7 @@ export default class ImportNotionTask extends ImportTask {
name: child.name,
path: child.path,
mimeType,
buffer: () => zipObject.async("nodebuffer"),
buffer: () => fs.readFile(child.path),
externalId,
});
return;
@@ -89,7 +86,10 @@ export default class ImportNotionTask extends ImportTask {
const { title, emoji, text } = await documentImporter({
mimeType: mimeType || "text/markdown",
fileName: name,
content: zipObject ? await zipObject.async("string") : "",
content:
child.children.length > 0
? ""
: await fs.readFile(child.path, "utf8"),
user,
ip: user.lastActiveIp || undefined,
});
@@ -205,11 +205,10 @@ export default class ImportNotionTask extends ImportTask {
mimeType === "text/plain" ||
mimeType === "text/html"
) {
const zipObject = zip.files[node.path];
const { text } = await documentImporter({
mimeType,
fileName: name,
content: await zipObject.async("string"),
content: await fs.readFile(node.path, "utf8"),
user,
ip: user.lastActiveIp || undefined,
});
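
For the Notion path the structural difference is the same: page content and attachments are now read straight from the extracted directory, while the external id still comes from the 32-character suffix Notion appends to file names. The regex below is an assumed approximation of this.NotionUUIDRegex, and readNotionPage is a hypothetical illustration rather than code from the task.

import path from "path";
import fs from "fs-extra";
import mime from "mime-types";

// Assumed approximation of the task's NotionUUIDRegex
const NotionUUIDRegex = /\s([0-9a-fA-F]{32})(\.|$)/;

// Hypothetical: derive name, external id, and content for a Notion export file on disk
async function readNotionPage(filePath: string) {
  const fileName = path.basename(filePath);
  const externalId = fileName.match(NotionUUIDRegex)?.[1];
  const name = fileName.replace(NotionUUIDRegex, "$2");
  const mimeType = mime.lookup(filePath) || "text/markdown";
  const content = await fs.readFile(filePath, "utf8");
  return { name, externalId, mimeType, content };
}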

View File

@@ -1,5 +1,8 @@
import path from "path";
import { rm } from "fs-extra";
import truncate from "lodash/truncate";
import tmp from "tmp";
import unzipper from "unzipper";
import {
AttachmentPreset,
CollectionPermission,
@@ -10,6 +13,7 @@ import { CollectionValidation } from "@shared/validations";
import attachmentCreator from "@server/commands/attachmentCreator";
import documentCreator from "@server/commands/documentCreator";
import { serializer } from "@server/editor";
import env from "@server/env";
import { InternalError, ValidationError } from "@server/errors";
import Logger from "@server/logging/Logger";
import {
@@ -98,19 +102,22 @@ export default abstract class ImportTask extends BaseTask<Props> {
* @param props The props
*/
public async perform({ fileOperationId }: Props) {
let dirPath;
const fileOperation = await FileOperation.findByPk(fileOperationId, {
rejectOnEmpty: true,
});
try {
Logger.info("task", `ImportTask fetching data for ${fileOperationId}`);
const data = await this.fetchData(fileOperation);
if (!data) {
dirPath = await this.fetchAndExtractData(fileOperation);
if (!dirPath) {
throw InternalError("Failed to fetch data for import from storage.");
}
Logger.info("task", `ImportTask parsing data for ${fileOperationId}`);
const parsed = await this.parseData(data, fileOperation);
Logger.info("task", `ImportTask parsing data for ${fileOperationId}`, {
dirPath,
});
const parsed = await this.parseData(dirPath, fileOperation);
if (parsed.collections.length === 0) {
throw ValidationError(
@@ -152,6 +159,10 @@ export default abstract class ImportTask extends BaseTask<Props> {
error
);
throw error;
} finally {
if (dirPath) {
await this.cleanupExtractedData(dirPath, fileOperation);
}
}
}
@@ -179,38 +190,70 @@ export default abstract class ImportTask extends BaseTask<Props> {
}
/**
* Fetch the remote data associated with the file operation as a Buffer.
* Fetch the remote data associated with the file operation into a temporary disk location.
*
* @param fileOperation The FileOperation to fetch data for
* @returns A promise that resolves to the data as a buffer.
* @returns A promise that resolves to the temporary file path.
*/
protected async fetchData(fileOperation: FileOperation): Promise<Buffer> {
protected async fetchAndExtractData(
fileOperation: FileOperation
): Promise<string> {
return new Promise((resolve, reject) => {
const bufs: Buffer[] = [];
const stream = fileOperation.stream;
if (!stream) {
return reject(new Error("No stream available"));
}
stream.on("data", function (d) {
bufs.push(d);
});
stream.on("error", reject);
stream.on("end", () => {
resolve(Buffer.concat(bufs));
tmp.dir((err, path) => {
if (err) {
return reject(err);
}
const dest = unzipper
.Extract({ path, verbose: env.isDevelopment })
.on("error", reject)
.on("close", () => resolve(path));
stream
.on("error", (err) => {
dest.end();
reject(err);
})
.pipe(dest);
});
});
}
/**
* Parse the data loaded from fetchData into a consistent structured format
* Cleanup the temporary directory where the data was fetched and extracted.
*
* @param dirPath The temporary directory path where the data was fetched
* @param fileOperation The associated FileOperation
*/
protected async cleanupExtractedData(
dirPath: string,
fileOperation: FileOperation
) {
try {
await rm(dirPath, { recursive: true, force: true });
} catch (error) {
Logger.error(
`ImportTask failed to cleanup extracted data for ${fileOperation.id}`,
error
);
}
}
/**
* Parse the data loaded from fetchAndExtractData into a consistent structured format
* that represents collections, documents, and the relationships between them.
*
* @param data The data loaded from fetchData
* @param dirPath The temporary directory path where the data was fetched
* @param fileOperation The FileOperation to parse data for
* @returns A promise that resolves to the structured data
*/
protected abstract parseData(
data: Buffer | NodeJS.ReadableStream,
dirPath: string,
fileOperation: FileOperation
): Promise<StructuredImportData>;
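
The core of the rearchitecture is above: instead of buffering the whole zip in memory, the file operation's stream is piped through unzipper into a tmp directory, the subclasses parse from that path, and the directory is removed in a finally block. The standalone sketch below mirrors that flow for a local zip file; zipPath and withExtractedZip are illustrative and not part of the commit.

import { createReadStream } from "fs";
import { rm } from "fs-extra";
import tmp from "tmp";
import unzipper from "unzipper";

// Extract a local zip into a fresh tmp directory and resolve with its path
function extractToTmpDir(zipPath: string): Promise<string> {
  return new Promise((resolve, reject) => {
    tmp.dir((err, dirPath) => {
      if (err) {
        return reject(err);
      }
      const dest = unzipper
        .Extract({ path: dirPath })
        .on("error", reject)
        .on("close", () => resolve(dirPath));
      createReadStream(zipPath)
        .on("error", (streamErr) => {
          dest.end();
          reject(streamErr);
        })
        .pipe(dest);
    });
  });
}

// Run fn against the extracted directory, then clean up, mirroring the
// perform() + cleanupExtractedData() pairing above
async function withExtractedZip<T>(
  zipPath: string,
  fn: (dirPath: string) => Promise<T>
): Promise<T> {
  const dirPath = await extractToTmpDir(zipPath);
  try {
    return await fn(dirPath);
  } finally {
    await rm(dirPath, { recursive: true, force: true });
  }
}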

View File

@@ -0,0 +1,67 @@
import path from "path";
import fs from "fs-extra";
import { deserializeFilename } from "./fs";
export type FileTreeNode = {
/** The title, extracted from the file name */
title: string;
/** The file name including extension */
name: string;
/** Full path to the file within the zip file */
path: string;
/** Any nested children */
children: FileTreeNode[];
};
export default class ImportHelper {
/**
* Collects the files and folders for a directory filePath.
*/
public static async toFileTree(
filePath: string,
currentDepth = 0
): Promise<FileTreeNode | null> {
const name = path.basename(filePath);
const title = deserializeFilename(path.parse(path.basename(name)).name);
const item = {
path: filePath,
name,
title,
children: [] as FileTreeNode[],
};
let stats;
if ([".DS_Store", "__MACOSX"].includes(name)) {
return null;
}
try {
stats = await fs.stat(filePath);
} catch (e) {
return null;
}
if (stats.isFile()) {
return item;
}
if (stats.isDirectory()) {
const dirData = await fs.readdir(filePath);
if (dirData === null) {
return null;
}
item.children = (
await Promise.all(
dirData.map((child) =>
this.toFileTree(path.join(filePath, child), currentDepth + 1)
)
)
).filter(Boolean) as FileTreeNode[];
} else {
return null;
}
return item;
}
}
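
Example usage of the new helper, assuming /tmp/import-abc123 is a directory produced by fetchAndExtractData (the path is a placeholder). It prints an indented listing of whatever toFileTree recovered; .DS_Store and __MACOSX entries have already been filtered out.

import ImportHelper, { FileTreeNode } from "@server/utils/ImportHelper";

// Print the recovered tree, one node per line, indented by depth
function printTree(node: FileTreeNode, depth = 0) {
  console.log(`${"  ".repeat(depth)}${node.title} (${node.name})`);
  node.children.forEach((child) => printTree(child, depth + 1));
}

async function main() {
  const tree = await ImportHelper.toFileTree("/tmp/import-abc123");
  if (tree) {
    printTree(tree);
  }
}

void main();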

View File

@@ -1,24 +1,9 @@
import fs from "fs";
import path from "path";
import JSZip from "jszip";
import find from "lodash/find";
import tmp from "tmp";
import { bytesToHumanReadable } from "@shared/utils/files";
import { ValidationError } from "@server/errors";
import Logger from "@server/logging/Logger";
import { trace } from "@server/logging/tracing";
import { deserializeFilename } from "./fs";
export type FileTreeNode = {
/** The title, extracted from the file name */
title: string;
/** The file name including extension */
name: string;
/** Full path to the file within the zip file */
path: string;
/** Any nested children */
children: FileTreeNode[];
};
@trace()
export default class ZipHelper {
@@ -32,62 +17,6 @@ export default class ZipHelper {
},
};
/**
* Converts the flat structure returned by JSZIP into a nested file structure
* for easier processing.
*
* @param zip The JSZip instance
* @param maxFiles The maximum number of files to unzip (Prevent zip bombs)
*/
public static toFileTree(
zip: JSZip,
/** The maximum number of files to unzip */
maxFiles = 10000
) {
const paths = ZipHelper.getPathsInZip(zip, maxFiles);
const tree: FileTreeNode[] = [];
paths.forEach(function (filePath) {
if (filePath.startsWith("/__MACOSX")) {
return;
}
const pathParts = filePath.split("/");
// Remove first blank element from the parts array.
pathParts.shift();
let currentLevel = tree; // initialize currentLevel to root
pathParts.forEach(function (name) {
// check to see if the path already exists.
const existingPath = find(currentLevel, {
name,
});
if (existingPath) {
// The path to this item was already in the tree, so don't add again.
// Set the current level to this path's children
currentLevel = existingPath.children;
} else if (name.endsWith(".DS_Store") || !name) {
return;
} else {
const newPart = {
name,
path: filePath.replace(/^\//, ""),
title: deserializeFilename(path.parse(path.basename(name)).name),
children: [],
};
currentLevel.push(newPart);
currentLevel = newPart.children;
}
});
});
return tree;
}
/**
* Write a zip file to a temporary disk location
*
@@ -158,34 +87,4 @@ export default class ZipHelper {
);
});
}
/**
* Gets a list of file paths contained within the ZIP file, accounting for
* differences between OS.
*
* @param zip The JSZip instance
* @param maxFiles The maximum number of files to unzip (Prevent zip bombs)
*/
private static getPathsInZip(zip: JSZip, maxFiles = 10000) {
let fileCount = 0;
const paths: string[] = [];
Object.keys(zip.files).forEach((p) => {
if (++fileCount > maxFiles) {
throw ValidationError("Too many files in zip");
}
const filePath = `/${p}`;
// "zip.files" for ZIPs created on Windows does not return paths for
// directories, so we must add them manually if missing.
const dir = filePath.slice(0, filePath.lastIndexOf("/") + 1);
if (dir.length > 1 && !paths.includes(dir)) {
paths.push(dir);
}
paths.push(filePath);
});
return paths;
}
}

yarn.lock (129 changed lines)
View File

@@ -214,14 +214,7 @@
"@babel/traverse" "^7.22.5"
"@babel/types" "^7.22.5"
"@babel/helper-optimise-call-expression@^7.18.6":
version "7.18.6"
resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe"
integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==
dependencies:
"@babel/types" "^7.18.6"
"@babel/helper-optimise-call-expression@^7.22.5":
"@babel/helper-optimise-call-expression@^7.18.6", "@babel/helper-optimise-call-expression@^7.22.5":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e"
integrity sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==
@@ -259,14 +252,7 @@
dependencies:
"@babel/types" "^7.22.5"
"@babel/helper-skip-transparent-expression-wrappers@^7.20.0":
version "7.20.0"
resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.20.0.tgz#fbe4c52f60518cab8140d77101f0e63a8a230684"
integrity sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg==
dependencies:
"@babel/types" "^7.20.0"
"@babel/helper-skip-transparent-expression-wrappers@^7.22.5":
"@babel/helper-skip-transparent-expression-wrappers@^7.20.0", "@babel/helper-skip-transparent-expression-wrappers@^7.22.5":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847"
integrity sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==
@@ -3554,6 +3540,13 @@
resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d"
integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==
"@types/unzipper@^0.10.9":
version "0.10.9"
resolved "https://registry.yarnpkg.com/@types/unzipper/-/unzipper-0.10.9.tgz#ccbc393ecd1ec013dbe9bc6f13332dad0aa00a0f"
integrity sha512-vHbmFZAw8emNAOVkHVbS3qBnbr0x/qHQZ+ei1HE7Oy6Tyrptl+jpqnOX+BF5owcu/HZLOV0nJK+K9sjs1Ox2JA==
dependencies:
"@types/node" "*"
"@types/utf8@^3.0.1":
version "3.0.1"
resolved "https://registry.yarnpkg.com/@types/utf8/-/utf8-3.0.1.tgz#bf081663d4fff05ee63b41f377a35f8b189f7e5b"
@@ -4361,17 +4354,30 @@ batch@^0.6.1:
resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16"
integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==
big-integer@^1.6.17:
version "1.6.51"
resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686"
integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==
binary-extensions@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9"
integrity sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==
binary@~0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79"
integrity sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==
dependencies:
buffers "~0.1.1"
chainsaw "~0.1.0"
bluebird@^3.7.2:
version "3.7.2"
resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==
bluebird@~3.4.0:
bluebird@~3.4.0, bluebird@~3.4.1:
version "3.4.7"
resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3"
integrity sha1-9y12C+Cbf3bQjtj66Ysomo0F+rM=
@@ -4489,6 +4495,11 @@ buffer-from@^1.0.0, buffer-from@^1.1.2:
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
buffer-indexof-polyfill@~1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz#d2732135c5999c64b277fcf9b1abe3498254729c"
integrity sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A==
buffer-writer@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04"
@@ -4503,6 +4514,11 @@ buffer@4.9.2:
ieee754 "^1.1.4"
isarray "^1.0.0"
buffers@~0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb"
integrity sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==
builtin-modules@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-2.0.0.tgz#60b7ef5ae6546bd7deefa74b08b62a43a232648e"
@@ -4581,6 +4597,13 @@ caniuse-lite@^1.0.30001449:
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001451.tgz#2e197c698fc1373d63e1406d6607ea4617c613f1"
integrity sha512-XY7UbUpGRatZzoRft//5xOa69/1iGJRBlrieH6QYrkKLIFn3m7OVEJ81dSrKoy2BnKsdbX5cLrOispZNYo9v2w==
chainsaw@~0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98"
integrity sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==
dependencies:
traverse ">=0.3.0 <0.4"
chalk@5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385"
@@ -5893,6 +5916,13 @@ duck@^0.1.12:
dependencies:
underscore "^1.13.1"
duplexer2@~0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1"
integrity sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==
dependencies:
readable-stream "^2.0.2"
duplexer@^0.1.1:
version "0.1.2"
resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6"
@@ -7107,6 +7137,16 @@ fsevents@^2.3.2, fsevents@~2.3.2:
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
fstream@^1.0.12:
version "1.0.12"
resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.12.tgz#4e8ba8ee2d48be4f7d0de505455548eae5932045"
integrity sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==
dependencies:
graceful-fs "^4.1.2"
inherits "~2.0.0"
mkdirp ">=0.5 0"
rimraf "2"
ftp@^0.3.10:
version "0.3.10"
resolved "https://registry.yarnpkg.com/ftp/-/ftp-0.3.10.tgz#9197d861ad8142f3e63d5a83bfe4c59f7330885d"
@@ -7363,10 +7403,10 @@ gopd@^1.0.1:
dependencies:
get-intrinsic "^1.1.3"
graceful-fs@^4.0.0, graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.9:
version "4.2.10"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c"
integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==
graceful-fs@^4.0.0, graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.2, graceful-fs@^4.2.4, graceful-fs@^4.2.9:
version "4.2.11"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
graphemer@^1.4.0:
version "1.4.0"
@@ -7794,7 +7834,7 @@ inflight@^1.0.4:
once "^1.3.0"
wrappy "1"
inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3:
inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@@ -9282,6 +9322,11 @@ list-stylesheets@^2.0.1:
cheerio "1.0.0-rc.12"
pick-util "^1.1.5"
listenercount@~1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/listenercount/-/listenercount-1.0.1.tgz#84c8a72ab59c4725321480c975e6508342e70937"
integrity sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==
listr2@6.6.1:
version "6.6.1"
resolved "https://registry.yarnpkg.com/listr2/-/listr2-6.6.1.tgz#08b2329e7e8ba6298481464937099f4a2cd7f95d"
@@ -9714,6 +9759,13 @@ minimist@^1.2.0, minimist@^1.2.6:
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18"
integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==
"mkdirp@>=0.5 0":
version "0.5.6"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6"
integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==
dependencies:
minimist "^1.2.6"
mktemp@~0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/mktemp/-/mktemp-0.4.0.tgz#6d0515611c8a8c84e484aa2000129b98e981ff0b"
@@ -11322,10 +11374,10 @@ readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.2, readable-stre
string_decoder "^1.1.1"
util-deprecate "^1.0.1"
readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.5, readable-stream@^2.1.5, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.1.5, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6:
version "2.3.8"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b"
integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.3"
@@ -11651,7 +11703,7 @@ rfdc@^1.3.0:
resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b"
integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==
rimraf@^2.5.4, rimraf@^2.6.3:
rimraf@2, rimraf@^2.5.4, rimraf@^2.6.3:
version "2.7.1"
resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
@@ -11930,7 +11982,7 @@ serialize-javascript@^4.0.0:
dependencies:
randombytes "^2.1.0"
setimmediate@^1.0.5:
setimmediate@^1.0.5, setimmediate@~1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==
@@ -12829,6 +12881,11 @@ tr46@~0.0.3:
resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
"traverse@>=0.3.0 <0.4":
version "0.3.9"
resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9"
integrity sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==
tree-kill@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc"
@@ -13143,6 +13200,22 @@ unpipe@1.0.0:
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
unzipper@0.10.11:
version "0.10.11"
resolved "https://registry.yarnpkg.com/unzipper/-/unzipper-0.10.11.tgz#0b4991446472cbdb92ee7403909f26c2419c782e"
integrity sha512-+BrAq2oFqWod5IESRjL3S8baohbevGcVA+teAIOYWM3pDVdseogqbzhhvvmiyQrUNKFUnDMtELW3X8ykbyDCJw==
dependencies:
big-integer "^1.6.17"
binary "~0.3.0"
bluebird "~3.4.1"
buffer-indexof-polyfill "~1.0.0"
duplexer2 "~0.1.4"
fstream "^1.0.12"
graceful-fs "^4.2.2"
listenercount "~1.0.1"
readable-stream "~2.3.6"
setimmediate "~1.0.4"
upath@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894"