feat: Data management

This commit adds electron/data-archive.ts (new file, 229 lines).
|
||||
import * as fsp from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
|
||||
/**
 * A single file stored in (or destined for) a portable ZIP archive.
 */
export interface ZipArchiveEntry {
  /** Raw file contents; stored without compression. */
  data: Buffer;
  /** Forward-slash-separated relative path of the file inside the archive. */
  path: string;
}
/**
 * Bookkeeping captured while writing each local file record, replayed later
 * to emit the archive's central directory.
 */
interface CentralDirectoryEntry {
  /** Stored payload size in bytes (equals uncompressedSize — store method only). */
  compressedSize: number;
  /** CRC-32 checksum of the payload. */
  crc: number;
  /** Payload bytes. NOTE(review): appears unused when emitting the central directory — confirm before removing. */
  data: Buffer;
  /** Byte offset of this entry's local file header within the archive. */
  localHeaderOffset: number;
  /** UTF-8 encoded entry name, as written to both headers. */
  name: Buffer;
  /** Original payload size in bytes. */
  uncompressedSize: number;
}
// Record signatures defined by the ZIP format ("PK\x03\x04" et al., little-endian).
const ZIP_LOCAL_FILE_HEADER_SIGNATURE = 0x04034b50;
const ZIP_CENTRAL_DIRECTORY_SIGNATURE = 0x02014b50;
const ZIP_END_OF_CENTRAL_DIRECTORY_SIGNATURE = 0x06054b50;
// General-purpose-flag bit 11: entry names are UTF-8 encoded.
const ZIP_UTF8_FLAG = 0x0800;
// Compression method 0: data is stored without compression.
const ZIP_STORE_METHOD = 0;
// "Version needed to extract" 2.0 (encoded as major * 10 + minor).
const ZIP_VERSION = 20;
// Largest value representable in the classic 32-bit ZIP size/offset fields.
const MAX_UINT32 = 0xffffffff;

// Shared CRC-32 lookup table, built once at module load.
const crcTable = buildCrcTable();
export function createZipArchive(entries: ZipArchiveEntry[]): Buffer {
|
||||
const localParts: Buffer[] = [];
|
||||
const centralEntries: CentralDirectoryEntry[] = [];
|
||||
let offset = 0;
|
||||
|
||||
for (const entry of entries) {
|
||||
const normalizedPath = normalizeZipPath(entry.path);
|
||||
const name = Buffer.from(normalizedPath, 'utf8');
|
||||
const data = entry.data;
|
||||
|
||||
if (name.length > 0xffff || data.length > MAX_UINT32 || offset > MAX_UINT32) {
|
||||
throw new Error('Data archive is too large for the portable ZIP format.');
|
||||
}
|
||||
|
||||
const crc = crc32(data);
|
||||
const localHeader = Buffer.alloc(30);
|
||||
|
||||
localHeader.writeUInt32LE(ZIP_LOCAL_FILE_HEADER_SIGNATURE, 0);
|
||||
localHeader.writeUInt16LE(ZIP_VERSION, 4);
|
||||
localHeader.writeUInt16LE(ZIP_UTF8_FLAG, 6);
|
||||
localHeader.writeUInt16LE(ZIP_STORE_METHOD, 8);
|
||||
localHeader.writeUInt16LE(0, 10);
|
||||
localHeader.writeUInt16LE(0, 12);
|
||||
localHeader.writeUInt32LE(crc, 14);
|
||||
localHeader.writeUInt32LE(data.length, 18);
|
||||
localHeader.writeUInt32LE(data.length, 22);
|
||||
localHeader.writeUInt16LE(name.length, 26);
|
||||
localHeader.writeUInt16LE(0, 28);
|
||||
|
||||
localParts.push(localHeader, name, data);
|
||||
centralEntries.push({
|
||||
compressedSize: data.length,
|
||||
crc,
|
||||
data,
|
||||
localHeaderOffset: offset,
|
||||
name,
|
||||
uncompressedSize: data.length
|
||||
});
|
||||
|
||||
offset += localHeader.length + name.length + data.length;
|
||||
}
|
||||
|
||||
const centralDirectoryOffset = offset;
|
||||
const centralParts = centralEntries.map((entry) => {
|
||||
const header = Buffer.alloc(46);
|
||||
|
||||
header.writeUInt32LE(ZIP_CENTRAL_DIRECTORY_SIGNATURE, 0);
|
||||
header.writeUInt16LE(ZIP_VERSION, 4);
|
||||
header.writeUInt16LE(ZIP_VERSION, 6);
|
||||
header.writeUInt16LE(ZIP_UTF8_FLAG, 8);
|
||||
header.writeUInt16LE(ZIP_STORE_METHOD, 10);
|
||||
header.writeUInt16LE(0, 12);
|
||||
header.writeUInt16LE(0, 14);
|
||||
header.writeUInt32LE(entry.crc, 16);
|
||||
header.writeUInt32LE(entry.compressedSize, 20);
|
||||
header.writeUInt32LE(entry.uncompressedSize, 24);
|
||||
header.writeUInt16LE(entry.name.length, 28);
|
||||
header.writeUInt16LE(0, 30);
|
||||
header.writeUInt16LE(0, 32);
|
||||
header.writeUInt16LE(0, 34);
|
||||
header.writeUInt16LE(0, 36);
|
||||
header.writeUInt32LE(0, 38);
|
||||
header.writeUInt32LE(entry.localHeaderOffset, 42);
|
||||
|
||||
offset += header.length + entry.name.length;
|
||||
|
||||
return Buffer.concat([header, entry.name]);
|
||||
});
|
||||
|
||||
const centralDirectorySize = offset - centralDirectoryOffset;
|
||||
|
||||
if (centralEntries.length > 0xffff || centralDirectoryOffset > MAX_UINT32 || centralDirectorySize > MAX_UINT32) {
|
||||
throw new Error('Data archive is too large for the portable ZIP format.');
|
||||
}
|
||||
|
||||
const end = Buffer.alloc(22);
|
||||
|
||||
end.writeUInt32LE(ZIP_END_OF_CENTRAL_DIRECTORY_SIGNATURE, 0);
|
||||
end.writeUInt16LE(0, 4);
|
||||
end.writeUInt16LE(0, 6);
|
||||
end.writeUInt16LE(centralEntries.length, 8);
|
||||
end.writeUInt16LE(centralEntries.length, 10);
|
||||
end.writeUInt32LE(centralDirectorySize, 12);
|
||||
end.writeUInt32LE(centralDirectoryOffset, 16);
|
||||
end.writeUInt16LE(0, 20);
|
||||
|
||||
return Buffer.concat([...localParts, ...centralParts, end]);
|
||||
}
|
||||
|
||||
export function readZipArchive(data: Buffer): ZipArchiveEntry[] {
|
||||
const endOffset = findEndOfCentralDirectory(data);
|
||||
|
||||
if (endOffset < 0) {
|
||||
throw new Error('The selected file is not a supported data archive.');
|
||||
}
|
||||
|
||||
const entryCount = data.readUInt16LE(endOffset + 10);
|
||||
const centralDirectoryOffset = data.readUInt32LE(endOffset + 16);
|
||||
const entries: ZipArchiveEntry[] = [];
|
||||
let offset = centralDirectoryOffset;
|
||||
|
||||
for (let index = 0; index < entryCount; index += 1) {
|
||||
if (data.readUInt32LE(offset) !== ZIP_CENTRAL_DIRECTORY_SIGNATURE) {
|
||||
throw new Error('The data archive directory is invalid.');
|
||||
}
|
||||
|
||||
const method = data.readUInt16LE(offset + 10);
|
||||
const compressedSize = data.readUInt32LE(offset + 20);
|
||||
const uncompressedSize = data.readUInt32LE(offset + 24);
|
||||
const nameLength = data.readUInt16LE(offset + 28);
|
||||
const extraLength = data.readUInt16LE(offset + 30);
|
||||
const commentLength = data.readUInt16LE(offset + 32);
|
||||
const localHeaderOffset = data.readUInt32LE(offset + 42);
|
||||
const entryPath = normalizeZipPath(data.subarray(offset + 46, offset + 46 + nameLength).toString('utf8'));
|
||||
|
||||
if (method !== ZIP_STORE_METHOD || compressedSize !== uncompressedSize) {
|
||||
throw new Error('Compressed data archives are not supported by this build.');
|
||||
}
|
||||
|
||||
if (data.readUInt32LE(localHeaderOffset) !== ZIP_LOCAL_FILE_HEADER_SIGNATURE) {
|
||||
throw new Error('The data archive contains an invalid file entry.');
|
||||
}
|
||||
|
||||
const localNameLength = data.readUInt16LE(localHeaderOffset + 26);
|
||||
const localExtraLength = data.readUInt16LE(localHeaderOffset + 28);
|
||||
const dataOffset = localHeaderOffset + 30 + localNameLength + localExtraLength;
|
||||
|
||||
entries.push({
|
||||
data: Buffer.from(data.subarray(dataOffset, dataOffset + compressedSize)),
|
||||
path: entryPath
|
||||
});
|
||||
|
||||
offset += 46 + nameLength + extraLength + commentLength;
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
export async function extractZipEntries(entries: ZipArchiveEntry[], destinationPath: string): Promise<void> {
|
||||
const destinationRoot = path.resolve(destinationPath);
|
||||
|
||||
for (const entry of entries) {
|
||||
const targetPath = path.resolve(destinationRoot, entry.path);
|
||||
|
||||
if (!targetPath.startsWith(destinationRoot + path.sep) && targetPath !== destinationRoot) {
|
||||
throw new Error('The data archive contains an unsafe path.');
|
||||
}
|
||||
|
||||
await fsp.mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await fsp.writeFile(targetPath, entry.data);
|
||||
}
|
||||
}
|
||||
|
||||
function findEndOfCentralDirectory(data: Buffer): number {
|
||||
const minimumOffset = Math.max(0, data.length - 0xffff - 22);
|
||||
|
||||
for (let offset = data.length - 22; offset >= minimumOffset; offset -= 1) {
|
||||
if (data.readUInt32LE(offset) === ZIP_END_OF_CENTRAL_DIRECTORY_SIGNATURE) {
|
||||
return offset;
|
||||
}
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
function normalizeZipPath(value: string): string {
|
||||
const normalized = value.replace(/\\/g, '/').replace(/^\/+/, '');
|
||||
|
||||
if (!normalized || normalized.split('/').some((part) => part === '..' || part === '')) {
|
||||
throw new Error('The data archive contains an unsafe path.');
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function buildCrcTable(): number[] {
|
||||
const table: number[] = [];
|
||||
|
||||
for (let index = 0; index < 256; index += 1) {
|
||||
let value = index;
|
||||
|
||||
for (let bit = 0; bit < 8; bit += 1) {
|
||||
value = (value & 1) !== 0
|
||||
? 0xedb88320 ^ (value >>> 1)
|
||||
: value >>> 1;
|
||||
}
|
||||
|
||||
table[index] = value >>> 0;
|
||||
}
|
||||
|
||||
return table;
|
||||
}
|
||||
|
||||
function crc32(data: Buffer): number {
|
||||
let crc = 0xffffffff;
|
||||
|
||||
for (const byte of data) {
|
||||
crc = crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8);
|
||||
}
|
||||
|
||||
return (crc ^ 0xffffffff) >>> 0;
|
||||
}
|
||||
Reference in New Issue
Block a user