feat(project): optimize image imports and converter

- Optimize imported scene preview images (smart WebP/JPEG/PNG, preserve alpha, keep pixel size)

- Update converter to re-encode existing image assets with same algorithm

- Improve import/export progress overlay and reduce presentation slide stutter

Made-with: Cursor
This commit is contained in:
Ivan Fontosh
2026-04-23 17:59:57 +08:00
parent 1d051f8bf9
commit 8f8eef53c9
33 changed files with 3684 additions and 68 deletions
+36 -2
View File
@@ -27,6 +27,20 @@ import {
waitForEditorWindowReady,
} from './windows/createWindows';
/**
 * Broadcasts a zip import/export progress event to every open renderer window.
 *
 * @param evt.kind    selects the IPC channel (import vs export progress)
 * @param evt.stage   coarse phase label (e.g. 'copy', 'unzip', 'done')
 * @param evt.percent 0..100 progress within the current stage
 * @param evt.detail  optional human-readable status text
 */
function emitZipProgress(evt: {
  kind: 'import' | 'export';
  stage: string;
  percent: number;
  detail?: string;
}): void {
  // The channel depends only on evt.kind — resolve it once, not per window.
  const channel =
    evt.kind === 'import'
      ? ipcChannels.project.importZipProgress
      : ipcChannels.project.exportZipProgress;
  for (const win of BrowserWindow.getAllWindows()) {
    win.webContents.send(channel, evt);
  }
}
/**
* Отключение GPU ломает скорость вторичных окон (презентация/пульт — WebGL). По умолчанию не трогаем.
* При чёрном экране в упакованной сборке: `DND_DISABLE_GPU=1`.
@@ -403,7 +417,18 @@ async function main() {
if (canceled || !filePaths[0]) {
return { canceled: true as const };
}
const project = await projectStore.importProjectFromExternalZip(filePaths[0]);
const srcPath = filePaths[0];
emitZipProgress({ kind: 'import', stage: 'copy', percent: 0, detail: 'Копирование…' });
// Let store import; progress for unzip is emitted from unzipToDir wrapper in store.
const project = await projectStore.importProjectFromExternalZip(srcPath, (p) => {
emitZipProgress({
kind: 'import',
stage: p.stage,
percent: p.percent,
...(p.detail ? { detail: p.detail } : null),
});
});
emitZipProgress({ kind: 'import', stage: 'done', percent: 100, detail: 'Готово' });
emitSessionState();
return { canceled: false as const, project };
});
@@ -428,7 +453,16 @@ async function main() {
if (!lower.endsWith('.dnd.zip')) {
dest = lower.endsWith('.zip') ? dest.replace(/\.zip$/iu, '.dnd.zip') : `${dest}.dnd.zip`;
}
await projectStore.exportProjectZipToPath(projectId, dest);
emitZipProgress({ kind: 'export', stage: 'copy', percent: 0, detail: 'Экспорт…' });
await projectStore.exportProjectZipToPath(projectId, dest, (p) => {
emitZipProgress({
kind: 'export',
stage: p.stage,
percent: p.percent,
...(p.detail ? { detail: p.detail } : null),
});
});
emitZipProgress({ kind: 'export', stage: 'done', percent: 100, detail: 'Готово' });
return { canceled: false as const };
});
registerHandler(ipcChannels.project.deleteProject, async ({ projectId }) => {
+2 -1
View File
@@ -14,6 +14,7 @@ void test('collectReferencedAssetIds: превью, видео и аудио', (
scenes: {
s1: {
previewAssetId: 'pr' as AssetId,
previewThumbAssetId: 'th' as AssetId,
media: {
videos: ['v1' as AssetId],
audios: [{ assetId: 'a1' as AssetId, autoplay: true, loop: true }],
@@ -23,7 +24,7 @@ void test('collectReferencedAssetIds: превью, видео и аудио', (
campaignAudios: [{ assetId: 'ca1' as AssetId, autoplay: true, loop: true }],
} as unknown as Project;
const s = collectReferencedAssetIds(p);
assert.deepEqual([...s].sort(), ['a1', 'ca1', 'pr', 'v1'].sort());
assert.deepEqual([...s].sort(), ['a1', 'ca1', 'pr', 'th', 'v1'].sort());
});
void test('reconcileAssetFiles: снимает осиротевшие assets и удаляет файлы', async () => {
+1
View File
@@ -9,6 +9,7 @@ export function collectReferencedAssetIds(p: Project): Set<AssetId> {
const refs = new Set<AssetId>();
for (const sc of Object.values(p.scenes)) {
if (sc.previewAssetId) refs.add(sc.previewAssetId);
if (sc.previewThumbAssetId) refs.add(sc.previewThumbAssetId);
for (const vid of sc.media.videos) refs.add(vid);
for (const au of sc.media.audios) refs.add(au.assetId);
}
@@ -0,0 +1,14 @@
import type { Buffer } from 'node:buffer';
/**
 * Result of a visually-lossless re-encode of a raster image.
 * Pixel dimensions are always preserved; `passthrough` marks inputs that
 * were kept byte-for-byte (undecodable, animated, vector, or no size win).
 */
export type OptimizeImageImportResult = {
/** Encoded bytes (or the original bytes when `passthrough` is true). */
buffer: Buffer;
/** MIME type of `buffer` (e.g. 'image/webp'). */
mime: string;
/** Filename extension without the leading dot (e.g. webp, jpg, png). */
ext: string;
/** Pixel width of the result (0 when unknown). */
width: number;
/** Pixel height of the result (0 when unknown). */
height: number;
/** True when the original bytes were returned unchanged. */
passthrough: boolean;
};
/**
 * Re-encodes an image buffer choosing the smallest of WebP/JPEG (opaque) or
 * lossless WebP/PNG (alpha) at the same pixel dimensions; falls back to
 * returning the input unchanged whenever re-encoding is unsafe or not smaller.
 */
export function optimizeImageBufferVisuallyLossless(
input: Buffer,
): Promise<OptimizeImageImportResult>;
@@ -0,0 +1,218 @@
/**
* Visually lossless re-encode for imported raster images (same pixel dimensions).
* Node-only; shared by the main app and tools/project-converter.
*/
import sharp from 'sharp';
/** @typedef {import('node:buffer').Buffer} Buffer */
/**
* @typedef {Object} OptimizeImageImportResult
* @property {Buffer} buffer
* @property {string} mime
* @property {string} ext filename extension without dot (e.g. webp, jpg, png)
* @property {number} width
* @property {number} height
* @property {boolean} passthrough true if original bytes were kept
*/
const WEBP_EFFORT = 6;
const RASTER_QUALITY = 95;
/**
 * Wraps the original bytes unchanged, attaching a mime type and extension
 * derived from the sharp-detected format. Unknown/missing formats fall back
 * to application/octet-stream with a .bin extension.
 *
 * @param {Buffer} buf original image bytes (returned as-is)
 * @param {import('sharp').Metadata | null} meta sharp metadata, or null when probing failed
 * @returns {OptimizeImageImportResult}
 */
function makePassthrough(buf, meta) {
  // Format → mime/ext table; aliases (jpg/jpeg, tif/tiff) share one target.
  const byFormat = new Map([
    ['jpeg', { mime: 'image/jpeg', ext: 'jpg' }],
    ['jpg', { mime: 'image/jpeg', ext: 'jpg' }],
    ['png', { mime: 'image/png', ext: 'png' }],
    ['webp', { mime: 'image/webp', ext: 'webp' }],
    ['gif', { mime: 'image/gif', ext: 'gif' }],
    ['tiff', { mime: 'image/tiff', ext: 'tiff' }],
    ['tif', { mime: 'image/tiff', ext: 'tiff' }],
    ['bmp', { mime: 'image/bmp', ext: 'bmp' }],
  ]);
  const hit = byFormat.get(meta?.format ?? '') ?? { mime: 'application/octet-stream', ext: 'bin' };
  return {
    buffer: buf,
    mime: hit.mime,
    ext: hit.ext,
    width: meta?.width ?? 0,
    height: meta?.height ?? 0,
    passthrough: true,
  };
}
/**
 * Verifies that an encoded buffer still has the expected pixel dimensions.
 * Throws an Error tagged with code 'DIM' on mismatch so callers can treat
 * it as "keep the original bytes" rather than a hard failure.
 *
 * @param {Buffer} outBuf re-encoded image bytes
 * @param {number} w0 expected width
 * @param {number} h0 expected height
 */
async function sameDimensionsOrThrow(outBuf, w0, h0) {
  const meta = await sharp(outBuf).metadata();
  const w = meta.width ?? 0;
  const h = meta.height ?? 0;
  if (w === w0 && h === h0) return;
  throw Object.assign(new Error('encode changed dimensions'), { code: 'DIM' });
}
/**
 * Re-encodes a raster image "visually lossless": identical pixel dimensions,
 * choosing the smaller of WebP/JPEG for opaque images or lossless WebP/PNG
 * for images with alpha. The original bytes are returned untouched
 * (passthrough) whenever decoding fails, the image is animated or vector,
 * or the re-encode does not actually shrink the file.
 *
 * @param {Buffer} src
 * @returns {Promise<OptimizeImageImportResult>}
 */
export async function optimizeImageBufferVisuallyLossless(src) {
  const input = Buffer.isBuffer(src) ? src : Buffer.from(src);
  if (input.length === 0) {
    // Nothing to probe; label as png purely so passthrough carries a mime/ext.
    return makePassthrough(input, { width: 0, height: 0, format: 'png' });
  }
  let meta0;
  try {
    meta0 = await sharp(input, { failOn: 'error', unlimited: true }).metadata();
  } catch {
    // Undecodable input: keep bytes as-is with a generic mime.
    return makePassthrough(input, null);
  }
  const width = meta0.width ?? 0;
  const height = meta0.height ?? 0;
  if (width <= 0 || height <= 0) {
    return makePassthrough(input, meta0);
  }
  // Multi-page/animated inputs (e.g. animated GIF/WebP) would lose frames.
  const pages = meta0.pages ?? 1;
  if (pages > 1) {
    return makePassthrough(input, meta0);
  }
  // Vector/exotic containers are not safely re-encoded to a raster here.
  const rawFmt = meta0.format;
  if (rawFmt === 'svg' || rawFmt === 'pdf' || rawFmt === 'heif' || rawFmt === 'jxl' || rawFmt === 'vips') {
    return makePassthrough(input, meta0);
  }
  const hasAlpha = meta0.hasAlpha === true;
  try {
    if (hasAlpha) {
      // Alpha must stay exact: compare lossless WebP vs max-compression PNG.
      const webpLo = await sharp(input)
        .rotate()
        .ensureAlpha()
        .webp({ lossless: true, effort: WEBP_EFFORT })
        .toBuffer();
      const pngOut = await sharp(input)
        .rotate()
        .ensureAlpha()
        .png({ compressionLevel: 9, adaptiveFiltering: true })
        .toBuffer();
      // .rotate() applies EXIF orientation and may swap width/height;
      // the 'DIM' throw below then degrades to passthrough via the catch.
      await sameDimensionsOrThrow(webpLo, width, height);
      await sameDimensionsOrThrow(pngOut, width, height);
      const pickWebp = webpLo.length <= pngOut.length;
      const outBuf = pickWebp ? webpLo : pngOut;
      if (outBuf.length >= input.length) {
        // Re-encode did not shrink the file; keep the original bytes.
        return makePassthrough(input, meta0);
      }
      return {
        buffer: outBuf,
        mime: pickWebp ? 'image/webp' : 'image/png',
        ext: pickWebp ? 'webp' : 'png',
        width,
        height,
        passthrough: false,
      };
    }
    // Opaque image: near-lossless WebP vs high-quality mozjpeg, pick smaller.
    const webpColor = await sharp(input)
      .rotate()
      .webp({
        quality: RASTER_QUALITY,
        nearLossless: true,
        effort: WEBP_EFFORT,
        smartSubsample: false,
      })
      .toBuffer();
    const jpegColor = await sharp(input)
      .rotate()
      .jpeg({
        quality: RASTER_QUALITY,
        chromaSubsampling: '4:4:4',
        mozjpeg: true,
        optimizeScans: true,
      })
      .toBuffer();
    await sameDimensionsOrThrow(webpColor, width, height);
    await sameDimensionsOrThrow(jpegColor, width, height);
    const useWebp = webpColor.length <= jpegColor.length;
    const outBuf = useWebp ? webpColor : jpegColor;
    if (outBuf.length >= input.length) {
      return makePassthrough(input, meta0);
    }
    return {
      buffer: outBuf,
      mime: useWebp ? 'image/webp' : 'image/jpeg',
      ext: useWebp ? 'webp' : 'jpg',
      width,
      height,
      passthrough: false,
    };
  } catch {
    // Dimension mismatch ('DIM') and encoder failures both degrade to
    // passthrough; the original branched on e.code === 'DIM' but both
    // branches were identical, so one handler suffices.
    return makePassthrough(input, meta0);
  }
}
@@ -0,0 +1,54 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import sharp from 'sharp';
import { optimizeImageBufferVisuallyLossless } from './optimizeImageImport.lib.mjs';
// Opaque RGB input must be re-encoded (not passthrough) without changing pixel size.
void test('optimizeImageBufferVisuallyLossless: preserves dimensions for opaque RGB', async () => {
// Deliberately uncompressed PNG so the optimizer can always produce a smaller file.
const input = await sharp({
create: {
width: 400,
height: 300,
channels: 3,
background: { r: 10, g: 100, b: 200 },
},
})
.png({ compressionLevel: 0 })
.toBuffer();
const out = await optimizeImageBufferVisuallyLossless(input);
assert.equal(out.passthrough, false);
assert.ok(out.buffer.length > 0);
assert.ok(out.buffer.length < input.length);
// Pixel dimensions must survive the re-encode.
const meta = await sharp(out.buffer).metadata();
assert.equal(meta.width, 400);
assert.equal(meta.height, 300);
});
// Semi-transparent input must keep its alpha channel (lossless WebP or PNG output).
void test('optimizeImageBufferVisuallyLossless: preserves dimensions for alpha', async () => {
const input = await sharp({
create: {
width: 200,
height: 200,
channels: 4,
background: { r: 255, g: 0, b: 0, alpha: 0.5 },
},
})
.png({ compressionLevel: 0 })
.toBuffer();
const out = await optimizeImageBufferVisuallyLossless(input);
assert.equal(out.passthrough, false);
assert.ok(out.mime === 'image/webp' || out.mime === 'image/png');
assert.ok(out.buffer.length < input.length);
const meta = await sharp(out.buffer).metadata();
assert.equal(meta.width, 200);
assert.equal(meta.height, 200);
assert.equal(meta.hasAlpha, true);
});
// Non-image bytes must be returned unchanged rather than throwing.
void test('optimizeImageBufferVisuallyLossless: non-image buffer is passthrough', async () => {
const out = await optimizeImageBufferVisuallyLossless(Buffer.from('not an image'));
assert.equal(out.passthrough, true);
});
@@ -0,0 +1,35 @@
import assert from 'node:assert/strict';
import fs from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import test from 'node:test';
import sharp from 'sharp';
import { generateScenePreviewThumbnailBytes, SCENE_PREVIEW_THUMB_MAX_PX } from './scenePreviewThumbnail';
// A wide 800x400 source must scale down so its longest edge fits SCENE_PREVIEW_THUMB_MAX_PX.
void test('generateScenePreviewThumbnailBytes: image scales to max edge', async () => {
const tmp = await fs.mkdtemp(path.join(os.tmpdir(), 'dnd-thumb-test-'));
const src = path.join(tmp, 'wide.png');
await sharp({
create: {
width: 800,
height: 400,
channels: 3,
background: { r: 200, g: 40, b: 40 },
},
})
.png()
.toFile(src);
const buf = await generateScenePreviewThumbnailBytes(src, 'image');
assert.ok(buf !== null);
assert.ok(buf.length > 0);
// The thumbnail must decode and respect the max-edge constraint.
const meta = await sharp(buf).metadata();
assert.equal(typeof meta.width, 'number');
assert.equal(typeof meta.height, 'number');
assert.ok(meta.width > 0 && meta.height > 0);
assert.ok(Math.max(meta.width, meta.height) <= SCENE_PREVIEW_THUMB_MAX_PX);
await fs.rm(tmp, { recursive: true, force: true });
});
+86
View File
@@ -0,0 +1,86 @@
import { execFile } from 'node:child_process';
import fs from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import { promisify } from 'node:util';
import ffmpegStatic from 'ffmpeg-static';
import sharp from 'sharp';
const execFileAsync = promisify(execFile);
/** Longest edge of generated thumbnails, in px; presentation uses the original asset. */
export const SCENE_PREVIEW_THUMB_MAX_PX = 320;
/**
 * Builds a small WebP still for graph/list previews.
 *
 * Images are resized directly with sharp (EXIF rotation applied, never
 * enlarged). For videos a single frame is first extracted with the bundled
 * ffmpeg, trying several seek offsets so short clips still yield a frame.
 *
 * @param sourceAbsPath absolute path to the source image or video file
 * @param kind which extraction pipeline to use for the source
 * @returns WebP bytes, or null if generation fails (import still succeeds)
 */
export async function generateScenePreviewThumbnailBytes(
sourceAbsPath: string,
kind: 'image' | 'video',
): Promise<Buffer | null> {
try {
if (kind === 'image') {
return await sharp(sourceAbsPath)
.rotate()
.resize(SCENE_PREVIEW_THUMB_MAX_PX, SCENE_PREVIEW_THUMB_MAX_PX, {
fit: 'inside',
withoutEnlargement: true,
})
.webp({ quality: 82 })
.toBuffer();
}
// Video path requires the bundled ffmpeg binary to grab a still frame.
const ffmpegPath = ffmpegStatic;
if (!ffmpegPath) return null;
const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'dnd-thumb-'));
const tmpPng = path.join(tmpDir, 'frame.png');
try {
// Try progressively earlier timestamps; clips shorter than 0.5s would
// otherwise produce no frame at all.
const seekSeconds = ['0.5', '0.25', '0'];
let extracted = false;
for (const ss of seekSeconds) {
await fs.rm(tmpPng, { force: true }).catch(() => undefined);
try {
await execFileAsync(
ffmpegPath,
[
'-hide_banner',
'-loglevel',
'error',
'-y',
'-ss',
ss,
'-i',
sourceAbsPath,
'-frames:v',
'1',
tmpPng,
],
{ maxBuffer: 16 * 1024 * 1024 },
);
// Accept the frame only if a non-empty file was actually produced.
const st = await fs.stat(tmpPng).catch(() => null);
if (st !== null && st.isFile() && st.size > 0) {
extracted = true;
break;
}
} catch {
/* try next seek */
}
}
if (!extracted) return null;
return await sharp(tmpPng)
.resize(SCENE_PREVIEW_THUMB_MAX_PX, SCENE_PREVIEW_THUMB_MAX_PX, {
fit: 'inside',
withoutEnlargement: true,
})
.webp({ quality: 82 })
.toBuffer();
} finally {
// Always remove the temp frame dir, even when extraction failed.
await fs.rm(tmpDir, { recursive: true, force: true }).catch(() => undefined);
}
} catch {
return null;
}
}
+15 -1
View File
@@ -5,12 +5,18 @@ import yauzl from 'yauzl';
import type { Project } from '../../shared/types';
export function unzipToDir(zipPath: string, outDir: string): Promise<void> {
export function unzipToDir(
zipPath: string,
outDir: string,
onProgress?: (done: number, total: number) => void,
): Promise<void> {
return new Promise((resolve, reject) => {
yauzl.open(zipPath, { lazyEntries: true }, (err, zip) => {
if (err) return reject(err);
const zipFile = zip;
let settled = false;
const total = zipFile.entryCount || 0;
let done = 0;
const safeClose = (): void => {
try {
@@ -37,6 +43,14 @@ export function unzipToDir(zipPath: string, outDir: string): Promise<void> {
zipFile.readEntry();
zipFile.on('entry', (entry: yauzl.Entry) => {
if (settled) return;
done += 1;
if (onProgress && total > 0) {
try {
onProgress(done, total);
} catch {
// ignore
}
}
const filePath = path.join(outDir, entry.fileName);
if (entry.fileName.endsWith('/')) {
fssync.mkdirSync(filePath, { recursive: true });
@@ -39,3 +39,9 @@ void test('zipStore: exportProjectZipToPath flushes saveNow for currently open p
assert.match(src, /if \(this\.openProject\?\.id === projectId\)\s*\{\s*await this\.saveNow\(\);\s*\}/);
assert.match(src, /await fs\.copyFile\(src, dest\)/);
});
// Source-level smoke check: zipStore.ts must mention previewThumbAssetId inside
// a normalizeScene function so older projects get a default for the new field.
void test('zipStore: normalizeScene defaults previewThumbAssetId for older projects', () => {
const src = fs.readFileSync(path.join(here, 'zipStore.ts'), 'utf8');
assert.match(src, /previewThumbAssetId/);
assert.match(src, /function normalizeScene\(/);
});
+174 -23
View File
@@ -25,7 +25,9 @@ import { getAppSemanticVersion } from '../versionInfo';
import { reconcileAssetFiles } from './assetPrune';
import { rmWithRetries } from './fsRetry';
import { optimizeImageBufferVisuallyLossless } from './optimizeImageImport.lib.mjs';
import { getLegacyProjectsRootDirs, getProjectsCacheRootDir, getProjectsRootDir } from './paths';
import { generateScenePreviewThumbnailBytes } from './scenePreviewThumbnail';
import { readProjectJsonFromZip, unzipToDir } from './yauzlProjectZip';
type ProjectIndexEntry = {
@@ -213,6 +215,44 @@ export class ZipProjectStore {
return project;
}
/**
 * Opens a project by id while reporting unzip progress (0..100) to the caller.
 * Flushes pending saves, rebuilds the cache directory from the project zip,
 * then normalizes and adopts project.json as the open project.
 *
 * @throws Error when no project with the given id is listed
 */
private async openProjectByIdWithProgress(
projectId: ProjectId,
onUnzipPercent: (pct: number) => void,
): Promise<Project> {
await this.ensureRoots();
// Mutations are persisted to cache immediately, but zip packing is debounced (queueSave).
// When switching projects we delete the cache and restore it from the zip, so flush pending saves first.
if (this.openProject) {
await this.saveNow();
}
// Start a new project session (mirrors the plain openProjectById path).
this.projectSession += 1;
const list = await this.listProjects();
const entry = list.find((p) => p.id === projectId);
if (!entry) {
throw new Error('Project not found');
}
const zipPath = path.join(getProjectsRootDir(), entry.fileName);
const cacheDir = path.join(getProjectsCacheRootDir(), projectId);
// Rebuild the cache from scratch so stale files never survive a reopen.
await fs.rm(cacheDir, { recursive: true, force: true });
await fs.mkdir(cacheDir, { recursive: true });
await unzipToDir(zipPath, cacheDir, (done, total) => {
// Clamp to 0..100; total may be 0 for an empty archive.
const pct = total > 0 ? Math.round((done / total) * 100) : 0;
onUnzipPercent(Math.max(0, Math.min(100, pct)));
});
const projectPath = path.join(cacheDir, 'project.json');
const projectRaw = await fs.readFile(projectPath, 'utf8');
const parsed = JSON.parse(projectRaw) as unknown as Project;
const project = normalizeProject(parsed);
// Older projects may carry an empty meta.fileBaseName; derive it from the zip name.
const fileBaseName = entry.fileName.replace(/\.dnd\.zip$/iu, '');
project.meta.fileBaseName = project.meta.fileBaseName.trim().length
? project.meta.fileBaseName
: fileBaseName;
this.openProject = { id: projectId, zipPath, cacheDir, projectPath, project };
return project;
}
getOpenProject(): Project | null {
return this.openProject?.project ?? null;
}
@@ -245,35 +285,78 @@ export class ZipProjectStore {
const sc = open.project.scenes[sceneId];
if (!sc) throw new Error('Scene not found');
const kind = classifyMediaPath(filePath);
if (kind?.type !== 'image' && kind?.type !== 'video') {
const kind0 = classifyMediaPath(filePath);
if (!kind0 || (kind0.type !== 'image' && kind0.type !== 'video')) {
throw new Error('Файл превью должен быть изображением или видео');
}
let kind: MediaKind = kind0;
const buf = await fs.readFile(filePath);
const sha256 = crypto.createHash('sha256').update(buf).digest('hex');
const id = asAssetId(this.randomId());
const orig = path.basename(filePath);
const safeOrig = sanitizeFileName(orig);
const relPath = `assets/${id}_${safeOrig}`;
const abs = path.join(open.cacheDir, relPath);
let safeOrig = sanitizeFileName(orig);
let relPath = `assets/${id}_${safeOrig}`;
let abs = path.join(open.cacheDir, relPath);
let writeBuf = buf;
let storedOrig = orig;
if (kind.type === 'image') {
const opt = await optimizeImageBufferVisuallyLossless(buf);
if (!opt.passthrough) {
writeBuf = Buffer.from(opt.buffer);
kind = { type: 'image', mime: opt.mime };
safeOrig = sanitizeFileName(`${path.parse(orig).name}.${opt.ext}`);
relPath = `assets/${id}_${safeOrig}`;
abs = path.join(open.cacheDir, relPath);
storedOrig = `${path.parse(orig).name}.${opt.ext}`;
}
}
const sha256 = crypto.createHash('sha256').update(writeBuf).digest('hex');
await fs.mkdir(path.dirname(abs), { recursive: true });
await fs.copyFile(filePath, abs);
const asset = buildMediaAsset(id, kind, orig, relPath, sha256, buf.length);
await fs.writeFile(abs, writeBuf);
const asset = buildMediaAsset(id, kind, storedOrig, relPath, sha256, writeBuf.length);
const thumbKind = kind.type === 'image' ? 'image' : 'video';
const thumbBytes = await generateScenePreviewThumbnailBytes(abs, thumbKind);
let thumbAsset: MediaAsset | null = null;
let thumbId: AssetId | null = null;
if (thumbBytes !== null && thumbBytes.length > 0) {
thumbId = asAssetId(this.randomId());
const thumbRelPath = `assets/${thumbId}_preview_thumb.webp`;
const thumbAbs = path.join(open.cacheDir, thumbRelPath);
await fs.writeFile(thumbAbs, thumbBytes);
const thumbSha = crypto.createHash('sha256').update(thumbBytes).digest('hex');
const thumbOrigName = `${path.parse(safeOrig).name}_preview_thumb.webp`;
thumbAsset = buildMediaAsset(
thumbId,
{ type: 'image', mime: 'image/webp' },
thumbOrigName,
thumbRelPath,
thumbSha,
thumbBytes.length,
);
}
const oldPreviewId = sc.previewAssetId;
const oldThumbId = sc.previewThumbAssetId ?? null;
await this.updateProject((p) => {
const scene = p.scenes[sceneId];
if (!scene) throw new Error('Scene not found');
let assets: Record<AssetId, MediaAsset> = { ...p.assets };
if (oldPreviewId) {
assets = Object.fromEntries(Object.entries(assets).filter(([k]) => k !== oldPreviewId)) as Record<
AssetId,
MediaAsset
>;
const drop = new Set<AssetId>();
if (oldPreviewId) drop.add(oldPreviewId);
if (oldThumbId) drop.add(oldThumbId);
if (drop.size > 0) {
assets = Object.fromEntries(
Object.entries(assets).filter(([k]) => !drop.has(k as AssetId)),
) as Record<AssetId, MediaAsset>;
}
assets[id] = asset;
if (thumbAsset !== null && thumbId !== null) {
assets[thumbId] = thumbAsset;
}
return {
...p,
assets,
@@ -283,6 +366,7 @@ export class ZipProjectStore {
...scene,
previewAssetId: id,
previewAssetType: kind.type,
previewThumbAssetId: thumbId,
previewVideoAutostart: kind.type === 'video' ? scene.previewVideoAutostart : false,
},
},
@@ -300,14 +384,17 @@ export class ZipProjectStore {
const sc = open.project.scenes[sceneId];
if (!sc) throw new Error('Scene not found');
const oldId = sc.previewAssetId;
if (!oldId) {
const oldThumbId = sc.previewThumbAssetId ?? null;
if (!oldId && !oldThumbId) {
return open.project;
}
await this.updateProject((p) => {
const assets = Object.fromEntries(Object.entries(p.assets).filter(([k]) => k !== oldId)) as Record<
AssetId,
MediaAsset
>;
const drop = new Set<AssetId>();
if (oldId) drop.add(oldId);
if (oldThumbId) drop.add(oldThumbId);
const assets = Object.fromEntries(
Object.entries(p.assets).filter(([k]) => !drop.has(k as AssetId)),
) as Record<AssetId, MediaAsset>;
return {
...p,
assets,
@@ -317,6 +404,7 @@ export class ZipProjectStore {
...p.scenes[sceneId],
previewAssetId: null,
previewAssetType: null,
previewThumbAssetId: null,
previewVideoAutostart: false,
},
},
@@ -355,6 +443,7 @@ export class ZipProjectStore {
layout: { x: 0, y: 0 },
previewAssetId: null,
previewAssetType: null,
previewThumbAssetId: null,
previewVideoAutostart: false,
previewRotationDeg: 0,
} satisfies Scene);
@@ -365,6 +454,9 @@ export class ZipProjectStore {
...(patch.description !== undefined ? { description: patch.description } : null),
...(patch.previewAssetId !== undefined ? { previewAssetId: patch.previewAssetId } : null),
...(patch.previewAssetType !== undefined ? { previewAssetType: patch.previewAssetType } : null),
...(patch.previewThumbAssetId !== undefined
? { previewThumbAssetId: patch.previewThumbAssetId }
: null),
...(patch.previewVideoAutostart !== undefined
? { previewVideoAutostart: patch.previewVideoAutostart }
: null),
@@ -784,7 +876,10 @@ export class ZipProjectStore {
* Если архив уже лежит в `projects`, только открывает.
* При конфликте `id` с другим файлом перезаписывает `project.json` в копии с новым id.
*/
async importProjectFromExternalZip(sourcePath: string): Promise<Project> {
async importProjectFromExternalZip(
sourcePath: string,
onProgress?: (p: { stage: 'copy' | 'unzip' | 'done'; percent: number; detail?: string }) => void,
): Promise<Project> {
await this.ensureRoots();
const resolved = path.resolve(sourcePath);
const st = await fs.stat(resolved).catch(() => null);
@@ -809,7 +904,12 @@ export class ZipProjectStore {
} else {
destFileName = await uniqueDndZipFileName(root, baseName);
destPath = path.join(root, destFileName);
await fs.copyFile(resolved, destPath);
if (onProgress) onProgress({ stage: 'copy', percent: 1, detail: 'Копирование…' });
await copyFileWithProgress(resolved, destPath, (pct) => {
if (!onProgress) return;
// Copy is ~70% of the operation; unzip/open happens after.
onProgress({ stage: 'copy', percent: Math.max(1, Math.min(70, pct)), detail: 'Копирование…' });
});
}
let project = await readProjectJsonFromZip(destPath);
@@ -832,11 +932,19 @@ export class ZipProjectStore {
}
this.projectSession += 1;
return this.openProjectById(project.id);
const opened = await this.openProjectByIdWithProgress(project.id, (pct) => {
if (onProgress) onProgress({ stage: 'unzip', percent: pct, detail: 'Распаковка…' });
});
if (onProgress) onProgress({ stage: 'done', percent: 100, detail: 'Готово' });
return opened;
}
/** Копия файла проекта в указанный путь (полный путь к `.dnd.zip`). */
async exportProjectZipToPath(projectId: ProjectId, destinationPath: string): Promise<void> {
async exportProjectZipToPath(
projectId: ProjectId,
destinationPath: string,
onProgress?: (p: { stage: 'copy' | 'done'; percent: number; detail?: string }) => void,
): Promise<void> {
await this.ensureRoots();
// If exporting the currently open project, make sure pending debounced pack is flushed.
if (this.openProject?.id === projectId) {
@@ -850,7 +958,11 @@ export class ZipProjectStore {
const src = path.join(getProjectsRootDir(), entry.fileName);
const dest = path.resolve(destinationPath);
await fs.mkdir(path.dirname(dest), { recursive: true });
await fs.copyFile(src, dest);
if (onProgress) onProgress({ stage: 'copy', percent: 1, detail: 'Копирование…' });
await copyFileWithProgress(src, dest, (pct) => {
if (onProgress) onProgress({ stage: 'copy', percent: pct, detail: 'Копирование…' });
});
if (onProgress) onProgress({ stage: 'done', percent: 100, detail: 'Готово' });
}
/** Удаляет архив проекта и кэш распаковки с диска. Если проект открыт — сбрасывает сессию. */
@@ -993,6 +1105,8 @@ function normalizeScene(s: Scene): Scene {
(s as unknown as { previewVideoAutostostart?: boolean; previewVideoAutostart?: boolean })
.previewVideoAutostart,
);
const previewThumbAssetId =
(s as unknown as { previewThumbAssetId?: AssetId | null }).previewThumbAssetId ?? null;
const rawAudios = Array.isArray(raw.audios) ? raw.audios : [];
const audios = rawAudios
@@ -1017,6 +1131,7 @@ function normalizeScene(s: Scene): Scene {
...s,
previewAssetId: previewAssetId ?? null,
previewAssetType,
previewThumbAssetId,
previewVideoAutostart,
previewRotationDeg,
layout: layoutIn ?? { x: 0, y: 0 },
@@ -1125,6 +1240,40 @@ async function replaceFileAtomic(srcPath: string, destPath: string): Promise<voi
}
}
/**
 * Copies a file while reporting integer percent progress (0..100).
 *
 * @param src source file path
 * @param dest destination file path (parent directory is created)
 * @param onPercent progress callback; exceptions thrown by it are ignored
 */
async function copyFileWithProgress(
  src: string,
  dest: string,
  onPercent: (pct: number) => void,
): Promise<void> {
  const st = await fs.stat(src);
  const total = st.size || 0;
  // Ensure the destination directory exists for both copy paths
  // (the original only did this for the streaming path).
  await fs.mkdir(path.dirname(dest), { recursive: true });
  if (total <= 0) {
    // Empty source: percent math would divide by zero — plain copy instead.
    await fs.copyFile(src, dest);
    onPercent(100);
    return;
  }
  await new Promise<void>((resolve, reject) => {
    let done = 0;
    const rs = fssync.createReadStream(src);
    const ws = fssync.createWriteStream(dest);
    const onErr = (e: unknown) => reject(e instanceof Error ? e : new Error(String(e)));
    rs.on('error', onErr);
    ws.on('error', onErr);
    rs.on('data', (chunk) => {
      done += chunk.length;
      const pct = Math.round((done / total) * 100);
      try {
        onPercent(Math.max(0, Math.min(100, pct)));
      } catch {
        // Progress callbacks must never abort the copy.
      }
    });
    // 'finish' fires only after all data is flushed to dest; the previous
    // 'close' also fires when the stream is destroyed on error.
    ws.on('finish', () => resolve());
    rs.pipe(ws);
  });
}
type MediaKind = { type: MediaAssetType; mime: string };
function classifyMediaPath(filePath: string): MediaKind | null {
@@ -1139,6 +1288,8 @@ function classifyMediaPath(filePath: string): MediaKind | null {
return { type: 'image', mime: 'image/webp' };
case '.gif':
return { type: 'image', mime: 'image/gif' };
case '.bmp':
return { type: 'image', mime: 'image/bmp' };
case '.mp4':
return { type: 'video', mime: 'video/mp4' };
case '.webm':