diff --git a/src/modals/ImportExportModal.js b/src/modals/ImportExportModal.js
index f57e3f7..cb134d2 100644
--- a/src/modals/ImportExportModal.js
+++ b/src/modals/ImportExportModal.js
@@ -206,11 +206,12 @@ function ImportExportModal({ isOpen, onRequestClose }) {
     const tokenIds = checkedTokens.map((token) => token.id);
 
     try {
-      const blob = await worker.exportData(
+      const buffer = await worker.exportData(
        Comlink.proxy(handleDBProgress),
        mapIds,
        tokenIds
      );
+      const blob = new Blob([buffer]);
      saveAs(blob, `${shortid.generate()}.owlbear`);
      addSuccessToast("Exported", checkedMaps, checkedTokens);
    } catch (e) {
diff --git a/src/workers/DatabaseWorker.js b/src/workers/DatabaseWorker.js
index bb47f64..28d6c9c 100644
--- a/src/workers/DatabaseWorker.js
+++ b/src/workers/DatabaseWorker.js
@@ -7,6 +7,7 @@ import {
 import { encode, decode } from "@msgpack/msgpack";
 
 import { getDatabase } from "../database";
+import blobToBuffer from "../helpers/blobToBuffer";
 
 // Worker to load large amounts of database data on a separate thread
 let service = {
@@ -92,7 +93,10 @@ let service = {
       numRowsPerChunk: 1,
       prettyJson: true,
     });
-    return data;
+
+    const buffer = await blobToBuffer(data);
+
+    return Comlink.transfer(buffer, [buffer.buffer]);
  },

  /**
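
For context, `Comlink.transfer(value, transferables)` marks the listed `ArrayBuffer` so it is moved to the main thread via the `postMessage` transfer list instead of being structured-cloned, and the modal then rebuilds the `Blob` with `new Blob([buffer])` before handing it to `saveAs`. The diff does not include `src/helpers/blobToBuffer.js`; a minimal sketch of a compatible helper, assuming it returns a `Uint8Array` (so that `buffer.buffer` is the underlying `ArrayBuffer` passed to `Comlink.transfer`), might look like this:

```js
// Hypothetical sketch of src/helpers/blobToBuffer.js; the helper actually
// used in the repository may be implemented differently.
export default async function blobToBuffer(blob) {
  // Blob.arrayBuffer() resolves with the blob's bytes as an ArrayBuffer
  // and is available both on the main thread and in workers.
  const arrayBuffer = await blob.arrayBuffer();
  // Wrap it in a Uint8Array so callers can reach the raw ArrayBuffer
  // through `.buffer` when building the transfer list.
  return new Uint8Array(arrayBuffer);
}
```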