import * as Comlink from "comlink";
import {
  importInto,
  exportDB,
  peakImportFile,
} from "@mitchemmc/dexie-export-import";
import { ExportProgress } from "@mitchemmc/dexie-export-import/dist/export";
import { encode, decode } from "@msgpack/msgpack";

import { getDatabase } from "../database";
import blobToBuffer from "../helpers/blobToBuffer";

import { Map } from "../types/Map";
import { Token } from "../types/Token";
import { Asset } from "../types/Asset";

type ProgressCallback = (progress: ExportProgress) => boolean;

// Worker to load large amounts of database data on a separate thread
let service = {
  /**
   * Load either a whole table or individual item from the DB
   * @param {string} table Table to load from
   * @param {string=} key Optional database key to load, if undefined whole table will be loaded
   */
  async loadData<T>(table: string, key?: string): Promise<Uint8Array> {
    try {
      let db = getDatabase({});
      if (key) {
        // Load specific item
        const data = await db.table(table).get(key);
        const packed = encode(data);
        return Comlink.transfer(packed, [packed.buffer]);
      } else {
        // Load entire table
        let items: T[] = [];
        // Use a cursor instead of toArray to prevent IPC max size error
        await db.table(table).each((item: any) => {
          items.push(item);
        });

        // Pack data with msgpack so we can use transfer to avoid memory issues
        const packed = encode(items);
        return Comlink.transfer(packed, [packed.buffer]);
      }
    } catch {
      throw new Error("Unable to load database");
    }
  },
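
  // Zero-copy hand-off: the packed bytes above are transferred, not cloned.
  // A minimal sketch of the round trip through loadData/putData, assuming a
  // Comlink-wrapped `worker` proxy for this service on the main thread with
  // encode/decode imported from @msgpack/msgpack there as well:
  //   const packed = await worker.loadData("maps");  // Uint8Array of msgpack
  //   const maps = decode(packed) as Map[];          // back to plain rows
  //   await worker.putData(encode(maps[0]), "maps"); // re-encode a single row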

  /**
   * Put data into table encoded by msgpack
   * @param {Uint8Array} data
   * @param {string} table
   */
  async putData(data: Uint8Array, table: string): Promise<boolean> {
    try {
      let db = getDatabase({});
      const decoded = decode(data);
      await db.table(table).put(decoded);
      return true;
    } catch {
      return false;
    }
  },

  /**
   * Export current database
   * @param {function} progressCallback
   * @param {string[]} mapIds An array of map ids to export
   * @param {string[]} tokenIds An array of token ids to export
   */
  async exportData(
    progressCallback: ProgressCallback,
    mapIds: string[],
    tokenIds: string[]
  ) {
    let db = getDatabase({});

    // Add assets for selected maps and tokens
    const maps: Map[] = await db
      .table<Map>("maps")
      .where("id")
      .anyOf(mapIds)
      .toArray();
    const tokens = await db
      .table<Token>("tokens")
      .where("id")
      .anyOf(tokenIds)
      .toArray();
    const assetIds: string[] = [];
    for (let map of maps) {
      if (map.type === "file") {
        assetIds.push(map.file);
        assetIds.push(map.thumbnail);
        for (let res of Object.values(map.resolutions)) {
          res && assetIds.push(res);
        }
      }
    }
    for (let token of tokens) {
      if (token.type === "file") {
        assetIds.push(token.file);
        assetIds.push(token.thumbnail);
      }
    }

    const filter = (table: string, value: any) => {
      if (table === "maps") {
        return mapIds.includes(value.id);
      }
      if (table === "states") {
        return mapIds.includes(value.mapId);
      }
      if (table === "tokens") {
        return tokenIds.includes(value.id);
      }
      if (table === "assets") {
        return assetIds.includes(value.id);
      }
      // Always include groups table
      if (table === "groups") {
        return true;
      }

      return false;
    };

    const data = await exportDB(db as any, {
      progressCallback,
      filter,
      numRowsPerChunk: 1,
      prettyJson: true,
    });

    const buffer = await blobToBuffer(data);

    return Comlink.transfer(buffer, [buffer.buffer]);
  },
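
  // Consuming sketch for exportData (assumptions: a Comlink-wrapped `worker`
  // proxy, and Comlink.proxy to pass the progress callback across threads):
  //   const buffer = await worker.exportData(
  //     Comlink.proxy((progress: ExportProgress) => true),
  //     selectedMapIds,
  //     selectedTokenIds
  //   );
  //   const blob = new Blob([buffer]); // transferred bytes, ready to save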

  /**
   * Import into current database
   * @param {Blob} data
   * @param {string} databaseName The name of the database to import into
   * @param {ProgressCallback} progressCallback
   */
  async importData(
    data: Blob,
    databaseName: string,
    progressCallback: ProgressCallback
  ) {
    const importMeta = await peakImportFile(data);
    if (!importMeta.data) {
      throw new Error("Unable to parse file");
    }

    let db = getDatabase({});

    if (importMeta.data.databaseName !== db.name) {
      throw new Error("Unable to import database, name mismatch");
    }
    if (importMeta.data.databaseVersion > db.verno) {
      throw new Error(
        `Database version differs. Current database is in version ${db.verno} but export is ${importMeta.data.databaseVersion}`
      );
    }

    // Ensure import DB is cleared before importing new data
    let importDB = getDatabase({ addons: [] }, databaseName, 0);
    await importDB.delete();
    importDB.close();

    // Load import database up to its desired version
    importDB = getDatabase(
      { addons: [] },
      databaseName,
      importMeta.data.databaseVersion,
      false
    );
    await importInto(importDB as any, data, {
      progressCallback,
      acceptNameDiff: true,
      overwriteValues: true,
      filter: (table, value) => {
        // Ensure values are of the correct form
        if (table === "maps" || table === "tokens") {
          return "id" in value && "owner" in value;
        }
        if (table === "states") {
          return "mapId" in value;
        }
        return true;
      },
      acceptVersionDiff: true,
    });
    importDB.close();
  },
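
  // Calling sketch for importData (assumptions as above; `tempDBName` is a
  // hypothetical scratch database name chosen by the caller for staging):
  //   await worker.importData(
  //     file,                     // Blob selected by the user
  //     tempDBName,
  //     Comlink.proxy(() => true) // progress callback
  //   );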

  /**
   * Ensure the asset cache doesn't go over `maxCacheSize` by removing cached assets
   * Removes largest assets first
   * @param {number} maxCacheSize Max size of cache in bytes
   */
  async cleanAssetCache(maxCacheSize: number) {
    try {
      let db = getDatabase({});
      const userId = (await db.table("user").get("userId")).value;

      const assetSizes: { id: string; size: number }[] = [];
      await db
        .table("assets")
        .where("owner")
        .notEqual(userId)
        .each((asset: Asset) => {
          assetSizes.push({ id: asset.id, size: asset.file.byteLength });
        });
      const totalSize = assetSizes.reduce((acc, cur) => acc + cur.size, 0);
      if (totalSize > maxCacheSize) {
        // Remove largest assets first
        const largestAssets = assetSizes.sort((a, b) => b.size - a.size);
        let assetsToDelete: string[] = [];
        let deletedBytes = 0;
        for (let asset of largestAssets) {
          assetsToDelete.push(asset.id);
          deletedBytes += asset.size;
          if (totalSize - deletedBytes < maxCacheSize) {
            break;
          }
        }
        await db.table("assets").bulkDelete(assetsToDelete);
      }
    } catch {
      // Cache cleaning is best-effort; failures are deliberately ignored
    }
  },
};

export type DatabaseWorkerService = typeof service;

Comlink.expose(service);
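
// A minimal sketch of wiring this worker up on the main thread. The
// worker-loader import is an assumption about the build setup, not something
// this module defines:
//   import * as Comlink from "comlink";
//   import { decode } from "@msgpack/msgpack";
//   import DatabaseWorker from "worker-loader!./database"; // hypothetical path
//   import { DatabaseWorkerService } from "./database";
//
//   const worker = Comlink.wrap<DatabaseWorkerService>(new DatabaseWorker());
//   const packed = await worker.loadData("tokens");
//   const tokens = decode(packed);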