diff --git a/src/contexts/MapDataContext.js b/src/contexts/MapDataContext.js
index 2119e23..f81459d 100644
--- a/src/contexts/MapDataContext.js
+++ b/src/contexts/MapDataContext.js
@@ -1,4 +1,10 @@
-import React, { useEffect, useState, useContext } from "react";
+import React, {
+  useEffect,
+  useState,
+  useContext,
+  useCallback,
+  useRef,
+} from "react";
 import * as Comlink from "comlink";

 import AuthContext from "./AuthContext";
@@ -26,6 +32,8 @@ const defaultMapState = {
   notes: {},
 };

+const worker = Comlink.wrap(new DatabaseWorker());
+
 export function MapDataProvider({ children }) {
   const { database, databaseStatus } = useContext(DatabaseContext);
   const { userId } = useContext(AuthContext);
@@ -65,7 +73,6 @@ export function MapDataProvider({ children }) {
     }

     async function loadMaps() {
-      const worker = Comlink.wrap(new DatabaseWorker());
       await worker.loadData("maps");
       const storedMaps = await worker.data;
       const sortedMaps = storedMaps.sort((a, b) => b.created - a.created);
@@ -80,139 +87,28 @@ export function MapDataProvider({ children }) {
     loadMaps();
   }, [userId, database, databaseStatus]);

-  /**
-   * Adds a map to the database, also adds an assosiated state for that map
-   * @param {Object} map map to add
-   */
-  async function addMap(map) {
-    await database.table("maps").add(map);
-    const state = { ...defaultMapState, mapId: map.id };
-    await database.table("states").add(state);
-    setMaps((prevMaps) => [map, ...prevMaps]);
-    setMapStates((prevStates) => [state, ...prevStates]);
-    if (map.owner !== userId) {
-      await updateCache();
-    }
-  }
+  const mapsRef = useRef(maps);
+  useEffect(() => {
+    mapsRef.current = maps;
+  }, [maps]);

-  async function removeMap(id) {
-    await database.table("maps").delete(id);
-    await database.table("states").delete(id);
-    setMaps((prevMaps) => {
-      const filtered = prevMaps.filter((map) => map.id !== id);
-      return filtered;
-    });
-    setMapStates((prevMapsStates) => {
-      const filtered = prevMapsStates.filter((state) => state.mapId !== id);
-      return filtered;
-    });
-  }
+  const getMap = useCallback((mapId) => {
+    return mapsRef.current.find((map) => map.id === mapId);
+  }, []);

-  async function removeMaps(ids) {
-    await database.table("maps").bulkDelete(ids);
-    await database.table("states").bulkDelete(ids);
-    setMaps((prevMaps) => {
-      const filtered = prevMaps.filter((map) => !ids.includes(map.id));
-      return filtered;
-    });
-    setMapStates((prevMapsStates) => {
-      const filtered = prevMapsStates.filter(
-        (state) => !ids.includes(state.mapId)
-      );
-      return filtered;
-    });
-  }
-
-  async function resetMap(id) {
-    const state = { ...defaultMapState, mapId: id };
-    await database.table("states").put(state);
-    setMapStates((prevMapStates) => {
-      const newStates = [...prevMapStates];
-      const i = newStates.findIndex((state) => state.mapId === id);
-      if (i > -1) {
-        newStates[i] = state;
-      }
-      return newStates;
-    });
-    return state;
-  }
-
-  async function updateMap(id, update) {
-    // fake-indexeddb throws an error when updating maps in production.
-    // Catch that error and use put when it fails
-    try {
-      await database.table("maps").update(id, update);
-    } catch (error) {
-      // if (error.name !== "QuotaExceededError") {
-      const map = (await getMapFromDB(id)) || {};
-      await database.table("maps").put({ ...map, id, ...update });
-      // }
-    }
-    setMaps((prevMaps) => {
-      const newMaps = [...prevMaps];
-      const i = newMaps.findIndex((map) => map.id === id);
-      if (i > -1) {
-        newMaps[i] = { ...newMaps[i], ...update };
-      }
-      return newMaps;
-    });
-  }
-
-  async function updateMaps(ids, update) {
-    await Promise.all(
-      ids.map((id) => database.table("maps").update(id, update))
-    );
-    setMaps((prevMaps) => {
-      const newMaps = [...prevMaps];
-      for (let id of ids) {
-        const i = newMaps.findIndex((map) => map.id === id);
-        if (i > -1) {
-          newMaps[i] = { ...newMaps[i], ...update };
-        }
-      }
-      return newMaps;
-    });
-  }
-
-  async function updateMapState(id, update) {
-    await database.table("states").update(id, update);
-    setMapStates((prevMapStates) => {
-      const newStates = [...prevMapStates];
-      const i = newStates.findIndex((state) => state.mapId === id);
-      if (i > -1) {
-        newStates[i] = { ...newStates[i], ...update };
-      }
-      return newStates;
-    });
-  }
-
-  /**
-   * Adds a map to the database if none exists or replaces a map if it already exists
-   * Note: this does not add a map state to do that use AddMap
-   * @param {Object} map the map to put
-   */
-  async function putMap(map) {
-    await database.table("maps").put(map);
-    setMaps((prevMaps) => {
-      const newMaps = [...prevMaps];
-      const i = newMaps.findIndex((m) => m.id === map.id);
-      if (i > -1) {
-        newMaps[i] = { ...newMaps[i], ...map };
-      } else {
-        newMaps.unshift(map);
-      }
-      return newMaps;
-    });
-    if (map.owner !== userId) {
-      await updateCache();
-    }
-  }
+  const getMapFromDB = useCallback(
+    async (mapId) => {
+      let map = await database.table("maps").get(mapId);
+      return map;
+    },
+    [database]
+  );

   /**
    * Keep up to cachedMapMax amount of maps that you don't own
    * Sorted by when they we're last used
    */
-  async function updateCache() {
+  const updateCache = useCallback(async () => {
     const cachedMaps = await database
       .table("maps")
       .where("owner")
@@ -228,16 +124,157 @@ export function MapDataProvider({ children }) {
         return prevMaps.filter((map) => !idsToDelete.includes(map.id));
       });
     }
-  }
+  }, [database, userId]);

-  function getMap(mapId) {
-    return maps.find((map) => map.id === mapId);
-  }
+  /**
+   * Adds a map to the database, also adds an associated state for that map
+   * @param {Object} map map to add
+   */
+  const addMap = useCallback(
+    async (map) => {
+      await database.table("maps").add(map);
+      const state = { ...defaultMapState, mapId: map.id };
+      await database.table("states").add(state);
+      setMaps((prevMaps) => [map, ...prevMaps]);
+      setMapStates((prevStates) => [state, ...prevStates]);
+      if (map.owner !== userId) {
+        await updateCache();
+      }
+    },
+    [database, updateCache, userId]
+  );

-  async function getMapFromDB(mapId) {
-    let map = await database.table("maps").get(mapId);
-    return map;
-  }
+  const removeMap = useCallback(
+    async (id) => {
+      await database.table("maps").delete(id);
+      await database.table("states").delete(id);
+      setMaps((prevMaps) => {
+        const filtered = prevMaps.filter((map) => map.id !== id);
+        return filtered;
+      });
+      setMapStates((prevMapsStates) => {
+        const filtered = prevMapsStates.filter((state) => state.mapId !== id);
+        return filtered;
+      });
+    },
+    [database]
+  );
+
+  const removeMaps = useCallback(
+    async (ids) => {
database.table("maps").bulkDelete(ids); + await database.table("states").bulkDelete(ids); + setMaps((prevMaps) => { + const filtered = prevMaps.filter((map) => !ids.includes(map.id)); + return filtered; + }); + setMapStates((prevMapsStates) => { + const filtered = prevMapsStates.filter( + (state) => !ids.includes(state.mapId) + ); + return filtered; + }); + }, + [database] + ); + + const resetMap = useCallback( + async (id) => { + const state = { ...defaultMapState, mapId: id }; + await database.table("states").put(state); + setMapStates((prevMapStates) => { + const newStates = [...prevMapStates]; + const i = newStates.findIndex((state) => state.mapId === id); + if (i > -1) { + newStates[i] = state; + } + return newStates; + }); + return state; + }, + [database] + ); + + const updateMap = useCallback( + async (id, update) => { + // fake-indexeddb throws an error when updating maps in production. + // Catch that error and use put when it fails + try { + await database.table("maps").update(id, update); + } catch (error) { + const map = (await getMapFromDB(id)) || {}; + await database.table("maps").put({ ...map, id, ...update }); + } + setMaps((prevMaps) => { + const newMaps = [...prevMaps]; + const i = newMaps.findIndex((map) => map.id === id); + if (i > -1) { + newMaps[i] = { ...newMaps[i], ...update }; + } + return newMaps; + }); + }, + [database, getMapFromDB] + ); + + const updateMaps = useCallback( + async (ids, update) => { + await Promise.all( + ids.map((id) => database.table("maps").update(id, update)) + ); + setMaps((prevMaps) => { + const newMaps = [...prevMaps]; + for (let id of ids) { + const i = newMaps.findIndex((map) => map.id === id); + if (i > -1) { + newMaps[i] = { ...newMaps[i], ...update }; + } + } + return newMaps; + }); + }, + [database] + ); + + const updateMapState = useCallback( + async (id, update) => { + await database.table("states").update(id, update); + setMapStates((prevMapStates) => { + const newStates = [...prevMapStates]; + const i = newStates.findIndex((state) => state.mapId === id); + if (i > -1) { + newStates[i] = { ...newStates[i], ...update }; + } + return newStates; + }); + }, + [database] + ); + + /** + * Adds a map to the database if none exists or replaces a map if it already exists + * Note: this does not add a map state to do that use AddMap + * @param {Object} map the map to put + */ + const putMap = useCallback( + async (map) => { + await database.table("maps").put(map); + setMaps((prevMaps) => { + const newMaps = [...prevMaps]; + const i = newMaps.findIndex((m) => m.id === map.id); + if (i > -1) { + newMaps[i] = { ...newMaps[i], ...map }; + } else { + newMaps.unshift(map); + } + return newMaps; + }); + if (map.owner !== userId) { + await updateCache(); + } + }, + [database, updateCache, userId] + ); const ownedMaps = maps.filter((map) => map.owner === userId); diff --git a/src/contexts/TokenDataContext.js b/src/contexts/TokenDataContext.js index f94026d..b9b45fd 100644 --- a/src/contexts/TokenDataContext.js +++ b/src/contexts/TokenDataContext.js @@ -1,4 +1,10 @@ -import React, { useEffect, useState, useContext } from "react"; +import React, { + useEffect, + useState, + useContext, + useCallback, + useRef, +} from "react"; import * as Comlink from "comlink"; import AuthContext from "./AuthContext"; @@ -12,6 +18,8 @@ const TokenDataContext = React.createContext(); const cachedTokenMax = 100; +const worker = Comlink.wrap(new DatabaseWorker()); + export function TokenDataProvider({ children }) { const { database, databaseStatus } = 
diff --git a/src/contexts/TokenDataContext.js b/src/contexts/TokenDataContext.js
index f94026d..b9b45fd 100644
--- a/src/contexts/TokenDataContext.js
+++ b/src/contexts/TokenDataContext.js
@@ -1,4 +1,10 @@
-import React, { useEffect, useState, useContext } from "react";
+import React, {
+  useEffect,
+  useState,
+  useContext,
+  useCallback,
+  useRef,
+} from "react";
 import * as Comlink from "comlink";

 import AuthContext from "./AuthContext";
@@ -12,6 +18,8 @@ const TokenDataContext = React.createContext();

 const cachedTokenMax = 100;

+const worker = Comlink.wrap(new DatabaseWorker());
+
 export function TokenDataProvider({ children }) {
   const { database, databaseStatus } = useContext(DatabaseContext);
   const { userId } = useContext(AuthContext);
@@ -37,7 +45,6 @@ export function TokenDataProvider({ children }) {
     }

     async function loadTokens() {
-      const worker = Comlink.wrap(new DatabaseWorker());
       await worker.loadData("tokens");
       const storedTokens = await worker.data;
       const sortedTokens = storedTokens.sort((a, b) => b.created - a.created);
@@ -50,82 +57,28 @@ export function TokenDataProvider({ children }) {
     loadTokens();
   }, [userId, database, databaseStatus]);

-  async function addToken(token) {
-    await database.table("tokens").add(token);
-    setTokens((prevTokens) => [token, ...prevTokens]);
-    if (token.owner !== userId) {
-      await updateCache();
-    }
-  }
+  const tokensRef = useRef(tokens);
+  useEffect(() => {
+    tokensRef.current = tokens;
+  }, [tokens]);

-  async function removeToken(id) {
-    await database.table("tokens").delete(id);
-    setTokens((prevTokens) => {
-      const filtered = prevTokens.filter((token) => token.id !== id);
-      return filtered;
-    });
-  }
+  const getToken = useCallback((tokenId) => {
+    return tokensRef.current.find((token) => token.id === tokenId);
+  }, []);

-  async function removeTokens(ids) {
-    await database.table("tokens").bulkDelete(ids);
-    setTokens((prevTokens) => {
-      const filtered = prevTokens.filter((token) => !ids.includes(token.id));
-      return filtered;
-    });
-  }
-
-  async function updateToken(id, update) {
-    const change = { ...update, lastModified: Date.now() };
-    await database.table("tokens").update(id, change);
-    setTokens((prevTokens) => {
-      const newTokens = [...prevTokens];
-      const i = newTokens.findIndex((token) => token.id === id);
-      if (i > -1) {
-        newTokens[i] = { ...newTokens[i], ...change };
-      }
-      return newTokens;
-    });
-  }
-
-  async function updateTokens(ids, update) {
-    const change = { ...update, lastModified: Date.now() };
-    await Promise.all(
-      ids.map((id) => database.table("tokens").update(id, change))
-    );
-    setTokens((prevTokens) => {
-      const newTokens = [...prevTokens];
-      for (let id of ids) {
-        const i = newTokens.findIndex((token) => token.id === id);
-        if (i > -1) {
-          newTokens[i] = { ...newTokens[i], ...change };
-        }
-      }
-      return newTokens;
-    });
-  }
-
-  async function putToken(token) {
-    await database.table("tokens").put(token);
-    setTokens((prevTokens) => {
-      const newTokens = [...prevTokens];
-      const i = newTokens.findIndex((t) => t.id === token.id);
-      if (i > -1) {
-        newTokens[i] = { ...newTokens[i], ...token };
-      } else {
-        newTokens.unshift(token);
-      }
-      return newTokens;
-    });
-    if (token.owner !== userId) {
-      await updateCache();
-    }
-  }
+  const getTokenFromDB = useCallback(
+    async (tokenId) => {
+      let token = await database.table("tokens").get(tokenId);
+      return token;
+    },
+    [database]
+  );

   /**
    * Keep up to cachedTokenMax amount of tokens that you don't own
    * Sorted by when they we're last used
    */
-  async function updateCache() {
+  const updateCache = useCallback(async () => {
     const cachedTokens = await database
       .table("tokens")
       .where("owner")
@@ -141,11 +94,96 @@ export function TokenDataProvider({ children }) {
         return prevTokens.filter((token) => !idsToDelete.includes(token.id));
       });
     }
-  }
+  }, [database, userId]);

-  function getToken(tokenId) {
-    return tokens.find((token) => token.id === tokenId);
-  }
+  const addToken = useCallback(
+    async (token) => {
+      await database.table("tokens").add(token);
+      setTokens((prevTokens) => [token, ...prevTokens]);
+      if (token.owner !== userId) {
+        await updateCache();
+      }
+    },
+    [database, updateCache, userId]
+  );
+
+  const removeToken = useCallback(
+    async (id) => {
+      await database.table("tokens").delete(id);
+      setTokens((prevTokens) => {
+        const filtered = prevTokens.filter((token) => token.id !== id);
+        return filtered;
+      });
+    },
+    [database]
+  );
+
+  const removeTokens = useCallback(
+    async (ids) => {
+      await database.table("tokens").bulkDelete(ids);
+      setTokens((prevTokens) => {
+        const filtered = prevTokens.filter((token) => !ids.includes(token.id));
+        return filtered;
+      });
+    },
+    [database]
+  );
+
+  const updateToken = useCallback(
+    async (id, update) => {
+      const change = { ...update, lastModified: Date.now() };
+      await database.table("tokens").update(id, change);
+      setTokens((prevTokens) => {
+        const newTokens = [...prevTokens];
+        const i = newTokens.findIndex((token) => token.id === id);
+        if (i > -1) {
+          newTokens[i] = { ...newTokens[i], ...change };
+        }
+        return newTokens;
+      });
+    },
+    [database]
+  );
+
+  const updateTokens = useCallback(
+    async (ids, update) => {
+      const change = { ...update, lastModified: Date.now() };
+      await Promise.all(
+        ids.map((id) => database.table("tokens").update(id, change))
+      );
+      setTokens((prevTokens) => {
+        const newTokens = [...prevTokens];
+        for (let id of ids) {
+          const i = newTokens.findIndex((token) => token.id === id);
+          if (i > -1) {
+            newTokens[i] = { ...newTokens[i], ...change };
+          }
+        }
+        return newTokens;
+      });
+    },
+    [database]
+  );
+
+  const putToken = useCallback(
+    async (token) => {
+      await database.table("tokens").put(token);
+      setTokens((prevTokens) => {
+        const newTokens = [...prevTokens];
+        const i = newTokens.findIndex((t) => t.id === token.id);
+        if (i > -1) {
+          newTokens[i] = { ...newTokens[i], ...token };
+        } else {
+          newTokens.unshift(token);
+        }
+        return newTokens;
+      });
+      if (token.owner !== userId) {
+        await updateCache();
+      }
+    },
+    [database, updateCache, userId]
+  );

   const ownedTokens = tokens.filter((token) => token.owner === userId);

@@ -166,6 +204,7 @@ export function TokenDataProvider({ children }) {
     getToken,
     tokensById,
     tokensLoading,
+    getTokenFromDB,
   };

   return (
diff --git a/src/network/NetworkedMapAndTokens.js b/src/network/NetworkedMapAndTokens.js
index dcd8dbd..d73d02e 100644
--- a/src/network/NetworkedMapAndTokens.js
+++ b/src/network/NetworkedMapAndTokens.js
@@ -36,9 +36,13 @@ function NetworkedMapAndTokens({ session }) {
   } = useContext(MapLoadingContext);

   const { putToken, getToken, updateToken } = useContext(TokenDataContext);
-  const { putMap, updateMap, getMapFromDB, updateMapState } = useContext(
-    MapDataContext
-  );
+  const {
+    putMap,
+    updateMap,
+    getMap,
+    getMapFromDB,
+    updateMapState,
+  } = useContext(MapDataContext);

   const [currentMap, setCurrentMap] = useState(null);
   const [currentMapState, setCurrentMapState] = useNetworkedState(
@@ -129,8 +133,10 @@ function NetworkedMapAndTokens({ session }) {
       }

       if (asset.type === "map") {
-        const cachedMap = await getMapFromDB(asset.id);
-        if (cachedMap && cachedMap.lastModified >= asset.lastModified) {
+        const cachedMap = getMap(asset.id);
+        if (cachedMap && cachedMap.lastModified === asset.lastModified) {
+          continue;
+        } else if (cachedMap && cachedMap.lastModified > asset.lastModified) {
           // Update last used for cache invalidation
           const lastUsed = Date.now();
           await updateMap(cachedMap.id, { lastUsed });
@@ -140,7 +146,12 @@
         }
       } else if (asset.type === "token") {
         const cachedToken = getToken(asset.id);
-        if (cachedToken && cachedToken.lastModified >= asset.lastModified) {
+        if (cachedToken && cachedToken.lastModified === asset.lastModified) {
+          continue;
+        } else if (
+          cachedToken &&
+          cachedToken.lastModified > asset.lastModified
+        ) {
           // Update last used for cache invalidation
           const lastUsed = Date.now();
           await updateToken(cachedToken.id, { lastUsed });
@@ -152,8 +163,16 @@
     }
     requestAssetsIfNeeded();
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [assetManifest, partyState, session]);
+  }, [
+    assetManifest,
+    partyState,
+    session,
+    getMap,
+    getToken,
+    updateMap,
+    updateToken,
+    userId,
+  ]);

   /**
    * Map state
    */
@@ -172,8 +191,7 @@
     ) {
       updateMapState(debouncedMapState.mapId, debouncedMapState);
     }
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [currentMap, debouncedMapState, userId, database]);
+  }, [currentMap, debouncedMapState, userId, database, updateMapState]);

   function handleMapChange(newMap, newMapState) {
     // Clear map before sending new one
@@ -402,10 +420,9 @@ function NetworkedMapAndTokens({ session }) {

   async function handleSocketMap(map) {
     if (map) {
-      // If we're the owner get the full map from the database
-      if (map.type === "file" && map.owner === userId) {
+      if (map.type === "file") {
         const fullMap = await getMapFromDB(map.id);
-        setCurrentMap(fullMap);
+        setCurrentMap(fullMap || map);
       } else {
         setCurrentMap(map);
       }
@@ -428,19 +445,19 @@
   const canChangeMap = !isLoading;

   const canEditMapDrawing =
-    currentMap !== null &&
-    currentMapState !== null &&
+    currentMap &&
+    currentMapState &&
     (currentMapState.editFlags.includes("drawing") ||
       currentMap.owner === userId);

   const canEditFogDrawing =
-    currentMap !== null &&
-    currentMapState !== null &&
+    currentMap &&
+    currentMapState &&
     (currentMapState.editFlags.includes("fog") ||
       currentMap.owner === userId);

   const canEditNotes =
-    currentMap !== null &&
-    currentMapState !== null &&
+    currentMap &&
+    currentMapState &&
     (currentMapState.editFlags.includes("notes") ||
       currentMap.owner === userId);

@@ -448,8 +465,8 @@
   // If we have a map and state and have the token permission disabled
   // and are not the map owner
   if (
-    currentMapState !== null &&
-    currentMap !== null &&
+    currentMapState &&
+    currentMap &&
     !currentMapState.editFlags.includes("tokens") &&
     currentMap.owner !== userId
   ) {
diff --git a/src/workers/DatabaseWorker.js b/src/workers/DatabaseWorker.js
index 4c5fff3..40db4c8 100644
--- a/src/workers/DatabaseWorker.js
+++ b/src/workers/DatabaseWorker.js
@@ -4,13 +4,24 @@ import { getDatabase } from "../database";

 // Worker to load large amounts of database data on a separate thread
 let obj = {
-  data: [],
-  async loadData(table) {
-    this.data = [];
+  data: null,
+  /**
+   * Load either a whole table or an individual item from the DB
+   * @param {string} table Table to load from
+   * @param {string|undefined} key Optional database key to load; if undefined the whole table will be loaded
+   */
+  async loadData(table, key) {
     try {
       let db = getDatabase({});
-      // Use a cursor instead of toArray to prevent IPC max size error
-      await db.table(table).each((map) => this.data.push(map));
+      if (key) {
+        // Load specific item
+        this.data = await db.table(table).get(key);
+      } else {
+        // Load entire table
+        this.data = [];
+        // Use a cursor instead of toArray to prevent IPC max size error
+        await db.table(table).each((item) => this.data.push(item));
+      }
     } catch {}
   },
 };
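Editorial sketch (not part of the patch): the asset check in `requestAssetsIfNeeded` now distinguishes three cases instead of the old single `>=` comparison. Expressed as a standalone helper with hypothetical names, the rule is:

```js
// Decide what to do with a manifest entry given what is cached locally.
// "skip"    - cached copy matches exactly, nothing to do
// "touch"   - cached copy is newer, only bump lastUsed for cache eviction
// "request" - nothing cached or the manifest copy is newer, fetch it
function cacheDecision(cachedAsset, manifestAsset) {
  if (!cachedAsset) {
    return "request";
  }
  if (cachedAsset.lastModified === manifestAsset.lastModified) {
    return "skip";
  }
  if (cachedAsset.lastModified > manifestAsset.lastModified) {
    return "touch";
  }
  return "request";
}
```

Reading the cached entry through the in-memory `getMap`/`getToken` instead of a database query keeps this check synchronous inside the effect.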
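Editorial usage sketch for the reworked worker API. The `DatabaseWorker` import path below is an assumption (match however the contexts above import it); the Comlink calls mirror the ones already shown in `loadMaps`/`loadTokens`:

```js
import * as Comlink from "comlink";
// Assumed import; use the same worker import the contexts use.
import DatabaseWorker from "../workers/DatabaseWorker";

// Wrap once at module scope, as MapDataContext/TokenDataContext now do,
// so a single worker instance is reused instead of one per load.
const worker = Comlink.wrap(new DatabaseWorker());

// No key: loads the whole table through a cursor and exposes it on worker.data.
async function loadAllTokens() {
  await worker.loadData("tokens");
  return await worker.data;
}

// With a key: loads just one record, so worker.data is a single item (or undefined).
async function loadOneToken(tokenId) {
  await worker.loadData("tokens", tokenId);
  return await worker.data;
}
```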