mirror of https://github.com/OpenDiablo2/OpenDiablo2
synced 2025-02-02 22:57:04 -05:00

Merge branch 'master' into anim-data-encoder

commit ab14168f50
@ -80,24 +80,17 @@ type COF struct {
 }
 
 // Unmarshal a byte slice to this COF
 // nolint:funlen // no need to change
 func (c *COF) Unmarshal(fileData []byte) error {
-	streamReader := d2datautils.CreateStreamReader(fileData)
-
-	var b []byte
-
-	var err error
-
-	b, err = streamReader.ReadBytes(numHeaderBytes)
+	streamReader := d2datautils.CreateStreamReader(fileData)
+
+	headerBytes, err := streamReader.ReadBytes(numHeaderBytes)
 	if err != nil {
 		return err
 	}
 
-	c.NumberOfLayers = int(b[headerNumLayers])
-	c.FramesPerDirection = int(b[headerFramesPerDir])
-	c.NumberOfDirections = int(b[headerNumDirs])
-	c.unknownHeaderBytes = b[headerNumDirs+1 : headerSpeed]
-	c.Speed = int(b[headerSpeed])
+	c.loadHeader(headerBytes)
 
 	c.unknownBodyBytes, err = streamReader.ReadBytes(numUnknownBodyBytes)
 	if err != nil {
@ -107,10 +100,44 @@ func (c *COF) Unmarshal(fileData []byte) error {
 	c.CofLayers = make([]CofLayer, c.NumberOfLayers)
 	c.CompositeLayers = make(map[d2enum.CompositeType]int)
 
+	err = c.loadCOFLayers(streamReader)
+	if err != nil {
+		return err
+	}
+
+	animationFramesData, err := streamReader.ReadBytes(c.FramesPerDirection)
+	if err != nil {
+		return err
+	}
+
+	c.loadAnimationFrames(animationFramesData)
+
+	priorityLen := c.FramesPerDirection * c.NumberOfDirections * c.NumberOfLayers
+	c.Priority = make([][][]d2enum.CompositeType, c.NumberOfDirections)
+
+	priorityBytes, err := streamReader.ReadBytes(priorityLen)
+	if err != nil {
+		return err
+	}
+
+	c.loadPriority(priorityBytes)
+
+	return nil
+}
+
+func (c *COF) loadHeader(b []byte) {
+	c.NumberOfLayers = int(b[headerNumLayers])
+	c.FramesPerDirection = int(b[headerFramesPerDir])
+	c.NumberOfDirections = int(b[headerNumDirs])
+	c.unknownHeaderBytes = b[headerNumDirs+1 : headerSpeed]
+	c.Speed = int(b[headerSpeed])
+}
+
+func (c *COF) loadCOFLayers(streamReader *d2datautils.StreamReader) error {
 	for i := 0; i < c.NumberOfLayers; i++ {
 		layer := CofLayer{}
 
-		b, err = streamReader.ReadBytes(numLayerBytes)
+		b, err := streamReader.ReadBytes(numLayerBytes)
 		if err != nil {
 			return err
 		}
@ -128,25 +155,18 @@ func (c *COF) Unmarshal(fileData []byte) error {
 		c.CompositeLayers[layer.Type] = i
 	}
 
-	b, err = streamReader.ReadBytes(c.FramesPerDirection)
-	if err != nil {
-		return err
-	}
+	return nil
+}
 
+func (c *COF) loadAnimationFrames(b []byte) {
 	c.AnimationFrames = make([]d2enum.AnimationFrame, c.FramesPerDirection)
 
 	for i := range b {
 		c.AnimationFrames[i] = d2enum.AnimationFrame(b[i])
 	}
+}
 
-	priorityLen := c.FramesPerDirection * c.NumberOfDirections * c.NumberOfLayers
-	c.Priority = make([][][]d2enum.CompositeType, c.NumberOfDirections)
-
-	priorityBytes, err := streamReader.ReadBytes(priorityLen)
-	if err != nil {
-		return err
-	}
-
+func (c *COF) loadPriority(priorityBytes []byte) {
 	priorityIndex := 0
 
 	for direction := 0; direction < c.NumberOfDirections; direction++ {
@ -159,8 +179,6 @@ func (c *COF) Unmarshal(fileData []byte) error {
 			}
 		}
 	}
-
-	return nil
 }
 
 // Marshal this COF to a byte slice
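Taken together, Unmarshal now just reads each section of the COF binary and hands it to loadHeader, loadCOFLayers, loadAnimationFrames, and loadPriority. A minimal decoding sketch from outside the package; the file path is illustrative and not something this diff provides:

	package main

	import (
		"fmt"
		"io/ioutil"
		"log"

		"github.com/OpenDiablo2/OpenDiablo2/d2common/d2fileformats/d2cof"
	)

	func main() {
		// illustrative path; COF records normally come out of the game's MPQ archives
		fileData, err := ioutil.ReadFile("testdata/example.cof")
		if err != nil {
			log.Fatal(err)
		}

		cof := &d2cof.COF{}
		if err := cof.Unmarshal(fileData); err != nil {
			log.Fatal(err)
		}

		fmt.Println(cof.NumberOfLayers, cof.NumberOfDirections, cof.FramesPerDirection)
	}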
d2common/d2fileformats/d2cof/helpers.go (new file, 27 lines)
@ -0,0 +1,27 @@
+package d2cof
+
+// FPS returns the FPS value based on the COF's speed
+func (c *COF) FPS() float64 {
+	const (
+		baseFPS      = 25
+		speedDivisor = 256
+	)
+
+	fps := baseFPS * (float64(c.Speed) / speedDivisor)
+	if fps == 0 {
+		fps = baseFPS
+	}
+
+	return fps
+}
+
+// Duration returns the animation's duration
+func (c *COF) Duration() float64 {
+	const (
+		milliseconds = 1000
+	)
+
+	frameDelay := milliseconds / c.FPS()
+
+	return float64(c.FramesPerDirection) * frameDelay
+}
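Worked through with made-up numbers: a COF with Speed 128 gives 25 * (128/256) = 12.5 FPS, so one frame lasts 1000 / 12.5 = 80 ms, and 8 frames per direction make Duration return 640 ms; a Speed of 0 falls back to the base 25 FPS. A small sketch, assuming those illustrative values:

	package main

	import (
		"fmt"

		"github.com/OpenDiablo2/OpenDiablo2/d2common/d2fileformats/d2cof"
	)

	func main() {
		// Speed and FramesPerDirection are illustrative values, not real game data
		cof := &d2cof.COF{Speed: 128, FramesPerDirection: 8}

		fmt.Println(cof.FPS())      // 25 * (128/256) = 12.5
		fmt.Println(cof.Duration()) // 8 * (1000/12.5) = 640 (milliseconds)
	}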
@ -41,3 +41,15 @@ func Load(data []byte) (*PL2, error) {
 
 	return result, nil
 }
+
+// Marshal encodes PL2 back into byte slice
+func (p *PL2) Marshal() []byte {
+	restruct.EnableExprBeta()
+
+	data, err := restruct.Pack(binary.LittleEndian, p)
+	if err != nil {
+		panic(err)
+	}
+
+	return data
+}
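A minimal round-trip sketch of the new Marshal against the existing Load, called from outside the package; the mutated color value is arbitrary and mirrors the test below:

	package main

	import (
		"fmt"
		"log"

		"github.com/OpenDiablo2/OpenDiablo2/d2common/d2fileformats/d2pl2"
	)

	func main() {
		original := &d2pl2.PL2{}
		original.BasePalette.Colors[0].R = 8 // arbitrary test value

		data := original.Marshal()

		decoded, err := d2pl2.Load(data)
		if err != nil {
			log.Fatal(err)
		}

		fmt.Println(decoded.BasePalette.Colors[0].R) // 8
	}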
d2common/d2fileformats/d2pl2/pl2_test.go (new file, 40 lines)
@ -0,0 +1,40 @@
+package d2pl2
+
+import (
+	"testing"
+)
+
+func exampleData() *PL2 {
+	result := &PL2{
+		BasePalette:       PL2Palette{},
+		SelectedUintShift: PL2PaletteTransform{},
+		RedTones:          PL2PaletteTransform{},
+		GreenTones:        PL2PaletteTransform{},
+		BlueTones:         PL2PaletteTransform{},
+		DarkendColorShift: PL2PaletteTransform{},
+	}
+
+	result.BasePalette.Colors[0].R = 8
+	result.DarkendColorShift.Indices[0] = 123
+
+	return result
+}
+
+func TestPL2_MarshalUnmarshal(t *testing.T) {
+	pl2 := exampleData()
+
+	data := pl2.Marshal()
+
+	newPL2, err := Load(data)
+	if err != nil {
+		t.Error(err)
+	}
+
+	if newPL2.BasePalette.Colors[0] != pl2.BasePalette.Colors[0] {
+		t.Fatal("unexpected length")
+	}
+
+	if pl2.DarkendColorShift.Indices[0] != newPL2.DarkendColorShift.Indices[0] {
+		t.Fatal("unexpected index set")
+	}
+}
@ -1,89 +1,77 @@
 package d2tbl
 
 import (
-	"errors"
-	"sort"
+	"fmt"
 
 	"strconv"
 
 	"github.com/OpenDiablo2/OpenDiablo2/d2common/d2datautils"
 )
 
 // TextDictionary is a string map
-type TextDictionary struct {
-	crcBytes      []byte
-	elementIndex  []uint16
-	hashTableSize uint32
-	version       byte
-	stringOffset  uint32
-	unknown1      uint32
-	fileSize      uint32
-	hashEntries   []*textDictionaryHashEntry
-	Entries       map[string]string
-}
+type TextDictionary map[string]string
 
-func (td *TextDictionary) loadHashEntries(br *d2datautils.StreamReader) error {
-	var err error
-
-	for i := 0; i < len(td.hashEntries); i++ {
+func (td TextDictionary) loadHashEntries(hashEntries []*textDictionaryHashEntry, br *d2datautils.StreamReader) error {
+	for i := 0; i < len(hashEntries); i++ {
 		entry := textDictionaryHashEntry{}
 
-		entry.active, err = br.ReadByte()
+		active, err := br.ReadByte()
 		if err != nil {
-			return err
+			return fmt.Errorf("reading active: %v", err)
 		}
 
+		entry.IsActive = active > 0
+
 		entry.Index, err = br.ReadUInt16()
 		if err != nil {
-			return err
+			return fmt.Errorf("reading Index: %v", err)
 		}
 
 		entry.HashValue, err = br.ReadUInt32()
 		if err != nil {
-			return err
+			return fmt.Errorf("reading hash value: %v", err)
 		}
 
 		entry.IndexString, err = br.ReadUInt32()
 		if err != nil {
-			return err
+			return fmt.Errorf("reading index string pos: %v", err)
 		}
 
 		entry.NameString, err = br.ReadUInt32()
 		if err != nil {
-			return err
+			return fmt.Errorf("reading name string pos: %v", err)
 		}
 
 		entry.NameLength, err = br.ReadUInt16()
 		if err != nil {
-			return err
+			return fmt.Errorf("reading name length: %v", err)
 		}
 
-		td.hashEntries[i] = &entry
+		hashEntries[i] = &entry
 	}
 
-	for idx := range td.hashEntries {
-		if !td.hashEntries[idx].IsActive() {
+	for idx := range hashEntries {
+		if !hashEntries[idx].IsActive {
 			continue
 		}
 
-		if err := td.loadHashEntry(idx, td.hashEntries[idx], br); err != nil {
-			return err
+		if err := td.loadHashEntry(idx, hashEntries[idx], br); err != nil {
+			return fmt.Errorf("loading entry %d: %v", idx, err)
 		}
 	}
 
 	return nil
 }
 
-func (td *TextDictionary) loadHashEntry(idx int, hashEntry *textDictionaryHashEntry, br *d2datautils.StreamReader) error {
-	var err error
-
+func (td TextDictionary) loadHashEntry(idx int, hashEntry *textDictionaryHashEntry, br *d2datautils.StreamReader) error {
 	br.SetPosition(uint64(hashEntry.NameString))
 
 	nameVal, err := br.ReadBytes(int(hashEntry.NameLength - 1))
 	if err != nil {
-		return err
+		return fmt.Errorf("reading name value: %v", err)
 	}
 
-	hashEntry.name = string(nameVal)
+	value := string(nameVal)
 
 	br.SetPosition(uint64(hashEntry.IndexString))
 
@ -96,39 +84,31 @@ func (td *TextDictionary) loadHashEntry(idx int, hashEntry *textDictionaryHashEn
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
return fmt.Errorf("reading kay char: %v", err)
|
||||
}
|
||||
|
||||
key += string(b)
|
||||
}
|
||||
|
||||
hashEntry.key = key
|
||||
|
||||
if hashEntry.key == "x" || hashEntry.key == "X" {
|
||||
if key == "x" || key == "X" {
|
||||
key = "#" + strconv.Itoa(idx)
|
||||
}
|
||||
|
||||
_, exists := td.Entries[key]
|
||||
_, exists := td[key]
|
||||
if !exists {
|
||||
td.Entries[key] = hashEntry.name
|
||||
td[key] = value
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type textDictionaryHashEntry struct {
|
||||
name string
|
||||
key string
|
||||
IsActive bool
|
||||
Index uint16
|
||||
HashValue uint32
|
||||
IndexString uint32
|
||||
NameString uint32
|
||||
Index uint16
|
||||
NameLength uint16
|
||||
active byte
|
||||
}
|
||||
|
||||
func (t *textDictionaryHashEntry) IsActive() bool {
|
||||
return t.active > 0
|
||||
}
|
||||
|
||||
const (
|
||||
@ -136,133 +116,130 @@ const (
 )
 
 // LoadTextDictionary loads the text dictionary from the given data
-func LoadTextDictionary(dictionaryData []byte) (*TextDictionary, error) {
-	var err error
-
-	lookupTable := &TextDictionary{
-		Entries: make(map[string]string),
-	}
+func LoadTextDictionary(dictionaryData []byte) (TextDictionary, error) {
+	lookupTable := make(TextDictionary)
 
 	br := d2datautils.CreateStreamReader(dictionaryData)
 
 	// skip past the CRC
-	lookupTable.crcBytes, err = br.ReadBytes(crcByteCount)
-	if err != nil {
-		return nil, err
-	}
+	_, _ = br.ReadBytes(crcByteCount)
+
+	var err error
 
 	/*
		number of indicates
		(https://d2mods.info/forum/viewtopic.php?p=202077#p202077)
		Indices ...
		An array of WORD. Each entry is an index into the hash table.
		The actual string key index in the .bin file is an index into this table.
		So to get a string from a key index ...
 	*/
 	numberOfElements, err := br.ReadUInt16()
 	if err != nil {
-		return nil, err
+		return nil, fmt.Errorf("reading number of elements: %v", err)
 	}
 
-	lookupTable.hashTableSize, err = br.ReadUInt32()
+	hashTableSize, err := br.ReadUInt32()
 	if err != nil {
-		return nil, err
+		return nil, fmt.Errorf("reading hash table size: %v", err)
 	}
 
-	// Version (always 0)
-	if lookupTable.version, err = br.ReadByte(); err != nil {
-		return nil, errors.New("error reading Version record")
-	}
-
-	// StringOffset
-	lookupTable.stringOffset, err = br.ReadUInt32()
+	// Version
+	_, err = br.ReadByte()
 	if err != nil {
-		return nil, errors.New("error reading string offset")
+		return nil, fmt.Errorf("reading version: %v", err)
 	}
 
+	_, _ = br.ReadUInt32() // StringOffset
+
 	// When the number of times you have missed a match with a
 	// hash key equals this value, you give up because it is not there.
-	lookupTable.unknown1, err = br.ReadUInt32()
-	if err != nil {
-		return nil, err
-	}
+	_, _ = br.ReadUInt32()
 
-	// FileSize
-	lookupTable.fileSize, err = br.ReadUInt32()
-	if err != nil {
-		return nil, err
-	}
+	_, _ = br.ReadUInt32() // FileSize
 
 	elementIndex := make([]uint16, numberOfElements)
 	for i := 0; i < int(numberOfElements); i++ {
 		elementIndex[i], err = br.ReadUInt16()
 		if err != nil {
-			return nil, err
+			return nil, fmt.Errorf("reading element index %d: %v", i, err)
 		}
 	}
 
-	lookupTable.elementIndex = elementIndex
+	hashEntries := make([]*textDictionaryHashEntry, hashTableSize)
 
-	lookupTable.hashEntries = make([]*textDictionaryHashEntry, lookupTable.hashTableSize)
-
-	err = lookupTable.loadHashEntries(br)
+	err = lookupTable.loadHashEntries(hashEntries, br)
 	if err != nil {
-		return nil, err
+		return nil, fmt.Errorf("loading hash entries: %v", err)
 	}
 
 	return lookupTable, nil
 }
 
-// Marshal encodes text dictionary back to byte slice
+// Marshal encodes text dictionary back into byte slice
 func (td *TextDictionary) Marshal() []byte {
 	// create stream writer
 	sw := d2datautils.CreateStreamWriter()
 
-	sw.PushBytes(td.crcBytes...)
-	sw.PushUint16(uint16(len(td.elementIndex)))
-	sw.PushUint32(td.hashTableSize)
-	sw.PushBytes(td.version)
-	sw.PushUint32(td.stringOffset)
-	sw.PushUint32(td.unknown1)
+	// https://github.com/OpenDiablo2/OpenDiablo2/issues/1043
+	sw.PushBytes(0, 0)
 
-	sw.PushUint32(td.fileSize)
+	sw.PushUint16(0)
 
-	for _, i := range td.elementIndex {
-		sw.PushUint16(i)
+	sw.PushInt32(int32(len(*td)))
+
+	// version (always 0)
+	sw.PushBytes(0)
+
+	// offset of start of data (unnecessary for our decoder)
+	sw.PushUint32(0)
+
+	// Max retry count for a hash hit.
+	sw.PushUint32(0)
+
+	// offset to end of data (noop)
+	sw.PushUint32(0)
+
+	// indicates (len = 0, so nothing here)
+
+	// nolint:gomnd // 17 comes from the size of one "data-header index"
+	// dataPos is a position, when we're placing data stream
+	dataPos := len(sw.GetBytes()) + 17*len(*td)
+
+	for key, value := range *td {
+		// non-zero if record is used (for us, every record is used ;-)
+		sw.PushBytes(1)
+
+		// generally unused;
+		// string key index (used in .bin)
+		sw.PushUint16(0)
+
+		// also unused in our decoder
+		// calculated hash of the string.
+		sw.PushUint32(0)
+
+		sw.PushUint32(uint32(dataPos))
+		dataPos += len(key) + 1
+
+		sw.PushUint32(uint32(dataPos))
+		dataPos += len(value) + 1
+
+		sw.PushUint16(uint16(len(value) + 1))
 	}
 
-	for i := 0; i < len(td.hashEntries); i++ {
-		sw.PushBytes(td.hashEntries[i].active)
-		sw.PushUint16(td.hashEntries[i].Index)
-		sw.PushUint32(td.hashEntries[i].HashValue)
-		sw.PushUint32(td.hashEntries[i].IndexString)
-		sw.PushUint32(td.hashEntries[i].NameString)
-		sw.PushUint16(td.hashEntries[i].NameLength)
-	}
+	// data stream: put all data in appropriate order
+	for key, value := range *td {
+		for _, i := range key {
+			sw.PushBytes(byte(i))
+		}
 
-	// values are table entries data (key & values)
-	var values map[int]string = make(map[int]string)
-	// valuesSorted are sorted values
-	var valuesSorted map[int]string = make(map[int]string)
+		// 0 as separator
+		sw.PushBytes(0)
 
-	// add values key / names to map
-	for _, i := range td.hashEntries {
-		values[int(i.IndexString)] = i.key
-		values[int(i.NameString)] = i.name
-	}
+		for _, i := range value {
+			sw.PushBytes(byte(i))
+		}
 
-	// added map keys
-	keys := make([]int, 0, len(values))
-	for k := range values {
-		keys = append(keys, k)
-	}
-
-	// sort keys
-	sort.Ints(keys)
-
-	// create sorted values map
-	for _, k := range keys {
-		valuesSorted[k] = values[k]
-	}
-
-	// add first value (without 0-byte separator)
-	sw.PushBytes([]byte(valuesSorted[keys[0]])...)
-
-	// adds values to result
-	for i := 1; i < len(valuesSorted); i++ {
-		sw.PushBytes([]byte(valuesSorted[keys[i]])...)
 		// 0 as separator
 		sw.PushBytes(0)
 	}
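The 17 in dataPos is the serialized size of one hash-table record as Marshal writes it: a 1-byte used flag, a 2-byte key index, a 4-byte hash, a 4-byte key offset, a 4-byte value offset, and a 2-byte value length (including the trailing 0). A sketch of that arithmetic; the constant names are illustrative and not part of the diff:

	package d2tbl

	// Illustrative breakdown of one serialized hash-table record written by Marshal.
	const (
		usedFlagSize    = 1 // non-zero when the record is used
		keyIndexSize    = 2 // string key index, unused by this decoder
		hashSize        = 4 // hash of the key, unused by this decoder
		keyOffsetSize   = 4 // offset of the key in the data stream
		valueOffsetSize = 4 // offset of the value in the data stream
		valueLenSize    = 2 // value length, including the trailing 0 byte

		// hashRecordSize is the 17 used when precomputing dataPos.
		hashRecordSize = usedFlagSize + keyIndexSize + hashSize + keyOffsetSize + valueOffsetSize + valueLenSize
	)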
d2common/d2fileformats/d2tbl/text_dictionary_test.go (new file, 37 lines)
@ -0,0 +1,37 @@
+package d2tbl
+
+import (
+	"testing"
+)
+
+func exampleData() *TextDictionary {
+	result := &TextDictionary{
+		"abc":        "def",
+		"someStr":    "Some long string",
+		"teststring": "TeSt",
+	}
+
+	return result
+}
+
+func TestTBL_Marshal(t *testing.T) {
+	tbl := exampleData()
+	data := tbl.Marshal()
+
+	newTbl, err := LoadTextDictionary(data)
+	if err != nil {
+		t.Error(err)
+	}
+
+	for key, value := range *tbl {
+		newValue, ok := newTbl[key]
+
+		if !ok {
+			t.Fatal("string wasn't encoded to table")
+		}
+
+		if newValue != value {
+			t.Fatal("unexpected value set")
+		}
+	}
+}
@ -65,7 +65,7 @@ type AssetManager struct {
 	*d2util.Logger
 	*d2loader.Loader
 
-	tables    []*d2tbl.TextDictionary
+	tables    []d2tbl.TextDictionary
 	dt1s      d2interface.Cache
 	ds1s      d2interface.Cache
 	cofs      d2interface.Cache
@ -268,7 +268,7 @@ func (am *AssetManager) LoadPalette(palettePath string) (d2interface.Palette, er
 }
 
 // LoadStringTable loads a string table from the given path
-func (am *AssetManager) LoadStringTable(tablePath string) (*d2tbl.TextDictionary, error) {
+func (am *AssetManager) LoadStringTable(tablePath string) (d2tbl.TextDictionary, error) {
 	data, err := am.LoadFile(tablePath)
 	if err != nil {
 		return nil, err
@ -302,7 +302,7 @@ func (am *AssetManager) TranslateString(input interface{}) string {
 	}
 
 	for idx := range am.tables {
-		if value, found := am.tables[idx].Entries[key]; found {
+		if value, found := am.tables[idx][key]; found {
 			return value
 		}
 	}
 
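With TextDictionary now a plain map type, TranslateString indexes each loaded table directly instead of going through an Entries field. A minimal sketch of that lookup; the tables and keys are illustrative, in the engine they come from LoadStringTable:

	package main

	import (
		"fmt"

		"github.com/OpenDiablo2/OpenDiablo2/d2common/d2fileformats/d2tbl"
	)

	// findTranslation mirrors the loop in TranslateString: each table is a map,
	// so the key is looked up directly.
	func findTranslation(tables []d2tbl.TextDictionary, key string) (string, bool) {
		for idx := range tables {
			if value, found := tables[idx][key]; found {
				return value, true
			}
		}

		return "", false
	}

	func main() {
		// illustrative tables; in the engine these come from LoadStringTable
		tables := []d2tbl.TextDictionary{
			{"ok": "OK"},
			{"cancel": "Cancel"},
		}

		if value, found := findTranslation(tables, "cancel"); found {
			fmt.Println(value) // Cancel
		}
	}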
@ -27,7 +27,7 @@ func NewAssetManager(logLevel d2util.LogLevel) (*AssetManager, error) {
 	manager := &AssetManager{
 		Logger:     logger,
 		Loader:     loader,
-		tables:     make([]*d2tbl.TextDictionary, 0),
+		tables:     make([]d2tbl.TextDictionary, 0),
 		animations: d2cache.CreateCache(animationBudget),
 		fonts:      d2cache.CreateCache(fontBudget),
 		palettes:   d2cache.CreateCache(paletteBudget),