diff --git a/SavegameToolkit/ArkSavegame.cs b/SavegameToolkit/ArkSavegame.cs
index 2b3e88c..d672c5a 100644
--- a/SavegameToolkit/ArkSavegame.cs
+++ b/SavegameToolkit/ArkSavegame.cs
@@ -45,9 +45,15 @@ public class ArkSavegame : GameObjectContainerMixin, IConversionSupport
         [JsonProperty(Order = 7)]
         public override List<GameObject> Objects { get; } = new List<GameObject>();
 
+        public List<GameObject> Tribes { get; } = new List<GameObject>();
+        public List<GameObject> Profiles { get; } = new List<GameObject>();
         public List<Tuple<long, long>> StoredDataOffsets = new List<Tuple<long, long>>();
 
+        private ChunkedStore TribeDataStore { get; set; } = new ChunkedStore();
+        private ChunkedStore PlayerDataStore { get; set; } = new ChunkedStore();
+
+
         private int hibernationOffset;
 
         private int nameTableOffset;
@@ -87,6 +93,8 @@ public class ArkSavegame : GameObjectContainerMixin, IConversionSupport
         private int hibernationV9Check5;
         private int hibernationV9Check6;
 
+
+
         #region readBinary
 
         public void ReadBinary(ArkArchive archive, ReadingOptions options)
@@ -371,36 +379,15 @@ private void readBinaryHibernation(ArkArchive archive, ReadingOptions options)
             // it's assumed there are two new int32, making it a total of 6 unknown int32 along with the 4 version8 int32, the first two are -1 and 2, and all of these 6 int32 are repeated once.
             if (SaveVersion > 8 && hibernationV8Unknown1 == -1 && hibernationV8Unknown2 == 2)
             {
-                archive.DebugMessage("non-zero unknown V9 fields, expecting duplicated set per 349.10");
-                hibernationV9_34910Unknown1 = archive.ReadInt();
-                hibernationV9_34910Unknown2 = archive.ReadInt();
+                // Move back by those four integers, so we can use ChunkedStore
+                archive.Position -= sizeof(int) * 4;
+                TribeDataStore = new ChunkedStore();
+                PlayerDataStore = new ChunkedStore();
-                hibernationV9Check1 = archive.ReadInt();
-                hibernationV9Check2 = archive.ReadInt();
-                hibernationV9Check3 = archive.ReadInt();
-                hibernationV9Check4 = archive.ReadInt();
-                hibernationV9Check5 = archive.ReadInt();
-                hibernationV9Check6 = archive.ReadInt();
-
-                if (!(hibernationV8Unknown1 == hibernationV9Check1
-                    && hibernationV8Unknown2 == hibernationV9Check2
-                    && hibernationV8Unknown3 == hibernationV9Check3
-                    && hibernationV8Unknown4 == hibernationV9Check4
-                    && hibernationV9_34910Unknown1 == hibernationV9Check5
-                    && hibernationV9_34910Unknown2 == hibernationV9Check6))
-                {
-                    if (SaveVersion > 10)
-                    {
-                        //TODO:// more data reads - is it always 112 bytes?
-                        archive.SkipBytes(112);
+                TribeDataStore.ReadBinary(archive, options);
+                PlayerDataStore.ReadBinary(archive, options);
-                    }
-                    else
-                    {
-                        throw new NotSupportedException("349.10 workaround for duplicate unknown hibernation bytes failed");
-                    }
-                }
             }
 
             // No hibernate section if we reached the nameTable
@@ -447,6 +434,93 @@ private void readBinaryHibernation(ArkArchive archive, ReadingOptions options)
             }
         }
 
+        private void readBinaryStoredTribes(ArkArchive archive, ReadingOptions options)
+        {
+            if (!options.StoredTribes) return;
+
+            Tribes.Clear();
+
+            if (TribeDataStore != null && TribeDataStore.IndexChunks.Count > 0)
+            {
+                for (int tribeFileIndex = 0; tribeFileIndex < TribeDataStore.IndexChunks.Count; tribeFileIndex++)
+                {
+                    long storedIndexOffset = StoredDataOffsets[tribeFileIndex].Item1 + TribeDataStore.IndexChunks[tribeFileIndex].ArchiveOffset;
+                    long storedIndexSize = TribeDataStore.IndexChunks[tribeFileIndex].Size;
+                    long storedDataOffset = StoredDataOffsets[tribeFileIndex].Item1 + TribeDataStore.DataChunks[tribeFileIndex].ArchiveOffset;
+
+
+                    archive.Position = storedIndexOffset;
+                    long indexLimit = storedIndexSize + storedIndexOffset;
+
+                    List<long> tribeOffsets = new List<long>();
+                    while (archive.Position < indexLimit)
+                    {
+                        long tribeId = archive.ReadLong();
+                        long tribeOffset = archive.ReadLong();
+                        long tribeSize = archive.ReadLong();
+
+                        long tribeDataOffset = storedDataOffset + tribeOffset;
+                        tribeOffsets.Add(tribeDataOffset);
+                    }
+
+                    foreach (var tribeOffset in tribeOffsets)
+                    {
+                        archive.Position = tribeOffset;
+                        ArkStoreTribe storedTribe = new ArkStoreTribe();
+                        storedTribe.ReadBinary(archive, options);
+
+                        Tribes.AddRange(storedTribe.Objects);
+                    }
+                }
+
+            }
+        }
+
+        private void readBinaryStoredProfiles(ArkArchive archive, ReadingOptions options)
+        {
+            if (!options.StoredProfiles) return;
+
+            Profiles.Clear();
+
+            if (PlayerDataStore != null && PlayerDataStore.IndexChunks.Count > 0)
+            {
+                for (int playerFileIndex = 0; playerFileIndex < PlayerDataStore.IndexChunks.Count; playerFileIndex++)
+                {
+                    long storedIndexOffset = StoredDataOffsets[playerFileIndex].Item1 + PlayerDataStore.IndexChunks[playerFileIndex].ArchiveOffset;
+                    long storedIndexSize = PlayerDataStore.IndexChunks[playerFileIndex].Size;
+                    long storedDataOffset = StoredDataOffsets[playerFileIndex].Item1 + PlayerDataStore.DataChunks[playerFileIndex].ArchiveOffset;
+
+
+                    archive.Position = storedIndexOffset;
+                    long indexLimit = storedIndexSize + storedIndexOffset;
+
+                    List<long> playerOffsets = new List<long>();
+                    while (archive.Position < indexLimit)
+                    {
+                        long playerId = archive.ReadLong();
+                        long playerOffset = archive.ReadLong();
+                        long playerSize = archive.ReadLong();
+
+                        long playerDataOffset = storedDataOffset + playerOffset;
+                        playerOffsets.Add(playerDataOffset);
+                    }
+
+                    foreach (var playerOffset in playerOffsets)
+                    {
+                        archive.Position = playerOffset;
+                        ArkStoreProfile storedProfile = new ArkStoreProfile();
+                        storedProfile.ReadBinary(archive, options);
+
+                        Profiles.AddRange(storedProfile.Objects);
+                    }
+                }
+
+            }
+
+
+        }
+
+
         private void readBinaryStoredObjects(ArkArchive archive, ReadingOptions options)
         {
             if (!options.CryopodCreatures) return;
diff --git a/SavegameToolkit/ArkStoreProfile.cs b/SavegameToolkit/ArkStoreProfile.cs
new file mode 100644
index 0000000..ceda846
--- /dev/null
+++ b/SavegameToolkit/ArkStoreProfile.cs
@@ -0,0 +1,119 @@
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using SavegameToolkit.Propertys;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace SavegameToolkit
+{
+    internal class ArkStoreProfile : GameObjectContainerMixin, IConversionSupport, IPropertyContainer
+    {
+
+        public int ProfileVersion { get; set; }
+
+        public List<IProperty> Properties => profile.Properties;
+
+        private GameObject profile;
+        private long propertiesBlockOffset;
+
+        public GameObject Profile
+        {
+            get => profile;
+            set
+            {
+                if (profile != null)
+                {
+                    int oldIndex = Objects.IndexOf(profile);
+                    if (oldIndex > -1)
+                    {
+                        Objects.RemoveAt(oldIndex);
+                    }
+                }
+
+                profile = value;
+                if (value != null && Objects.IndexOf(value) == -1)
+                {
+                    Objects.Insert(0, value);
+                }
+            }
+        }
+
+        public void ReadBinary(ArkArchive archive, ReadingOptions options)
+        {
+
+            propertiesBlockOffset = archive.Position;
+            var useNameTable = archive.UseNameTable;
+            archive.UseNameTable = false;
+            try
+            {
+                ProfileVersion = archive.ReadInt();
+
+                if (ProfileVersion != 1)
+                {
+                    //throw new NotSupportedException("Unknown Profile Version " + ProfileVersion);
+                }
+
+                int profilesCount = archive.ReadInt();
+
+                Objects.Clear();
+                ObjectMap.Clear();
+                for (int i = 0; i < profilesCount; i++)
+                {
+                    addObject(new GameObject(archive), options.BuildComponentTree);
+                }
+
+                for (int i = 0; i < profilesCount; i++)
+                {
+                    GameObject gameObject = Objects[i];
+                    if (gameObject.ClassString == "PrimalPlayerData" || gameObject.ClassString == "PrimalPlayerDataBP_C")
+                    {
+                        profile = gameObject;
+                    }
+
+                    gameObject.LoadProperties(archive, new GameObject(), propertiesBlockOffset);
+                }
+            }
+            catch
+            {
+
+            }
+            archive.UseNameTable = useNameTable;
+            //var tekGrams = Objects.Where(o => o.ClassString.ToLower().Contains("canteen")).ToList();
+
+        }
+
+
+        public int CalculateSize()
+        {
+            int size = sizeof(int) * 2;
+
+            NameSizeCalculator nameSizer = ArkArchive.GetNameSizer(false);
+
+            size += Objects.Sum(o => o.Size(nameSizer));
+
+            propertiesBlockOffset = size;
+
+            size += Objects.Sum(o => o.PropertiesSize(nameSizer));
+            return size;
+        }
+
+        public void WriteBinary(ArkArchive archive, WritingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+
+        public void ReadJson(JToken node, ReadingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+
+        public void WriteJson(JsonTextWriter generator, WritingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+    }
+
+}
diff --git a/SavegameToolkit/ArkStoreTribe.cs b/SavegameToolkit/ArkStoreTribe.cs
new file mode 100644
index 0000000..87cf3cd
--- /dev/null
+++ b/SavegameToolkit/ArkStoreTribe.cs
@@ -0,0 +1,82 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+using SavegameToolkit.Propertys;
+
+namespace SavegameToolkit
+{
+
+    public class ArkStoreTribe : GameObjectContainerMixin, IConversionSupport, IPropertyContainer
+    {
+
+        public int TribeVersion { get; set; }
+
+        public GameObject Tribe { get; set; }
+
+        public List<IProperty> Properties => Tribe.Properties;
+
+        private long propertiesBlockOffset;
+
+        public void ReadBinary(ArkArchive archive, ReadingOptions options)
+        {
+
+            propertiesBlockOffset = archive.Position;
+            var useNameTable = archive.UseNameTable;
+            archive.UseNameTable = false;
+
+            TribeVersion = archive.ReadInt();
+
+            if (TribeVersion != 1)
+            {
+                throw new NotSupportedException("Unknown Tribe Version " + TribeVersion);
+            }
+
+            int tribesCount = archive.ReadInt();
+
+            Objects.Clear();
+            ObjectMap.Clear();
+            for (int i = 0; i < tribesCount; i++)
+            {
+                var newObject = new GameObject(archive);
+
+                addObject(newObject, false);
+            }
+
+            for (int i = 0; i < tribesCount; i++)
+            {
+                GameObject gameObject = Objects[i];
+                if (gameObject.ClassString == "PrimalTribeData")
+                {
+                    Tribe = gameObject;
+                }
+
+                gameObject.LoadProperties(archive, new GameObject(), propertiesBlockOffset);
+            }
+
+            archive.UseNameTable = useNameTable;
+        }
+
+        public void WriteBinary(ArkArchive archive, WritingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+
+        public int CalculateSize()
+        {
+            throw new NotImplementedException();
+        }
+
+        public void ReadJson(JToken node, ReadingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+
+        public void WriteJson(JsonTextWriter generator, WritingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+    }
+
+}
diff --git a/SavegameToolkit/GameObject.cs b/SavegameToolkit/GameObject.cs
index f45975d..eb677f7 100644
--- a/SavegameToolkit/GameObject.cs
+++ b/SavegameToolkit/GameObject.cs
@@ -279,7 +279,7 @@ private void readBinary(ArkArchive archive) {
            archive.HasUnknownData = true;
        }
 
-        public void LoadProperties(ArkArchive archive, GameObject next, int propertiesBlockOffset) {
+        public void LoadProperties(ArkArchive archive, GameObject next, long propertiesBlockOffset) {
            long offset = propertiesBlockOffset + propertiesOffset;
 
            long nextOffset = propertiesBlockOffset + next?.propertiesOffset ?? archive.Limit;
diff --git a/SavegameToolkit/ReadingOptions.cs b/SavegameToolkit/ReadingOptions.cs
index 303a48a..1ff9082 100644
--- a/SavegameToolkit/ReadingOptions.cs
+++ b/SavegameToolkit/ReadingOptions.cs
@@ -65,6 +65,8 @@ public class ReadingOptions : BaseOptions
        /// true if reading, false if skipping
        /// </summary>
        public bool CryopodCreatures { get; private set; } = true;
+        public bool StoredTribes { get; set; } = false;
+        public bool StoredProfiles { get; set; } = false;
 
        public bool BuildComponentTree { get; private set; }
 
@@ -171,6 +173,19 @@ public ReadingOptions WithCryopodCreatures(bool cryopodCreatures)
            return this;
        }
 
+        public ReadingOptions WithStoredTribes(bool storedTribes)
+        {
+            StoredTribes = storedTribes;
+            return this;
+        }
+
+        public ReadingOptions WithStoredProfiles(bool storedProfiles)
+        {
+            StoredProfiles = storedProfiles;
+            return this;
+        }
+
+
        public ReadingOptions WithBuildComponentTree(bool buildComponentTree)
        {
            BuildComponentTree = buildComponentTree;
diff --git a/SavegameToolkit/Types/ChunkedStore.cs b/SavegameToolkit/Types/ChunkedStore.cs
new file mode 100644
index 0000000..1f2966a
--- /dev/null
+++ b/SavegameToolkit/Types/ChunkedStore.cs
@@ -0,0 +1,90 @@
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace SavegameToolkit.Types
+{
+    internal class ChunkedStore : IConversionSupport
+    {
+        internal struct ChunkInfo
+        {
+            public int ArchiveIndex { get; private set; }
+            public long ArchiveOffset { get; private set; }
+            public long Unknown { get; private set; }
+            public long Size { get; private set; }
+
+            internal static ChunkInfo ReadBinary(ArkArchive archive)
+            {
+                ChunkInfo result = new ChunkInfo();
+                result.ArchiveIndex = archive.ReadInt();
+                result.ArchiveOffset = archive.ReadLong();
+                result.Unknown = archive.ReadLong();
+                result.Size = archive.ReadLong();
+                return result;
+            }
+        }
+
+        public long TotalIndexSize { get; private set; }
+        public long TotalDataSize { get; private set; }
+        public List<ChunkInfo> IndexChunks { get; private set; }
+        public List<ChunkInfo> DataChunks { get; private set; }
+
+        public void ReadBinary(ArkArchive archive, ReadingOptions options)
+        {
+            bool versioned = archive.ReadInt() == -1;
+            int version = archive.ReadInt();
+            if (!versioned || version != 2)
+            {
+                throw new NotSupportedException("Only version 2 ChunkedStores can be read.");
+            }
+
+            (TotalDataSize, DataChunks) = _readChunkChain(archive, 1);
+            (TotalIndexSize, IndexChunks) = _readChunkChain(archive, 24);
+
+            // TODO: seems to be a map.
+            // TODO: each index entry looks to be 64-bit ID + 64-bit offset (relative to data buffer start) + 64-bit
+            // TODO: sub-buffer size.
+            // TODO: handle the chunking, read the tribe/player data, and should be good.
+        }
+
+        private Tuple<long, List<ChunkInfo>> _readChunkChain(ArkArchive archive, int elementSize)
+        {
+            List<ChunkInfo> results = new List<ChunkInfo>();
+
+            long totalSize = archive.ReadLong() * elementSize;
+            long bytesRemaining = totalSize;
+            while (bytesRemaining > 0)
+            {
+                ChunkInfo chunk = ChunkInfo.ReadBinary(archive);
+                bytesRemaining -= chunk.Size;
+                results.Add(chunk);
+            }
+
+            return new Tuple<long, List<ChunkInfo>>(totalSize, results);
+        }
+
+        public void WriteBinary(ArkArchive archive, WritingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+
+        public int CalculateSize()
+        {
+            throw new NotImplementedException();
+        }
+
+        public void ReadJson(JToken node, ReadingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+
+        public void WriteJson(JsonTextWriter generator, WritingOptions options)
+        {
+            throw new NotImplementedException();
+        }
+    }
+}
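
For reference, a minimal caller-side sketch of the new options (not part of the patch). It assumes ReadingOptions can be instantiated directly (the toolkit may expose a factory instead), that the caller already has an ArkArchive over the save data as at the existing ReadBinary call sites, and that ReadBinary ends up invoking the new readBinaryStoredTribes/readBinaryStoredProfiles readers, which this excerpt does not show being wired in. The helper name LoadStoredPlayersAndTribes is hypothetical.

// Hypothetical helper, not part of the diff: opts in to the new stored-data reads
// and returns the flat GameObject lists that readBinaryStoredTribes/Profiles fill.
public static (List<GameObject> Tribes, List<GameObject> Profiles) LoadStoredPlayersAndTribes(ArkArchive archive)
{
    // WithStoredTribes/WithStoredProfiles are the builder methods added in ReadingOptions.cs;
    // both default to false, so existing callers are unaffected.
    ReadingOptions options = new ReadingOptions()      // assumption: a parameterless constructor is available
        .WithStoredTribes(true)
        .WithStoredProfiles(true);

    ArkSavegame savegame = new ArkSavegame();
    savegame.ReadBinary(archive, options);             // assumption: ReadBinary dispatches to the new readers

    return (savegame.Tribes, savegame.Profiles);
}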