From 5df22ab72c2729bfbfdf5a715617103397363df5 Mon Sep 17 00:00:00 2001 From: Diogo Trindade Date: Thu, 10 Oct 2024 22:07:00 +0100 Subject: [PATCH] more fixes --- src/StarBreaker.Common/CigGuid.cs | 5 - src/StarBreaker.Forge/DataForge.cs | 243 +++++++---------------- src/StarBreaker.Forge/DataForgeRecord.cs | 3 +- src/StarBreaker.Forge/Database.cs | 4 +- src/StarBreaker.Tests/DataForgeTests.cs | 16 +- 5 files changed, 93 insertions(+), 178 deletions(-) diff --git a/src/StarBreaker.Common/CigGuid.cs b/src/StarBreaker.Common/CigGuid.cs index 2435ab3..7e7e880 100644 --- a/src/StarBreaker.Common/CigGuid.cs +++ b/src/StarBreaker.Common/CigGuid.cs @@ -117,9 +117,4 @@ static void WriteHexDigit(Span buffer, byte value, int offset) buffer[offset + 1] = _map[value & 15]; } } - - public static CigGuid Parse(string input) - { - return new CigGuid(input); - } } \ No newline at end of file diff --git a/src/StarBreaker.Forge/DataForge.cs b/src/StarBreaker.Forge/DataForge.cs index e4bfb07..5c160c3 100644 --- a/src/StarBreaker.Forge/DataForge.cs +++ b/src/StarBreaker.Forge/DataForge.cs @@ -19,124 +19,107 @@ public DataForge(string dcb) _offsets = ReadOffsets(bytesRead); } - public void Extract(string outputFolder, string? fileNameFilter = null, IProgress? 
progress = null) + private Dictionary ReadOffsets(int initialOffset) { - var progressValue = 0; - var structsPerFileName = new Dictionary>(); - foreach (var record in _database.RecordDefinitions) - { - var fileName = _database.GetString(record.FileNameOffset); + var instances = new Dictionary(); - if (fileNameFilter != null && !FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true)) - continue; + foreach (var mapping in _database.DataMappings) + { + var arr = new int[mapping.StructCount]; + var structDef = _database.StructDefinitions[mapping.StructIndex]; + var structSize = structDef.CalculateSize(_database.StructDefinitions, _database.PropertyDefinitions); - if (!structsPerFileName.TryGetValue(fileName, out var list)) + for (var i = 0; i < mapping.StructCount; i++) { - list = []; - structsPerFileName.Add(fileName, list); + arr[i] = initialOffset; + initialOffset += structSize; } - list.Add(record); + instances.Add(mapping.StructIndex, arr); } - var total = structsPerFileName.Count; + return instances; + } - Parallel.ForEach(structsPerFileName, data => - { - var structs = _database.StructDefinitions.AsSpan(); - var properties = _database.PropertyDefinitions.AsSpan(); - var filePath = Path.Combine(outputFolder, data.Key); - Directory.CreateDirectory(Path.GetDirectoryName(filePath)!); + public Dictionary ExportEnums() + { + var result = new Dictionary(_database.EnumDefinitions.Length); - if (data.Value.Count == 1) + foreach (var enumDef in _database.EnumDefinitions) + { + var enumValues = new string[enumDef.ValueCount]; + for (var i = 0; i < enumDef.ValueCount; i++) { - using var writer = new StreamWriter(filePath); - - var record = data.Value[0]; - var structDef = _database.StructDefinitions[record.StructIndex]; - var offset = _offsets[record.StructIndex][record.InstanceIndex]; - - var reader = _database.GetReader(offset); + enumValues[i] = _database.GetString2(_database.EnumOptions[enumDef.FirstValueIndex + i]); + } - var node = new 
XmlNode(structDef.GetName(_database)); + + result.Add(enumDef.GetName(_database), enumValues); + } - FillNode(node, structDef, ref reader); + return result; + } - node.WriteTo(writer, 0); - } - else - { - using var writer = new StreamWriter(filePath); + public Dictionary GetRecordsByFileName(string? fileNameFilter = null) + { + var structsPerFileName = new Dictionary(); + foreach (var record in _database.RecordDefinitions) + { + var fileName = record.GetFileName(_database); - writer.Write('<'); - writer.Write("__root"); - writer.Write('>'); - writer.WriteLine(); + if (fileNameFilter != null && !FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true)) + continue; - foreach (var record in data.Value) - { - var structDef = structs[record.StructIndex]; - var offset = _offsets[record.StructIndex][record.InstanceIndex]; + //this looks a lil wonky, but it's correct. + //we will either find only one record for any given name, + //or when we find multiple, we only care about the last one. 
+ structsPerFileName[fileName] = record; + } - var reader = _database.GetReader(offset); + return structsPerFileName; + } - var node = new XmlNode(structDef.GetName(_database)); + public void ExtractSingleRecord(TextWriter writer, DataForgeRecord record) + { + var structDef = _database.StructDefinitions[record.StructIndex]; + var offset = _offsets[record.StructIndex][record.InstanceIndex]; - FillNode(node, structDef, ref reader); + var reader = _database.GetReader(offset); - node.WriteTo(writer, 1); - } + var node = new XmlNode(structDef.GetName(_database)); - writer.WriteLine(); - writer.Write("'); - } + FillNode(node, structDef, ref reader); - lock (structsPerFileName) - { - var currentProgress = Interlocked.Increment(ref progressValue); - //only report progress every 250 records and when we are done - if (currentProgress == total || currentProgress % 250 == 0) - progress?.Report(currentProgress / (double)total); - } - }); + node.WriteTo(writer, 0); } - public void ExtractSingle(string outputFolder, string? fileNameFilter = null, IProgress? progress = null) + public void Extract(string outputFolder, string? fileNameFilter = null, IProgress? 
progress = null) { var progressValue = 0; - var total = _database.RecordDefinitions.Length; - if (!Directory.Exists(outputFolder)) - Directory.CreateDirectory(outputFolder); - using var writer = new StreamWriter(Path.Combine(outputFolder, "StarBreaker.Export.xml"), false, Encoding.UTF8, 1024 * 1024); - writer.WriteLine("<__root>"); + var recordsByFileName = GetRecordsByFileName(fileNameFilter); + var total = recordsByFileName.Count; - foreach (var record in _database.RecordDefinitions) + Parallel.ForEach(recordsByFileName, kvp => { - if (fileNameFilter != null) - { - var fileName = _database.GetString(record.FileNameOffset); + var (fileName, record) = kvp; - if (!FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true)) - continue; - } + var filePath = Path.Combine(outputFolder, fileName); - var structDef = _database.StructDefinitions[record.StructIndex]; - var offset = _offsets[record.StructIndex][record.InstanceIndex]; - var reader = _database.GetReader(offset); - var child = new XmlNode(structDef.GetName(_database)); - - FillNode(child, structDef, ref reader); + Directory.CreateDirectory(Path.GetDirectoryName(filePath)!); - child.WriteTo(writer, 1); + { + using var writer = new StreamWriter(filePath); - ++progressValue; - if (progressValue % 250 == 0 || progressValue == total) - progress?.Report(progressValue / (double)total); - } + ExtractSingleRecord(writer, record); + } - writer.WriteLine(""); + lock (recordsByFileName) + { + var currentProgress = Interlocked.Increment(ref progressValue); + //only report progress every 250 records and when we are done + if (currentProgress == total || currentProgress % 250 == 0) + progress?.Report(currentProgress / (double)total); + } + }); } private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ref SpanReader reader) @@ -352,47 +335,6 @@ private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ref Spa } } - //verified same as scdatatools - private Dictionary ReadOffsets(int 
initialOffset) - { - var instances = new Dictionary(); - - foreach (var mapping in _database.DataMappings) - { - var arr = new int[mapping.StructCount]; - var structDef = _database.StructDefinitions[mapping.StructIndex]; - var structSize = structDef.CalculateSize(_database.StructDefinitions, _database.PropertyDefinitions); - - for (var i = 0; i < mapping.StructCount; i++) - { - arr[i] = initialOffset; - initialOffset += structSize; - } - - instances.Add(mapping.StructIndex, arr); - } - - return instances; - } - - public Dictionary ExportEnums() - { - var result = new Dictionary(_database.EnumDefinitions.Length); - - foreach (var enumDef in _database.EnumDefinitions) - { - var enumValues = new string[enumDef.ValueCount]; - for (var i = 0; i < enumDef.ValueCount; i++) - { - enumValues[i] = _database.GetString(_database.EnumOptions[enumDef.FirstValueIndex + i]); - } - - result.Add(enumDef.GetName(_database), enumValues); - } - - return result; - } - public void WriteTo(TextWriter writer, DataForgeStructDefinition structDef, ref SpanReader reader) { writer.Write('<'); @@ -584,7 +526,7 @@ public void WriteTo(TextWriter writer, DataForgeStructDefinition structDef, ref } } - public void ExtractSingle2(string outputFolder, string? fileNameFilter = null, IProgress? progress = null) + public void ExtractSingle(string outputFolder, string? fileNameFilter = null, IProgress? progress = null) { var progressValue = 0; var total = _database.RecordDefinitions.Length; @@ -597,16 +539,20 @@ public void ExtractSingle2(string outputFolder, string? 
fileNameFilter = null, I { if (fileNameFilter != null) { - var s = _database.GetString(record.FileNameOffset); - if (!FileSystemName.MatchesSimpleExpression(fileNameFilter, s, true)) + var fileName = record.GetFileName(_database); + + if (!FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true)) continue; } var structDef = _database.StructDefinitions[record.StructIndex]; var offset = _offsets[record.StructIndex][record.InstanceIndex]; var reader = _database.GetReader(offset); + var child = new XmlNode(structDef.GetName(_database)); - WriteTo(writer, structDef, ref reader); + FillNode(child, structDef, ref reader); + + child.WriteTo(writer, 1); ++progressValue; if (progressValue % 250 == 0 || progressValue == total) @@ -615,41 +561,4 @@ public void ExtractSingle2(string outputFolder, string? fileNameFilter = null, I writer.WriteLine(""); } - - public void X(string recordFileName, TextWriter writer) - { - var targetRecords = _database.RecordDefinitions.Where(a => _database.GetString(a.FileNameOffset) == recordFileName).ToArray(); - - //assume there are multiple records with the same name. 
- //in this case, the "root" record is always the last one - var mainRecord = targetRecords[^1]; - var structDefMain = _database.StructDefinitions[mainRecord.StructIndex]; - var offsetMain = _offsets[mainRecord.StructIndex][mainRecord.InstanceIndex]; - var readerMain = _database.GetReader(offsetMain); - - var mainNode = new XmlNode(structDefMain.GetName(_database)); - - FillNode(mainNode, structDefMain, ref readerMain); - - mainNode.WriteTo(writer, 0); - - return; - - - writer.WriteLine(""); - - foreach (var record in targetRecords) - { - var structDef = _database.StructDefinitions[record.StructIndex]; - var offset = _offsets[record.StructIndex][record.InstanceIndex]; - var reader = _database.GetReader(offset); - var child = new XmlNode(structDef.GetName(_database)); - - FillNode(child, structDef, ref reader); - - child.WriteTo(writer, 1); - } - - writer.WriteLine(""); - } } \ No newline at end of file diff --git a/src/StarBreaker.Forge/DataForgeRecord.cs b/src/StarBreaker.Forge/DataForgeRecord.cs index 1c738d2..7dd4cb4 100644 --- a/src/StarBreaker.Forge/DataForgeRecord.cs +++ b/src/StarBreaker.Forge/DataForgeRecord.cs @@ -7,13 +7,14 @@ namespace StarBreaker.Forge; public readonly record struct DataForgeRecord { private readonly DataForgeStringId2 NameOffset; - public readonly DataForgeStringId FileNameOffset; + private readonly DataForgeStringId FileNameOffset; public readonly int StructIndex; public readonly CigGuid Hash; public readonly ushort InstanceIndex; public readonly ushort OtherIndex; public string GetName(Database db) => db.GetString2(NameOffset); + public string GetFileName(Database db) => db.GetString(FileNameOffset); #if DEBUG public DataForgeStructDefinition Struct => DebugGlobal.Database.StructDefinitions[(int)StructIndex]; diff --git a/src/StarBreaker.Forge/Database.cs b/src/StarBreaker.Forge/Database.cs index 54f9894..8bfa9d7 100644 --- a/src/StarBreaker.Forge/Database.cs +++ b/src/StarBreaker.Forge/Database.cs @@ -38,7 +38,7 @@ public class 
Database public readonly DataForgePointer[] WeakValues; public readonly DataForgeReference[] ReferenceValues; - public readonly DataForgeStringId[] EnumOptions; + public readonly DataForgeStringId2[] EnumOptions; private readonly FrozenDictionary _cachedStrings; private readonly FrozenDictionary _cachedStrings2; @@ -110,7 +110,7 @@ public Database(string filePath, out int bytesRead) StrongValues = reader.ReadArray(strongValueCount); WeakValues = reader.ReadArray(weakValueCount); ReferenceValues = reader.ReadArray(referenceValueCount); - EnumOptions = reader.ReadArray(enumOptionCount); + EnumOptions = reader.ReadArray(enumOptionCount); _cachedStrings = ReadStringTable(reader.ReadBytes((int)textLength).AsSpan()); _cachedStrings2 = ReadStringTable(reader.ReadBytes((int)textLength2).AsSpan()); diff --git a/src/StarBreaker.Tests/DataForgeTests.cs b/src/StarBreaker.Tests/DataForgeTests.cs index f49fbea..ac7ee24 100644 --- a/src/StarBreaker.Tests/DataForgeTests.cs +++ b/src/StarBreaker.Tests/DataForgeTests.cs @@ -16,11 +16,21 @@ public void Setup() public void TestTagDatabase() { var forge = new DataForge(@"D:\out\Data\Game.dcb"); + var tagdatabase = forge.GetRecordsByFileName("*TagDatabase*"); - var stringwriter = new StringWriter(); - forge.X(@"libs/foundry/records/tagdatabase/tagdatabase.tagdatabase.xml", stringwriter); + var writer = new StringWriter(); + forge.ExtractSingleRecord(writer, tagdatabase.Values.Single()); - var s = stringwriter.ToString(); + var s = writer.ToString(); Assert.That(s, Is.EqualTo(_target)); } + + [Test] + public void Enums() + { + var forge = new DataForge(@"D:\out\Data\Game.dcb"); + var enums = forge.ExportEnums(); + + var writer = new StringWriter(); + } } \ No newline at end of file