
Commit

more fixes
diogotr7 committed Oct 10, 2024
1 parent 5ed06ee commit 5df22ab
Showing 5 changed files with 93 additions and 178 deletions.
5 changes: 0 additions & 5 deletions src/StarBreaker.Common/CigGuid.cs
@@ -117,9 +117,4 @@ static void WriteHexDigit(Span<char> buffer, byte value, int offset)
buffer[offset + 1] = _map[value & 15];
}
}

public static CigGuid Parse(string input)
{
return new CigGuid(input);
}
}
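
With the static Parse helper removed, callers would presumably construct CigGuid directly from its string constructor, which is all the deleted Parse body did. A minimal call-site migration sketch; whether the CigGuid(string) constructor is public is an assumption:

// Before this commit: var guid = CigGuid.Parse(input);
// After, assuming the CigGuid(string) constructor remains accessible
// (the removed Parse body only forwarded to it):
var guid = new CigGuid("00000000-0000-0000-0000-000000000000"); // placeholder value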
243 changes: 76 additions & 167 deletions src/StarBreaker.Forge/DataForge.cs
@@ -19,124 +19,107 @@ public DataForge(string dcb)
_offsets = ReadOffsets(bytesRead);
}

public void Extract(string outputFolder, string? fileNameFilter = null, IProgress<double>? progress = null)
private Dictionary<int, int[]> ReadOffsets(int initialOffset)
{
var progressValue = 0;
var structsPerFileName = new Dictionary<string, List<DataForgeRecord>>();
foreach (var record in _database.RecordDefinitions)
{
var fileName = _database.GetString(record.FileNameOffset);
var instances = new Dictionary<int, int[]>();

if (fileNameFilter != null && !FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true))
continue;
foreach (var mapping in _database.DataMappings)
{
var arr = new int[mapping.StructCount];
var structDef = _database.StructDefinitions[mapping.StructIndex];
var structSize = structDef.CalculateSize(_database.StructDefinitions, _database.PropertyDefinitions);

if (!structsPerFileName.TryGetValue(fileName, out var list))
for (var i = 0; i < mapping.StructCount; i++)
{
list = [];
structsPerFileName.Add(fileName, list);
arr[i] = initialOffset;
initialOffset += structSize;
}

list.Add(record);
instances.Add(mapping.StructIndex, arr);
}

var total = structsPerFileName.Count;
return instances;
}

Parallel.ForEach(structsPerFileName, data =>
{
var structs = _database.StructDefinitions.AsSpan();
var properties = _database.PropertyDefinitions.AsSpan();
var filePath = Path.Combine(outputFolder, data.Key);
Directory.CreateDirectory(Path.GetDirectoryName(filePath)!);
public Dictionary<string, string[]> ExportEnums()
{
var result = new Dictionary<string, string[]>(_database.EnumDefinitions.Length);

if (data.Value.Count == 1)
foreach (var enumDef in _database.EnumDefinitions)
{
var enumValues = new string[enumDef.ValueCount];
for (var i = 0; i < enumDef.ValueCount; i++)
{
using var writer = new StreamWriter(filePath);
var record = data.Value[0];
var structDef = _database.StructDefinitions[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
var reader = _database.GetReader(offset);
enumValues[i] = _database.GetString2(_database.EnumOptions[enumDef.FirstValueIndex + i]);
}

var node = new XmlNode(structDef.GetName(_database));
result.Add(enumDef.GetName(_database), enumValues);
}

FillNode(node, structDef, ref reader);
return result;
}

node.WriteTo(writer, 0);
}
else
{
using var writer = new StreamWriter(filePath);
public Dictionary<string, DataForgeRecord> GetRecordsByFileName(string? fileNameFilter = null)
{
var structsPerFileName = new Dictionary<string, DataForgeRecord>();
foreach (var record in _database.RecordDefinitions)
{
var fileName = record.GetFileName(_database);

writer.Write('<');
writer.Write("__root");
writer.Write('>');
writer.WriteLine();
if (fileNameFilter != null && !FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true))
continue;

foreach (var record in data.Value)
{
var structDef = structs[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
//this looks a little wonky, but it's correct.
//we will either find only one record for any given name,
//or, when we find multiple, we only care about the last one.
structsPerFileName[fileName] = record;
}

var reader = _database.GetReader(offset);
return structsPerFileName;
}

var node = new XmlNode(structDef.GetName(_database));
public void ExtractSingleRecord(TextWriter writer, DataForgeRecord record)
{
var structDef = _database.StructDefinitions[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];

FillNode(node, structDef, ref reader);
var reader = _database.GetReader(offset);

node.WriteTo(writer, 1);
}
var node = new XmlNode(structDef.GetName(_database));

writer.WriteLine();
writer.Write("</");
writer.Write("__root");
writer.Write('>');
}
FillNode(node, structDef, ref reader);

lock (structsPerFileName)
{
var currentProgress = Interlocked.Increment(ref progressValue);
//only report progress every 250 records and when we are done
if (currentProgress == total || currentProgress % 250 == 0)
progress?.Report(currentProgress / (double)total);
}
});
node.WriteTo(writer, 0);
}

public void ExtractSingle(string outputFolder, string? fileNameFilter = null, IProgress<double>? progress = null)
public void Extract(string outputFolder, string? fileNameFilter = null, IProgress<double>? progress = null)
{
var progressValue = 0;
var total = _database.RecordDefinitions.Length;
if (!Directory.Exists(outputFolder))
Directory.CreateDirectory(outputFolder);
using var writer = new StreamWriter(Path.Combine(outputFolder, "StarBreaker.Export.xml"), false, Encoding.UTF8, 1024 * 1024);
writer.WriteLine("<__root>");
var recordsByFileName = GetRecordsByFileName(fileNameFilter);
var total = recordsByFileName.Count;

foreach (var record in _database.RecordDefinitions)
Parallel.ForEach(recordsByFileName, kvp =>
{
if (fileNameFilter != null)
{
var fileName = _database.GetString(record.FileNameOffset);
var (fileName, record) = kvp;
if (!FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true))
continue;
}
var filePath = Path.Combine(outputFolder, fileName);
var structDef = _database.StructDefinitions[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
var reader = _database.GetReader(offset);
var child = new XmlNode(structDef.GetName(_database));

FillNode(child, structDef, ref reader);
Directory.CreateDirectory(Path.GetDirectoryName(filePath)!);
child.WriteTo(writer, 1);
{
using var writer = new StreamWriter(filePath);
++progressValue;
if (progressValue % 250 == 0 || progressValue == total)
progress?.Report(progressValue / (double)total);
}
ExtractSingleRecord(writer, record);
}
writer.WriteLine("</__root>");
lock (recordsByFileName)
{
var currentProgress = Interlocked.Increment(ref progressValue);
//only report progress every 250 records and when we are done
if (currentProgress == total || currentProgress % 250 == 0)
progress?.Report(currentProgress / (double)total);
}
});
}

private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ref SpanReader reader)
@@ -352,47 +335,6 @@ private void FillNode(XmlNode node, DataForgeStructDefinition structDef, ref SpanReader reader)
}
}

//verified same as scdatatools
private Dictionary<int, int[]> ReadOffsets(int initialOffset)
{
var instances = new Dictionary<int, int[]>();

foreach (var mapping in _database.DataMappings)
{
var arr = new int[mapping.StructCount];
var structDef = _database.StructDefinitions[mapping.StructIndex];
var structSize = structDef.CalculateSize(_database.StructDefinitions, _database.PropertyDefinitions);

for (var i = 0; i < mapping.StructCount; i++)
{
arr[i] = initialOffset;
initialOffset += structSize;
}

instances.Add(mapping.StructIndex, arr);
}

return instances;
}

public Dictionary<string, string[]> ExportEnums()
{
var result = new Dictionary<string, string[]>(_database.EnumDefinitions.Length);

foreach (var enumDef in _database.EnumDefinitions)
{
var enumValues = new string[enumDef.ValueCount];
for (var i = 0; i < enumDef.ValueCount; i++)
{
enumValues[i] = _database.GetString(_database.EnumOptions[enumDef.FirstValueIndex + i]);
}

result.Add(enumDef.GetName(_database), enumValues);
}

return result;
}

public void WriteTo(TextWriter writer, DataForgeStructDefinition structDef, ref SpanReader reader)
{
writer.Write('<');
@@ -584,7 +526,7 @@ public void WriteTo(TextWriter writer, DataForgeStructDefinition structDef, ref SpanReader reader)
}
}

public void ExtractSingle2(string outputFolder, string? fileNameFilter = null, IProgress<double>? progress = null)
public void ExtractSingle(string outputFolder, string? fileNameFilter = null, IProgress<double>? progress = null)
{
var progressValue = 0;
var total = _database.RecordDefinitions.Length;
@@ -597,16 +539,20 @@ public void ExtractSingle2(string outputFolder, string? fileNameFilter = null, IProgress<double>? progress = null)
{
if (fileNameFilter != null)
{
var s = _database.GetString(record.FileNameOffset);
if (!FileSystemName.MatchesSimpleExpression(fileNameFilter, s, true))
var fileName = record.GetFileName(_database);

if (!FileSystemName.MatchesSimpleExpression(fileNameFilter, fileName, true))
continue;
}

var structDef = _database.StructDefinitions[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
var reader = _database.GetReader(offset);
var child = new XmlNode(structDef.GetName(_database));

WriteTo(writer, structDef, ref reader);
FillNode(child, structDef, ref reader);

child.WriteTo(writer, 1);

++progressValue;
if (progressValue % 250 == 0 || progressValue == total)
Expand All @@ -615,41 +561,4 @@ public void ExtractSingle2(string outputFolder, string? fileNameFilter = null, I

writer.WriteLine("</__root>");
}

public void X(string recordFileName, TextWriter writer)
{
var targetRecords = _database.RecordDefinitions.Where(a => _database.GetString(a.FileNameOffset) == recordFileName).ToArray();

//assume there are multiple records with the same name.
//in this case, the "root" record is always the last one
var mainRecord = targetRecords[^1];
var structDefMain = _database.StructDefinitions[mainRecord.StructIndex];
var offsetMain = _offsets[mainRecord.StructIndex][mainRecord.InstanceIndex];
var readerMain = _database.GetReader(offsetMain);

var mainNode = new XmlNode(structDefMain.GetName(_database));

FillNode(mainNode, structDefMain, ref readerMain);

mainNode.WriteTo(writer, 0);

return;


writer.WriteLine("<Record>");

foreach (var record in targetRecords)
{
var structDef = _database.StructDefinitions[record.StructIndex];
var offset = _offsets[record.StructIndex][record.InstanceIndex];
var reader = _database.GetReader(offset);
var child = new XmlNode(structDef.GetName(_database));

FillNode(child, structDef, ref reader);

child.WriteTo(writer, 1);
}

writer.WriteLine("</Record>");
}
}
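
Taken together, the DataForge.cs changes split the old monolithic Extract into three pieces: GetRecordsByFileName resolves the wildcard filter and the last-record-wins selection once, ExtractSingleRecord serializes one record to any TextWriter, and Extract parallelizes over the resulting dictionary. A rough usage sketch based on the signatures in this diff; the .dcb path, output folder, and filter pattern are placeholders:

using System;
using System.IO;
using StarBreaker.Forge;

// Placeholder paths/filter; method signatures follow the code shown in this commit.
var forge = new DataForge(@"D:\out\Data\Game.dcb");

// Write every matching record to its own XML file under the output folder.
var progress = new Progress<double>(p => Console.WriteLine($"{p:P0}"));
forge.Extract(@"D:\out\extracted", "*TagDatabase*", progress);

// Or serialize a single record to any TextWriter.
foreach (var (fileName, record) in forge.GetRecordsByFileName("*TagDatabase*"))
{
    using var writer = new StringWriter();
    forge.ExtractSingleRecord(writer, record);
    Console.WriteLine($"{fileName}: {writer.ToString().Length} chars");
}

// Enum definitions are exported separately.
foreach (var (name, values) in forge.ExportEnums())
    Console.WriteLine($"{name}: {string.Join(", ", values)}");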
3 changes: 2 additions & 1 deletion src/StarBreaker.Forge/DataForgeRecord.cs
@@ -7,13 +7,14 @@ namespace StarBreaker.Forge;
public readonly record struct DataForgeRecord
{
private readonly DataForgeStringId2 NameOffset;
public readonly DataForgeStringId FileNameOffset;
private readonly DataForgeStringId FileNameOffset;
public readonly int StructIndex;
public readonly CigGuid Hash;
public readonly ushort InstanceIndex;
public readonly ushort OtherIndex;

public string GetName(Database db) => db.GetString2(NameOffset);
public string GetFileName(Database db) => db.GetString(FileNameOffset);

#if DEBUG
public DataForgeStructDefinition Struct => DebugGlobal.Database.StructDefinitions[(int)StructIndex];
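
Making FileNameOffset private and adding GetFileName mirrors the existing GetName accessor, so call sites stop reaching into the string id directly (the DataForge.cs changes above already switch to this pattern). A small illustrative helper, assuming only the Database and DataForgeRecord types shown in this commit:

using StarBreaker.Forge;

// Hypothetical helper showing the accessor pattern introduced here; before this
// commit the file name was read via db.GetString(record.FileNameOffset).
internal static class RecordExtensions
{
    public static string Describe(this DataForgeRecord record, Database db)
        => $"{record.GetName(db)} ({record.GetFileName(db)})";
}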
4 changes: 2 additions & 2 deletions src/StarBreaker.Forge/Database.cs
@@ -38,7 +38,7 @@ public class Database
public readonly DataForgePointer[] WeakValues;
public readonly DataForgeReference[] ReferenceValues;

public readonly DataForgeStringId[] EnumOptions;
public readonly DataForgeStringId2[] EnumOptions;

private readonly FrozenDictionary<int, string> _cachedStrings;
private readonly FrozenDictionary<int, string> _cachedStrings2;
@@ -110,7 +110,7 @@ public Database(string filePath, out int bytesRead)
StrongValues = reader.ReadArray<DataForgePointer>(strongValueCount);
WeakValues = reader.ReadArray<DataForgePointer>(weakValueCount);
ReferenceValues = reader.ReadArray<DataForgeReference>(referenceValueCount);
EnumOptions = reader.ReadArray<DataForgeStringId>(enumOptionCount);
EnumOptions = reader.ReadArray<DataForgeStringId2>(enumOptionCount);

_cachedStrings = ReadStringTable(reader.ReadBytes((int)textLength).AsSpan());
_cachedStrings2 = ReadStringTable(reader.ReadBytes((int)textLength2).AsSpan());
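
Since EnumOptions now holds DataForgeStringId2 values, enum options resolve through the second string table via GetString2, matching the ExportEnums change above. A minimal sketch, assuming EnumDefinitions and GetString2 are accessible to the caller the same way they are inside the library:

using StarBreaker.Forge;

// Resolve the first option of the first enum; index 0 is purely illustrative.
var db = new Database(@"D:\out\Data\Game.dcb", out _);
var enumDef = db.EnumDefinitions[0];
var firstOption = db.GetString2(db.EnumOptions[enumDef.FirstValueIndex]);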
16 changes: 13 additions & 3 deletions src/StarBreaker.Tests/DataForgeTests.cs
@@ -16,11 +16,21 @@ public void Setup()
public void TestTagDatabase()
{
var forge = new DataForge(@"D:\out\Data\Game.dcb");
var tagdatabase = forge.GetRecordsByFileName("*TagDatabase*");

var stringwriter = new StringWriter();
forge.X(@"libs/foundry/records/tagdatabase/tagdatabase.tagdatabase.xml", stringwriter);
var writer = new StringWriter();
forge.ExtractSingleRecord(writer, tagdatabase.Values.Single());

var s = stringwriter.ToString();
var s = writer.ToString();
Assert.That(s, Is.EqualTo(_target));
}

[Test]
public void Enums()
{
var forge = new DataForge(@"D:\out\Data\Game.dcb");
var enums = forge.ExportEnums();

var writer = new StringWriter();
}
}
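
The new Enums test currently stops after calling ExportEnums and creating a writer. A possible follow-up inside the same test class, purely illustrative since no expected values here come from Game.dcb, could assert on the structure of the result:

// Illustrative extension of DataForgeTests; exact expected contents are unknown,
// so only structural assertions are made.
[Test]
public void Enums_AreNonEmpty()
{
    var forge = new DataForge(@"D:\out\Data\Game.dcb");
    var enums = forge.ExportEnums();

    Assert.That(enums, Is.Not.Empty);
    Assert.That(enums.Values, Has.All.Not.Null);
}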
