From d7340182cf45914e8c5b4eb22114fdc81f4e775f Mon Sep 17 00:00:00 2001
From: Lars
Date: Thu, 23 Nov 2023 18:05:45 +0100
Subject: [PATCH] Fixed #173 now really. Might break some Arch.Extended libs,
 e.g. the System.SourceGenerator one.

Renames Archetype.Size to ChunkCount, Archetype.Capacity to ChunkCapacity,
Archetype.Entities to EntityCount and Archetype.ChunkSize to ChunkSizeInBytes,
adds an EntityCapacity property, and reworks the chunk accounting in
Archetype.Copy and Archetype.EnsureEntityCapacity.
---
 .../ArchetypeIterationBenchmark.cs            |  10 +-
 .../ArchetypeIterationTechnqiquesBenchmark.cs |  14 +-
 src/Arch.Benchmarks/QueryBenchmark.cs         |   3 +-
 .../Queries/AddWithQueryDescription.cs        |   2 +-
 .../Queries/InlineParallelQuery.cs            |   4 +-
 src/Arch.SourceGen/Queries/ParallelQuery.cs   |   4 +-
 .../Queries/RemoveWithQueryDescription.cs     |   2 +-
 src/Arch.Tests/ArchetypeTest.cs               |  24 ++--
 src/Arch.Tests/EnumeratorTest.cs              |   4 +-
 src/Arch.Tests/WorldTest.cs                   |  16 +--
 src/Arch/Core/Archetype.cs                    | 136 ++++++++++--------
 src/Arch/Core/Enumerators.cs                  |   8 +-
 .../Dangerous/DangerousArchetypeExtensions.cs |  10 +-
 .../Dangerous/DangerousWorldExtensions.cs     |   4 +-
 src/Arch/Core/Jobs/World.Jobs.cs              |   2 +-
 src/Arch/Core/World.cs                        |  12 +-
 16 files changed, 136 insertions(+), 119 deletions(-)

diff --git a/src/Arch.Benchmarks/ArchetypeIterationBenchmark.cs b/src/Arch.Benchmarks/ArchetypeIterationBenchmark.cs
index e89204e1..8044085e 100644
--- a/src/Arch.Benchmarks/ArchetypeIterationBenchmark.cs
+++ b/src/Arch.Benchmarks/ArchetypeIterationBenchmark.cs
@@ -39,7 +39,7 @@ public void Setup()
     [Benchmark]
     public void IterationNormalTwoComponents()
     {
-        var size = _globalArchetype.Size;
+        var size = _globalArchetype.ChunkCount;
         var chunks = _globalArchetype.Chunks;

         for (var chunkIndex = 0; chunkIndex < size; chunkIndex++)
@@ -63,7 +63,7 @@ public void IterationNormalTwoComponents()
     [Benchmark]
     public void IterationUnsafeAddTwoComponents()
     {
-        var size = _globalArchetype.Size;
+        var size = _globalArchetype.ChunkCount;
         ref var chunk = ref _globalArchetype.Chunks[0];

         for (var chunkIndex = 0; chunkIndex < size; chunkIndex++)
@@ -92,7 +92,7 @@ public void IterationUnsafeAddTwoComponents()
     public void IterationParallelUnsafeAdd()
     {
         // Partition the entire source array.
-        var rangePartitioner = Partitioner.Create(0, _globalArchetype.Size);
+        var rangePartitioner = Partitioner.Create(0, _globalArchetype.ChunkCount);
         Parallel.ForEach(rangePartitioner, range =>
         {
             var start = range.Item1;
@@ -126,7 +126,7 @@ public void IterationParallelUnsafeAdd()
     [Benchmark]
     public void IterationNormalEntityTwoComponents()
     {
-        var size = _globalArchetype.Size;
+        var size = _globalArchetype.ChunkCount;
         var chunks = _globalArchetype.Chunks;

         for (var chunkIndex = 0; chunkIndex < size; chunkIndex++)
@@ -154,7 +154,7 @@ public void IterationNormalEntityTwoComponents()
     [Benchmark]
     public void IterationUnsafeAddEntityTwoComponents()
     {
-        var size = _globalArchetype.Size;
+        var size = _globalArchetype.ChunkCount;
         ref var chunk = ref _globalArchetype.Chunks[0];

         for (var chunkIndex = 0; chunkIndex < size; chunkIndex++)
diff --git a/src/Arch.Benchmarks/ArchetypeIterationTechnqiquesBenchmark.cs b/src/Arch.Benchmarks/ArchetypeIterationTechnqiquesBenchmark.cs
index 4cd75b9e..a14a0579 100644
--- a/src/Arch.Benchmarks/ArchetypeIterationTechnqiquesBenchmark.cs
+++ b/src/Arch.Benchmarks/ArchetypeIterationTechnqiquesBenchmark.cs
@@ -45,7 +45,7 @@ public void IterationNormalTwoComponents()
     {
         var chunks = _globalArchetype.Chunks;

-        for (var chunkIndex = 0; chunkIndex < _globalArchetype.Size; chunkIndex++)
+        for (var chunkIndex = 0; chunkIndex < _globalArchetype.ChunkCount; chunkIndex++)
         {
             ref var chunk = ref chunks[chunkIndex];
             var transforms = chunk.GetArray<Transform>();
@@ -67,7 +67,7 @@ public void IterationUnsafeAddTwoComponents()
     {
         ref var chunk = ref _globalArchetype.Chunks[0];

-        for (var chunkIndex = 0; chunkIndex < _globalArchetype.Size; chunkIndex++)
+        for (var chunkIndex = 0; chunkIndex < _globalArchetype.ChunkCount; chunkIndex++)
         {
             ref var currentChunk = ref Unsafe.Add(ref chunk, chunkIndex);
             var transforms = currentChunk.GetArray<Transform>();
@@ -92,7 +92,7 @@ public void IterationNormalTwoComponentsSpan()
     {
         var chunks = _globalArchetype.Chunks;

-        for (var chunkIndex = 0; chunkIndex < _globalArchetype.Size; chunkIndex++)
+        for (var chunkIndex = 0; chunkIndex < _globalArchetype.ChunkCount; chunkIndex++)
         {
             ref var chunk = ref chunks[chunkIndex];
             var transforms = chunk.GetSpan<Transform>();
@@ -114,7 +114,7 @@ public void IterationUnsafeAddTwoComponentsSpan()
     {
         ref var chunk = ref _globalArchetype.Chunks[0];

-        for (var chunkIndex = 0; chunkIndex < _globalArchetype.Size; chunkIndex++)
+        for (var chunkIndex = 0; chunkIndex < _globalArchetype.ChunkCount; chunkIndex++)
         {
             ref var currentChunk = ref Unsafe.Add(ref chunk, chunkIndex);
             var transforms = currentChunk.GetSpan<Transform>();
@@ -139,7 +139,7 @@ public void IterationUnsafeAddTwoComponentsSpan()
     public void IterationBackwardsUnsafeAdd()
     {
         ref var chunk = ref _globalArchetype.Chunks[0];
-        for (var chunkIndex = 0; chunkIndex < _globalArchetype.Size; chunkIndex++)
+        for (var chunkIndex = 0; chunkIndex < _globalArchetype.ChunkCount; chunkIndex++)
         {
             ref var currentChunk = ref Unsafe.Add(ref chunk, chunkIndex);
             var chunkSize = currentChunk.Size;
@@ -157,7 +157,7 @@ public void IterationBackwardsUnsafeAdd()
     public void IterationBackwardsUnsafeSubstract()
     {
         ref var chunk = ref _globalArchetype.Chunks[0];
-        for (var chunkIndex = 0; chunkIndex < _globalArchetype.Size; chunkIndex++)
+        for (var chunkIndex = 0; chunkIndex < _globalArchetype.ChunkCount; chunkIndex++)
         {
             ref var currentChunk = ref Unsafe.Add(ref chunk, chunkIndex);
             var chunkSize = currentChunk.Size;
@@ -175,7 +175,7 @@ public void IterationBackwardsUnsafeSubstract()
     public void IterationBackwardsLoop()
     {
         ref var chunk = ref _globalArchetype.Chunks[0];
-        for (var chunkIndex = 0; chunkIndex < _globalArchetype.Size; chunkIndex++)
+        for (var chunkIndex = 0; chunkIndex < _globalArchetype.ChunkCount; chunkIndex++)
         {
             ref var currentChunk = ref Unsafe.Add(ref chunk, chunkIndex);
             var chunkSize = currentChunk.Size;
diff --git a/src/Arch.Benchmarks/QueryBenchmark.cs b/src/Arch.Benchmarks/QueryBenchmark.cs
index 206e10cd..62273ca5 100644
--- a/src/Arch.Benchmarks/QueryBenchmark.cs
+++ b/src/Arch.Benchmarks/QueryBenchmark.cs
@@ -46,12 +46,13 @@ public void WorldEntityQuery()
     [Benchmark]
     public void EntityExtensionQuery()
     {
+        /*
         _world.Query(in _queryDescription, (Entity entity) =>
         {
             var refs = entity.Get<Transform, Velocity>();
             refs.t0.X += refs.t1.X;
             refs.t0.Y += refs.t1.Y;
-        });
+        });*/
     }
 #endif
diff --git a/src/Arch.SourceGen/Queries/AddWithQueryDescription.cs b/src/Arch.SourceGen/Queries/AddWithQueryDescription.cs
index 22f1aa18..996e3a44 100644
--- a/src/Arch.SourceGen/Queries/AddWithQueryDescription.cs
+++ b/src/Arch.SourceGen/Queries/AddWithQueryDescription.cs
@@ -56,7 +56,7 @@ public static void AppendAddWithQueryDescription(this StringBuilder sb, int amou
         foreach (var archetype in query.GetArchetypeIterator())
         {
             // Archetypes that already have T should be skipped to prevent undefined behaviour.
-            if(archetype.Entities == 0 || archetype.Has<{{generics}}>())
+            if(archetype.EntityCount == 0 || archetype.Has<{{generics}}>())
             {
                 continue;
             }
diff --git a/src/Arch.SourceGen/Queries/InlineParallelQuery.cs b/src/Arch.SourceGen/Queries/InlineParallelQuery.cs
index 6c10f567..25b2695e 100644
--- a/src/Arch.SourceGen/Queries/InlineParallelQuery.cs
+++ b/src/Arch.SourceGen/Queries/InlineParallelQuery.cs
@@ -28,7 +28,7 @@ public static void AppendHpParallelQuery(this StringBuilder builder, int amount)
         var query = Query(in description);
         foreach (var archetype in query.GetArchetypeIterator())
         {
-            var archetypeSize = archetype.Size;
+            var archetypeSize = archetype.ChunkCount;
             var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
             foreach (var range in part)
             {
@@ -87,7 +87,7 @@ public static void AppendHpeParallelQuery(this StringBuilder builder, int amount
         var query = Query(in description);
         foreach (var archetype in query.GetArchetypeIterator())
         {
-            var archetypeSize = archetype.Size;
+            var archetypeSize = archetype.ChunkCount;
             var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
             foreach (var range in part)
             {
diff --git a/src/Arch.SourceGen/Queries/ParallelQuery.cs b/src/Arch.SourceGen/Queries/ParallelQuery.cs
index df73725a..06abb097 100644
--- a/src/Arch.SourceGen/Queries/ParallelQuery.cs
+++ b/src/Arch.SourceGen/Queries/ParallelQuery.cs
@@ -28,7 +28,7 @@ public static StringBuilder AppendParallelQuery(this StringBuilder sb, int amoun
         var query = Query(in description);
         foreach (var archetype in query.GetArchetypeIterator())
         {
-            var archetypeSize = archetype.Size;
+            var archetypeSize = archetype.ChunkCount;
             var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
             foreach (var range in part)
             {
@@ -88,7 +88,7 @@ public static StringBuilder AppendParallelEntityQuery(this StringBuilder sb, int
         var query = Query(in description);
         foreach (var archetype in query.GetArchetypeIterator())
         {
-            var archetypeSize = archetype.Size;
+            var archetypeSize = archetype.ChunkCount;
             var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
             foreach (var range in part)
             {
diff --git a/src/Arch.SourceGen/Queries/RemoveWithQueryDescription.cs b/src/Arch.SourceGen/Queries/RemoveWithQueryDescription.cs
index ea0712ec..781d78ca 100644
--- a/src/Arch.SourceGen/Queries/RemoveWithQueryDescription.cs
+++ b/src/Arch.SourceGen/Queries/RemoveWithQueryDescription.cs
@@ -53,7 +53,7 @@ public static void AppendRemoveWithQueryDescription(this StringBuilder sb, int a
         foreach (var archetype in query.GetArchetypeIterator())
         {
             // Archetypes without T should be skipped to prevent undefined behaviour.
-            if(archetype.Entities <= 0 || !archetype.Has<{{generics}}>())
+            if(archetype.EntityCount <= 0 || !archetype.Has<{{generics}}>())
             {
                 continue;
             }
diff --git a/src/Arch.Tests/ArchetypeTest.cs b/src/Arch.Tests/ArchetypeTest.cs
index ce880158..20f94ebf 100644
--- a/src/Arch.Tests/ArchetypeTest.cs
+++ b/src/Arch.Tests/ArchetypeTest.cs
@@ -36,17 +36,17 @@ public void CreateChunk()
             archetype.Add(entity, out _);
         }

-        That(archetype.Size, Is.EqualTo(1)); // Since we filled it with n entities, it must have one single chunk.
+        That(archetype.ChunkCount, Is.EqualTo(1)); // Since we filled it with n entities, it must have one single chunk.
     }

     /// <summary>
-    ///     Checks if the <see cref="Archetype.ChunkSize"/> increases when <see cref="Entity"/>s and their components become too large.
+    ///     Checks if the <see cref="Archetype.ChunkSizeInBytes"/> increases when <see cref="Entity"/>s and their components become too large.
     /// </summary>
     [Test]
     public void ScaleChunkCapacity()
     {
         var archetype = new Archetype(_heavyGroup);
-        That(archetype.ChunkSize, Is.EqualTo(Archetype.BaseSize * 2)); // heavyGroup should be large enough to force the chunk to pick a 32KB chunk instead of 16KB
+        That(archetype.ChunkSizeInBytes, Is.EqualTo(Archetype.BaseSize * 2)); // heavyGroup should be large enough to force the chunk to pick a 32KB chunk instead of 16KB
     }

     /// <summary>
@@ -64,7 +64,7 @@ public void CreateMultipleChunk()
             archetype.Add(entity, out _);
         }

-        That(archetype.Size, Is.EqualTo(2));
+        That(archetype.ChunkCount, Is.EqualTo(2));
     }

     /// <summary>
@@ -83,8 +83,8 @@ public void Reserve()
             archetype.Add(entity, out _);
         }

-        That(archetype.Size, Is.EqualTo(10));
-        That(archetype.Capacity, Is.EqualTo(10));
+        That(archetype.ChunkCount, Is.EqualTo(10));
+        That(archetype.ChunkCapacity, Is.EqualTo(10));
     }

     /// <summary>
@@ -105,8 +105,8 @@ public void RemoveFromChunkWithReplacement()
         var slot = new Slot(0, 0);
         archetype.Remove(ref slot, out _);

-        That(archetype.Size, Is.EqualTo(2));
-        That(archetype.Capacity, Is.EqualTo(2));
+        That(archetype.ChunkCount, Is.EqualTo(2));
+        That(archetype.ChunkCapacity, Is.EqualTo(2));
         That(archetype.Chunks[0].Size, Is.EqualTo(entities - 50));
         That(archetype.Chunks[1].Size, Is.EqualTo(49));
         That(archetype.Chunks[0].Entities[0].Id, Is.EqualTo(archetype.CalculateEntitiesPerChunk(_group) + 50 - 1)); // Last entity from second chunk now replaced the removed entity and is in the first chunk
@@ -130,8 +130,8 @@ public void RemoveChunk()
         var slot = new Slot(0, 0);
         archetype.Remove(ref slot, out _);

-        That(archetype.Size, Is.EqualTo(1));
-        That(archetype.Capacity, Is.EqualTo(2));
+        That(archetype.ChunkCount, Is.EqualTo(1));
+        That(archetype.ChunkCapacity, Is.EqualTo(2));
         That(archetype.Chunks[0].Size, Is.EqualTo(entities - 1));
         That(archetype.Chunks[0].Entities[0].Id, Is.EqualTo(archetype.CalculateEntitiesPerChunk(_group))); // Last entity from second chunk now replaced the removed entity and is in the first chunk
     }
@@ -256,7 +256,7 @@ public void CopyTo([Values(1111,2222,3333)] int sourceAmount, [Values(1111,2222,
         }

         // Make sure that EVERY single entity was copied correctly
-        That(destination.Entities, Is.EqualTo(sourceAmount+destinationAmount));
+        That(destination.EntityCount, Is.EqualTo(sourceAmount+destinationAmount));
         That(countedSourceItems, Is.EqualTo(sourceAmount));
         That(countedDestinationItems, Is.EqualTo(destinationAmount));
     }

@@ -301,7 +301,7 @@ public void CopyToShift([Values(1111,2222,3333)] int sourceAmount, [Values(1111,
         Archetype.Copy(source, destination);
         source.Clear();

-        That(destination.Entities, Is.EqualTo(sourceAmount+destinationAmount));
+        That(destination.EntityCount, Is.EqualTo(sourceAmount+destinationAmount));
         That(source.Entity(ref sourceSlot), Is.EqualTo(destination.Entity(ref resultSlot))); // Make sure entities were copied correctly.
     }
 }
diff --git a/src/Arch.Tests/EnumeratorTest.cs b/src/Arch.Tests/EnumeratorTest.cs
index 22d89a21..6f15248d 100644
--- a/src/Arch.Tests/EnumeratorTest.cs
+++ b/src/Arch.Tests/EnumeratorTest.cs
@@ -122,8 +122,8 @@ public void QueryChunkEnumeration()
             counter++;
         }

-        var archetype1ChunkCount = _world.Archetypes[0].Size;
-        var archetype2ChunkCount = _world.Archetypes[1].Size;
+        var archetype1ChunkCount = _world.Archetypes[0].ChunkCount;
+        var archetype2ChunkCount = _world.Archetypes[1].ChunkCount;
         That(counter, Is.EqualTo(archetype1ChunkCount + archetype2ChunkCount));
     }
 }
diff --git a/src/Arch.Tests/WorldTest.cs b/src/Arch.Tests/WorldTest.cs
index 7c93e0ee..b6274623 100644
--- a/src/Arch.Tests/WorldTest.cs
+++ b/src/Arch.Tests/WorldTest.cs
@@ -101,8 +101,8 @@ public void DestroyAll()
         }

         That(_world.Size, Is.EqualTo(0));
-        That(_world.Archetypes[0].Size, Is.EqualTo(1));
-        That(_world.Archetypes[1].Size, Is.EqualTo(1));
+        That(_world.Archetypes[0].ChunkCount, Is.EqualTo(1));
+        That(_world.Archetypes[1].ChunkCount, Is.EqualTo(1));
     }

     /// <summary>
@@ -276,8 +276,8 @@ public void TrimExcess()
         var archetype = world.Archetypes[0];
         That(world.Size, Is.EqualTo(1));
         That(world.Capacity, Is.EqualTo(archetype.EntitiesPerChunk));
-        That(archetype.Size, Is.EqualTo(1));
-        That(archetype.Capacity, Is.EqualTo(1));
+        That(archetype.ChunkCount, Is.EqualTo(1));
+        That(archetype.ChunkCapacity, Is.EqualTo(1));
     }

     /// <summary>
@@ -304,7 +304,7 @@ public void TrimExcessEmptyArchetypes()
         var archetype = world.Archetypes[0];
         That(world.Archetypes.Count, Is.EqualTo(1));
-        That(world.Capacity, Is.EqualTo(archetype.Size * archetype.EntitiesPerChunk));
+        That(world.Capacity, Is.EqualTo(archetype.ChunkCount * archetype.EntitiesPerChunk));
     }

     /// <summary>
@@ -547,7 +547,7 @@ public void Remove()
         _world.Remove<Transform>(entity2);

         That(_world.GetArchetype(entity2), Is.EqualTo(_world.GetArchetype(entity)));
-        That(_world.GetArchetype(entity).Size, Is.EqualTo(1));
+        That(_world.GetArchetype(entity).ChunkCount, Is.EqualTo(1));
         That(_world.GetArchetype(entity).Chunks[0].Size, Is.EqualTo(2));
     }

@@ -601,7 +601,7 @@ public void Remove_NonGeneric()
         _world.RemoveRange(entity2, typeof(Transform));

         That(_world.GetArchetype(entity2), Is.EqualTo(_world.GetArchetype(entity)));
-        That(_world.GetArchetype(entity).Size, Is.EqualTo(1));
+        That(_world.GetArchetype(entity).ChunkCount, Is.EqualTo(1));
         That(_world.GetArchetype(entity).Chunks[0].Size, Is.EqualTo(2));
     }

@@ -671,7 +671,7 @@ public void GeneratedRemove()
         _world.Remove(entity2);

         That(_world.GetArchetype(entity2), Is.EqualTo(_world.GetArchetype(entity)));
-        That(_world.GetArchetype(entity).Size, Is.EqualTo(1));
+        That(_world.GetArchetype(entity).ChunkCount, Is.EqualTo(1));
         That(_world.GetArchetype(entity).Chunks[0].Size, Is.EqualTo(2));
     }

diff --git a/src/Arch/Core/Archetype.cs b/src/Arch/Core/Archetype.cs
index ab395877..3da03481 100644
--- a/src/Arch/Core/Archetype.cs
+++ b/src/Arch/Core/Archetype.cs
@@ -140,7 +140,7 @@ internal Archetype(ComponentType[] types)
         Types = types;

         // Calculations
-        ChunkSize = MinimumRequiredChunkSize(types);
+        ChunkSizeInBytes = MinimumRequiredChunkSize(types);
         EntitiesPerChunk = CalculateEntitiesPerChunk(types);

         // The bitmask/set
@@ -151,8 +151,8 @@ internal Archetype(ComponentType[] types)
         Chunks = ArrayPool<Chunk>.Shared.Rent(1);
         Chunks[0] = new Chunk(EntitiesPerChunk, _componentIdToArrayIndex, types);

-        Size = 1;
-        Capacity = 1;
+        ChunkCount = 1;
+        ChunkCapacity = 1;

         _addEdges = new SparseJaggedArray<Archetype>(BucketSize);
         _removeEdges = new SparseJaggedArray<Archetype>(BucketSize);
@@ -186,11 +186,11 @@ internal int[] LookupArray
     /// <summary>
     ///     The size of a <see cref="Chunk"/> within this <see cref="Archetype"/>, in KB.
     ///     Necessary because the <see cref="Archetype"/> adjusts the size of a <see cref="Chunk"/>.
     /// </summary>
-    public int ChunkSize { get; } = BaseSize;
+    public int ChunkSizeInBytes { get; } = BaseSize;

     /// <summary>
     ///     The minimum number of <see cref="Entity"/>s that should fit into a <see cref="Chunk"/> within this <see cref="Archetype"/>.
-    ///     On the basis of this, the <see cref="ChunkSize"/> is increased.
+    ///     On the basis of this, the <see cref="ChunkSizeInBytes"/> is increased.
     /// </summary>
     public int MinimumAmountOfEntitiesPerChunk { get; } = 100;

     /// <summary>
     ///     How many <see cref="Chunk"/>s have been deposited within the <see cref="Chunks"/> array.
     ///     The total capacity.
     /// </summary>
-    public int Capacity { [MethodImpl(MethodImplOptions.AggressiveInlining)] get; [MethodImpl(MethodImplOptions.AggressiveInlining)] internal set; }
+    public int ChunkCapacity { [MethodImpl(MethodImplOptions.AggressiveInlining)] get; [MethodImpl(MethodImplOptions.AggressiveInlining)] internal set; }

     /// <summary>
     ///     The number of occupied/used <see cref="Chunk"/>s within the <see cref="Chunks"/> array.
     /// </summary>
-    public int Size { [MethodImpl(MethodImplOptions.AggressiveInlining)] get; [MethodImpl(MethodImplOptions.AggressiveInlining)] internal set; }
+    public int ChunkCount { [MethodImpl(MethodImplOptions.AggressiveInlining)] get; [MethodImpl(MethodImplOptions.AggressiveInlining)] internal set; }

     /// <summary>
     ///     An array which stores the <see cref="Chunk"/>s.
-    ///     May contain null references since it is pooled; therefore use the <see cref="Size"/> and <see cref="Capacity"/> for accessing it.
+    ///     May contain null references since it is pooled; therefore use the <see cref="ChunkCount"/> and <see cref="ChunkCapacity"/> for accessing it.
     /// </summary>
     public Chunk[] Chunks { [MethodImpl(MethodImplOptions.AggressiveInlining)] get; [MethodImpl(MethodImplOptions.AggressiveInlining)] internal set; }

     /// <summary>
     ///     Points to the last <see cref="Chunk"/> that is not yet full.
     /// </summary>
-    private ref Chunk LastChunk { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => ref Chunks[Size - 1]; }
+    private ref Chunk LastChunk { [MethodImpl(MethodImplOptions.AggressiveInlining)] get => ref Chunks[ChunkCount - 1]; }

     /// <summary>
     ///     Points to the last <see cref="Slot"/>.
@@ -225,14 +225,14 @@ internal Slot LastSlot
     {
         get
         {
             var lastRow = LastChunk.Size - 1;
             //lastRow = lastRow > 0 ? lastRow : 0; // Make sure no negative slot is returned when chunk is empty.
-            return new(lastRow, Size - 1);
+            return new(lastRow, ChunkCount - 1);
         }
     }

     /// <summary>
     ///     The number of <see cref="Entity"/>s in this <see cref="Archetype"/>.
     /// </summary>
-    public int Entities
+    public int EntityCount
     {
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         get;
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         internal set;
     }

+    /// <summary>
+    ///     The total capacity of <see cref="Entity"/>s in this <see cref="Archetype"/>.
+    /// </summary>
+    public int EntityCapacity
+    {
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        get => ChunkCapacity * EntitiesPerChunk;
+    }
+
     /// <summary>
     ///     Adds an <see cref="Entity"/> to the <see cref="Archetype"/> and offloads it to a <see cref="Chunk"/>.
     ///     Uses the last <see cref="Chunk"/> that is not full; once it is full and the capacity is exhausted, a new <see cref="Chunk"/> is allocated.
@@ -253,15 +262,15 @@ internal bool Add(Entity entity, out Slot slot)
     {
         // Increase the size by one if the current chunk is full and there's capacity, to prevent a new chunk allocation.
         ref var lastChunk = ref LastChunk;
-        Size = lastChunk.Size == lastChunk.Capacity && Size < Capacity ? Size + 1 : Size;
+        ChunkCount = lastChunk.Size == lastChunk.Capacity && ChunkCount < ChunkCapacity ? ChunkCount + 1 : ChunkCount;

         // Fill chunk
         lastChunk = ref LastChunk;
         if (lastChunk.Size != lastChunk.Capacity)
         {
             slot.Index = lastChunk.Add(entity);
-            slot.ChunkIndex = Size - 1;
-            Entities++;
+            slot.ChunkIndex = ChunkCount - 1;
+            EntityCount++;
             return false;
         }

@@ -269,15 +278,15 @@ internal bool Add(Entity entity, out Slot slot)
         // Create new chunk
         var newChunk = new Chunk(EntitiesPerChunk, _componentIdToArrayIndex, Types);
         slot.Index = newChunk.Add(entity);
-        Entities++;
-        slot.ChunkIndex = Size;
+        EntityCount++;
+        slot.ChunkIndex = ChunkCount;

         // Resize chunks & map entity
-        EnsureCapacity(Size + 1);
-        Chunks[Size] = newChunk;
+        EnsureChunkCapacity(ChunkCount + 1);
+        Chunks[ChunkCount] = newChunk;

         // Increase size
-        Size++;
+        ChunkCount++;
         return true;
     }

@@ -293,15 +302,15 @@ internal void Remove(ref Slot slot, out int movedEntityId)
         // Move the last entity from the last chunk into the chunk to replace the removed entity directly
         ref var chunk = ref Chunks[slot.ChunkIndex];
         movedEntityId = chunk.Transfer(slot.Index, ref LastChunk);
-        Entities--;
+        EntityCount--;

         // Return to prevent the chunk count from decreasing while the last chunk is not empty, and to prevent it from becoming 0 or -1.
-        if (LastChunk.Size != 0 || Size <= 1)
+        if (LastChunk.Size != 0 || ChunkCount <= 1)
         {
             return;
         }

-        Size--;
+        ChunkCount--;
     }

     /// <summary>
@@ -404,7 +413,7 @@ internal void SetRange<T>(in Slot from, in Slot to, in T? component = default)
     [MethodImpl(MethodImplOptions.AggressiveInlining)]
     public Enumerator<Chunk> GetEnumerator()
     {
-        return new Enumerator<Chunk>(Chunks.AsSpan(0, Size));
+        return new Enumerator<Chunk>(Chunks.AsSpan(0, ChunkCount));
     }

     /// <summary>
@@ -429,13 +438,13 @@ internal ChunkRangeIterator GetRangeIterator(int to)

     /// <summary>
     ///     Clears this <see cref="Archetype"/>, an efficient method to delete all <see cref="Entity"/>s.
-    ///     Does not dispose any resources nor modify its <see cref="Capacity"/>.
+    ///     Does not dispose any resources nor modify its <see cref="ChunkCapacity"/>.
     /// </summary>
     [MethodImpl(MethodImplOptions.AggressiveInlining)]
     public void Clear()
     {
-        Entities = 0;
-        Size = 1;
+        EntityCount = 0;
+        ChunkCount = 1;
         foreach (ref var chunk in this)
         {
             chunk.Clear();
         }
@@ -450,7 +459,7 @@ public void Clear()
     public override string ToString()
     {
         var types = string.Join(",", Types.Select(p => p.Type.Name).ToArray());
-        return $"Archetype {{ {nameof(Types)} = {{ {types} }}, {nameof(BitSet)} = {{ {BitSet} }}, {nameof(EntitiesPerChunk)} = {EntitiesPerChunk}, {nameof(ChunkSize)} = {ChunkSize}, {nameof(Capacity)} = {Capacity}, {nameof(Size)} = {Size}, {nameof(Entities)} = {Entities} }}";
+        return $"Archetype {{ {nameof(Types)} = {{ {types} }}, {nameof(BitSet)} = {{ {BitSet} }}, {nameof(EntitiesPerChunk)} = {EntitiesPerChunk}, {nameof(ChunkSizeInBytes)} = {ChunkSizeInBytes}, {nameof(ChunkCapacity)} = {ChunkCapacity}, {nameof(ChunkCount)} = {ChunkCount}, {nameof(EntityCapacity)} = {EntityCapacity}, {nameof(EntityCount)} = {EntityCount} }}";
     }
 }

@@ -518,47 +527,51 @@ public int MinimumRequiredChunkSize(ComponentType[] types)
     /// </summary>
     /// <returns>The amount of <see cref="Entity"/>s.</returns>
     public int CalculateEntitiesPerChunk(ComponentType[] types)
     {
-        return ChunkSize / (sizeof(Entity) + types.ToByteSize());
+        return ChunkSizeInBytes / (sizeof(Entity) + types.ToByteSize());
     }

     /// <summary>
     ///     Ensures the capacity of the <see cref="Chunks"/> array.
-    ///     Increases the <see cref="Capacity"/>.
+    ///     Increases the <see cref="ChunkCapacity"/>.
     /// </summary>
     /// <param name="newCapacity">The amount of <see cref="Chunk"/>s required, in total.</param>
     [MethodImpl(MethodImplOptions.AggressiveInlining)]
-    private void EnsureCapacity(int newCapacity)
+    private void EnsureChunkCapacity(int newCapacity)
     {
         // Increase chunk array size
         var newChunks = ArrayPool<Chunk>.Shared.Rent(newCapacity);
-        Array.Copy(Chunks, newChunks, Capacity);
+        Array.Copy(Chunks, newChunks, ChunkCapacity);
         ArrayPool<Chunk>.Shared.Return(Chunks, true);
         Chunks = newChunks;

-        Capacity = newCapacity;
+        ChunkCapacity = newCapacity;
     }

     /// TODO: Currently this only ensures additional entity capacity; instead it should take the whole capacity into account.
     /// <summary>
     ///     Ensures the capacity of the <see cref="Chunks"/> array.
-    ///     Increases the <see cref="Capacity"/>.
+    ///     Increases the <see cref="ChunkCapacity"/>.
     /// </summary>
     /// <param name="newCapacity">The amount of <see cref="Entity"/>s required, in total.</param>
     [MethodImpl(MethodImplOptions.AggressiveInlining)]
     internal void EnsureEntityCapacity(int newCapacity)
     {
+        // TODO: LastChunk does not update itself when fewer entities are copied from one archetype into another than were in it before.
+        // TODO: Because of that, e.g. a chunk at the end of the archetype stays free, so removing an entity again produces an index of -1 and an exception.
+        // TODO: LastChunk MUST somehow update itself during such copy operations? It probably makes no sense in this method? Or maybe it does?
+
         // Calculate amount of required chunks.
-        var freeSpots = (Capacity * EntitiesPerChunk) - Entities;
-        var neededSpots = newCapacity - freeSpots;
-        var neededChunks = (int)Math.Ceiling((float)neededSpots / EntitiesPerChunk);
+        //var freeSpots = EntityCapacity - EntityCount;
+        //var neededSpots = newCapacity - freeSpots;
+        var neededChunks = (int)Math.Ceiling((float)newCapacity / EntitiesPerChunk);

-        if (Capacity-Size > neededChunks)
+        if (ChunkCapacity-ChunkCount > neededChunks)
         {
             return;
         }

         // Set capacity and insert new empty chunks.
-        var previousCapacity = Capacity;
-        EnsureCapacity(previousCapacity + neededChunks);
+        var previousCapacity = ChunkCapacity;
+        EnsureChunkCapacity(previousCapacity + neededChunks);

         for (var index = 0; index < neededChunks; index++)
         {
@@ -567,27 +580,27 @@ internal void EnsureEntityCapacity(int newCapacity)
         }

         // If last chunk was full, add.
-        if (freeSpots == 0)
-        {
-            Size++;
-        }
+        /*if (freeSpots == 0)
+        {
+            ChunkCount++;
+        }*/
     }

     /// <summary>
     ///     Trims the capacity of the <see cref="Chunks"/> array to its used minimum.
-    ///     Reduces the <see cref="Capacity"/>.
+    ///     Reduces the <see cref="ChunkCapacity"/>.
     /// </summary>
     internal void TrimExcess()
     {
         // This always spares one single chunk.
-        var minimalSize = Size > 0 ? Size : 1;
+        var minimalSize = ChunkCount > 0 ? ChunkCount : 1;

         // Decrease chunk size
         var newChunks = ArrayPool<Chunk>.Shared.Rent(minimalSize);
         Array.Copy(Chunks, newChunks, minimalSize);
         ArrayPool<Chunk>.Shared.Return(Chunks, true);
         Chunks = newChunks;
-        Capacity = minimalSize;
+        ChunkCapacity = minimalSize;
     }

     /// <summary>
@@ -604,8 +617,8 @@ internal void Reserve(in int amount)
         var neededChunks = (int)Math.Ceiling((float)neededSpots / EntitiesPerChunk);

         // Set capacity and insert new empty chunks.
-        var previousCapacity = Capacity;
-        EnsureCapacity(previousCapacity + neededChunks);
+        var previousCapacity = ChunkCapacity;
+        EnsureChunkCapacity(previousCapacity + neededChunks);

         for (var index = 0; index < neededChunks; index++)
         {
             var newChunk = new Chunk(EntitiesPerChunk, _componentIdToArrayIndex, Types);
@@ -615,7 +628,7 @@ internal void Reserve(in int amount)

         // If last chunk was full, add.
         if (freeSpots == 0)
         {
-            Size++;
+            ChunkCount++;
         }
     }
 }

@@ -633,16 +646,16 @@ public sealed partial class Archetype
     internal static void Copy(Archetype source, Archetype destination)
     {
         // Make sure the other archetype can fit the additional entities from this archetype.
-        destination.EnsureEntityCapacity(source.Entities);
+        destination.EnsureEntityCapacity(destination.EntityCount + source.EntityCount);

         // Copy chunks into destination chunks
         var sourceChunkIndex = 0;
-        var destinationChunkIndex = destination.Size - 1;
-        while (sourceChunkIndex < source.Size)
+        var destinationChunkIndex = destination.ChunkCount - 1;
+        while (sourceChunkIndex < source.ChunkCount)
         {
             ref var sourceChunk = ref source.Chunks[sourceChunkIndex];
             var index = 0;
-            while (sourceChunk.Size > 0 && destinationChunkIndex < destination.Capacity) // Making sure that we don't go out of bounds
+            while (sourceChunk.Size > 0 && destinationChunkIndex < destination.ChunkCapacity) // Making sure that we don't go out of bounds
             {
                 ref var destinationChunk = ref destination.Chunks[destinationChunkIndex];
@@ -657,23 +670,26 @@ internal static void Copy(Archetype source, Archetype destination)
                 destinationChunk.Size += length;
                 index += length;

-                // Current source chunk still has remaining capacity, resume with next destination chunk.
-                if (destinationChunk.Size >= destinationChunk.Capacity)
+                // Current source chunk still has remaining entities but the destination is full, resume with the next destination chunk.
+                if (destinationChunk.Size == destinationChunk.Capacity)
                 {
                     destinationChunkIndex++;
-                    if(destination.Size + 1 <= destination.Capacity)
-                    {
-                        destination.Size++;
-                    }
                 }
             }

             sourceChunkIndex++;
         }

+        // Set the new chunk count; if the last chunk was left empty by the copy algorithm, reduce the count by one so it points to a valid chunk.
+        destination.ChunkCount = destinationChunkIndex + 1;
+        if (destination.LastChunk.Size == 0)
+        {
+            destination.ChunkCount--;
+        }
+
         // Increase the destination's entities since those were copied; set the source's to zero since it is now empty.
-        destination.Entities += source.Entities;
-        source.Entities = 0;
+        destination.EntityCount += source.EntityCount;
+        source.EntityCount = 0;
     }

     /// <summary>
diff --git a/src/Arch/Core/Enumerators.cs b/src/Arch/Core/Enumerators.cs
index 6c74f9df..3977b879 100644
--- a/src/Arch/Core/Enumerators.cs
+++ b/src/Arch/Core/Enumerators.cs
@@ -112,7 +112,7 @@ public bool MoveNext()
         while (_archetypes.MoveNext())
         {
             var archetype = _archetypes.Current;
-            if (archetype.Entities > 0 && _query.Valid(archetype.BitSet))
+            if (archetype.EntityCount > 0 && _query.Valid(archetype.BitSet))
             {
                 return true;
             }
@@ -199,7 +199,7 @@ public QueryChunkEnumerator(Query query, Span<Archetype> archetypes)
         // Make it move once, otherwise we cannot check Current.Size directly, which results in bad behaviour
         if (_archetypeEnumerator.MoveNext())
         {
-            _index = _archetypeEnumerator.Current.Size;
+            _index = _archetypeEnumerator.Current.ChunkCount;
         }
     }
@@ -225,7 +225,7 @@ public bool MoveNext()
                 return false;
             }

-            _index = _archetypeEnumerator.Current.Size-1;
+            _index = _archetypeEnumerator.Current.ChunkCount-1;
             return true;
         }
     }
@@ -243,7 +243,7 @@ public void Reset()
         // Make it move once, otherwise we cannot check Current.Size directly, which results in bad behaviour
         if (_archetypeEnumerator.MoveNext())
         {
-            _index = _archetypeEnumerator.Current.Size;
+            _index = _archetypeEnumerator.Current.ChunkCount;
         }
     }
diff --git a/src/Arch/Core/Extensions/Dangerous/DangerousArchetypeExtensions.cs b/src/Arch/Core/Extensions/Dangerous/DangerousArchetypeExtensions.cs
index 3bd142b9..8f15103b 100644
--- a/src/Arch/Core/Extensions/Dangerous/DangerousArchetypeExtensions.cs
+++ b/src/Arch/Core/Extensions/Dangerous/DangerousArchetypeExtensions.cs
@@ -20,13 +20,13 @@ public static Archetype CreateArchetype(ComponentType[] types)
     }

     /// <summary>
-    ///     Sets the <see cref="Archetype.Size"/>.
+    ///     Sets the <see cref="Archetype.ChunkCount"/>.
     /// </summary>
     /// <param name="archetype">The <see cref="Archetype"/>.</param>
     /// <param name="size">The size.</param>
     public static void SetSize(this Archetype archetype, int size)
     {
-        archetype.Size = size;
+        archetype.ChunkCount = size;
     }

     /// <summary>
@@ -37,17 +37,17 @@ public static void SetSize(this Archetype archetype, int size)
     public static void SetChunks(this Archetype archetype, List<Chunk> chunks)
     {
         archetype.Chunks = chunks.ToArray();
-        archetype.Capacity = chunks.Count;
+        archetype.ChunkCapacity = chunks.Count;
     }

     /// <summary>
-    ///     Sets the <see cref="Archetype.Entities"/>.
+    ///     Sets the <see cref="Archetype.EntityCount"/>.
     /// </summary>
     /// <param name="archetype">The <see cref="Archetype"/>.</param>
     /// <param name="entities">The number of entities.</param>
     public static void SetEntities(this Archetype archetype, int entities)
     {
-        archetype.Entities = entities;
+        archetype.EntityCount = entities;
     }

     /// <summary>
diff --git a/src/Arch/Core/Extensions/Dangerous/DangerousWorldExtensions.cs b/src/Arch/Core/Extensions/Dangerous/DangerousWorldExtensions.cs
index 34584c4e..64799234 100644
--- a/src/Arch/Core/Extensions/Dangerous/DangerousWorldExtensions.cs
+++ b/src/Arch/Core/Extensions/Dangerous/DangerousWorldExtensions.cs
@@ -19,8 +19,8 @@ public static void SetArchetypes(this World world, List<Archetype> archetypes)
         world.Archetypes.AddRange(archetypes);
         foreach (var archetype in archetypes)
         {
-            world.Size += archetype.Entities;
-            world.Capacity += archetype.EntitiesPerChunk * archetype.Size;
+            world.Size += archetype.EntityCount;
+            world.Capacity += archetype.EntitiesPerChunk * archetype.ChunkCount;
         }
     }

diff --git a/src/Arch/Core/Jobs/World.Jobs.cs b/src/Arch/Core/Jobs/World.Jobs.cs
index 4f941779..39935ddd 100644
--- a/src/Arch/Core/Jobs/World.Jobs.cs
+++ b/src/Arch/Core/Jobs/World.Jobs.cs
@@ -96,7 +96,7 @@ public void InlineParallelChunkQuery<T>(in QueryDescription queryDescription, in
         var query = Query(in queryDescription);
         foreach (var archetype in query.GetArchetypeIterator())
         {
-            var archetypeSize = archetype.Size;
+            var archetypeSize = archetype.ChunkCount;
             var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
             foreach (var range in part)
             {
diff --git a/src/Arch/Core/World.cs b/src/Arch/Core/World.cs
index 40c4725a..6366533e 100644
--- a/src/Arch/Core/World.cs
+++ b/src/Arch/Core/World.cs
@@ -378,7 +378,7 @@ public void TrimExcess()
     {
         // Remove empty archetypes.
         var archetype = Archetypes[index];
-        if (archetype.Entities == 0)
+        if (archetype.EntityCount == 0)
         {
             Capacity += archetype.EntitiesPerChunk; // Since the destruction subtracts that amount, add it beforehand, due to the way we calculate the new capacity.
             DestroyArchetype(archetype);
             continue;
         }

         archetype.TrimExcess();
-        Capacity += archetype.Size * archetype.EntitiesPerChunk; // Since one chunk always exists.
+        Capacity += archetype.ChunkCount * archetype.EntitiesPerChunk; // Since one chunk always exists.
     }
 }

@@ -454,7 +454,7 @@ public int CountEntities(in QueryDescription queryDescription)
     var query = Query(in queryDescription);
     foreach (var archetype in query.GetArchetypeIterator())
     {
-        var entities = archetype.Entities;
+        var entities = archetype.EntityCount;
         counter += entities;
     }

@@ -768,7 +768,7 @@ public void Destroy(in QueryDescription queryDescription)
     var query = Query(in queryDescription);
     foreach (var archetype in query.GetArchetypeIterator())
     {
-        Size -= archetype.Entities;
+        Size -= archetype.EntityCount;
         foreach (ref var chunk in archetype)
         {
             ref var entityFirstElement = ref chunk.Entity(0);
@@ -837,7 +837,7 @@ public void Add<T>(in QueryDescription queryDescription, in T? component = defau
     foreach (var archetype in query.GetArchetypeIterator())
     {
         // Archetypes that already have T should be skipped to prevent undefined behaviour.
-        if (archetype.Entities == 0 || archetype.Has<T>())
+        if (archetype.EntityCount == 0 || archetype.Has<T>())
         {
             continue;
         }
@@ -889,7 +889,7 @@ public void Remove<T>(in QueryDescription queryDescription)
     foreach (var archetype in query.GetArchetypeIterator())
     {
         // Archetypes without T should be skipped to prevent undefined behaviour.
-        if (archetype.Entities <= 0 || !archetype.Has<T>())
+        if (archetype.EntityCount <= 0 || !archetype.Has<T>())
        {
             continue;
         }
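
Note (not part of the patch): a minimal sketch of how the renamed surface reads from calling code after this change. It relies only on the members visible in the diff above (ChunkCount, ChunkCapacity, Chunks, Chunk.Size, EntityCount); the helper name CountByChunks is hypothetical and not an Arch API.

    // Minimal sketch, assuming the renamed Archetype members from this patch.
    using Arch.Core;

    public static class ChunkCountSketch
    {
        // `CountByChunks` is a hypothetical helper, not part of Arch itself.
        public static int CountByChunks(Archetype archetype)
        {
            var total = 0;

            // ChunkCount (formerly Size) is the number of chunks in use; the pooled
            // Chunks array may be longer than that (ChunkCapacity), so iterating
            // Chunks.Length could touch unused or null entries.
            for (var chunkIndex = 0; chunkIndex < archetype.ChunkCount; chunkIndex++)
            {
                total += archetype.Chunks[chunkIndex].Size;
            }

            return total; // Should equal archetype.EntityCount.
        }
    }

Summing CountByChunks over world.Archetypes should, under the same assumptions, match the world's entity count, which is the invariant the reworked Archetype.Copy accounting above is meant to preserve.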