From bb7d3e22f5cd8136ab9d621247ee6554d36b5b0c Mon Sep 17 00:00:00 2001 From: andreakarasho Date: Sat, 10 Feb 2024 20:25:36 +0100 Subject: [PATCH] netstandard2.1 --- src/Archetype.cs | 20 +- src/DictionarySlim.cs | 510 ++++++++++++++++++++++++++++++++++++++++++ src/DotnetAddons.cs | 89 +++++++- src/Iterator.cs | 89 -------- src/Query.cs | 61 ++++- src/SparseSet.cs | 10 +- src/Term.cs | 5 + src/TinyEcs.csproj | 6 +- src/UnsafeSpan.cs | 58 ----- src/World.cs | 35 ++- 10 files changed, 699 insertions(+), 184 deletions(-) create mode 100644 src/DictionarySlim.cs delete mode 100644 src/Iterator.cs delete mode 100644 src/UnsafeSpan.cs diff --git a/src/Archetype.cs b/src/Archetype.cs index 9b5e6dd..b237e30 100644 --- a/src/Archetype.cs +++ b/src/Archetype.cs @@ -11,7 +11,11 @@ public ref T GetReference(int column) where T : struct { EcsAssert.Assert(column >= 0 && column < Components!.Length); ref var array = ref Unsafe.As(ref Components![column]); +#if NET return ref MemoryMarshal.GetArrayDataReference(array); +#else + return ref array[0]; +#endif } [MethodImpl(MethodImplOptions.AggressiveInlining)] @@ -89,7 +93,7 @@ ComponentComparer comparer internal Span Chunks => _chunks.AsSpan(0, (_count + CHUNK_THRESHOLD - 1) / CHUNK_THRESHOLD); [SkipLocalsInit] - public ref ArchetypeChunk GetChunk(int index) + internal ref ArchetypeChunk GetChunk(int index) { if (index >= _chunks.Length) Array.Resize(ref _chunks, _chunks.Length * 2); @@ -266,11 +270,7 @@ private void InsertVertex(Archetype newNode) if (nodeTypeLen < newTypeLen - 1) { -#if NET5_0_OR_GREATER - foreach (ref var edge in CollectionsMarshal.AsSpan(_edgesRight)) -#else - foreach (var edge in _edgesRight) -#endif + foreach (ref var edge in CollectionsMarshal.AsSpan(_edgesRight)) { edge.Archetype.InsertVertex(newNode); } @@ -346,16 +346,14 @@ public void Print() static void PrintRec(Archetype root, int depth, int rootComponent) { Console.WriteLine( - "{0}Parent [{1}] common ID: {2}", + "{0}- Parent [{1}] common ID: {2}", new string('\t', depth), - string.Join(", ", root.Components.Select(s => s.ID)), + string.Join(", ", root.Components.Select(s => Lookup.GetArray(s.ID, 0).ToString() )), rootComponent ); if (root._edgesRight.Count > 0) - Console.WriteLine("{0}Children: ", new string('\t', depth)); - - //Console.WriteLine("{0}[{1}] |{2}| - Table [{3}]", new string('.', depth), string.Join(", ", root.ComponentInfo.Select(s => s.ID)), rootComponent, string.Join(", ", root.Table.Components.Select(s => s.ID))); + Console.WriteLine("{0} Children: ", new string('\t', depth)); foreach (ref readonly var edge in CollectionsMarshal.AsSpan(root._edgesRight)) { diff --git a/src/DictionarySlim.cs b/src/DictionarySlim.cs new file mode 100644 index 0000000..dd78ee5 --- /dev/null +++ b/src/DictionarySlim.cs @@ -0,0 +1,510 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Runtime.CompilerServices; +#nullable disable + +namespace Microsoft.Collections.Extensions +{ + /// + /// A lightweight Dictionary with three principal differences compared to + /// + /// 1) It is possible to do "get or add" in a single lookup using . For + /// values that are value types, this also saves a copy of the value. + /// 2) It assumes it is cheap to equate values. 
+ /// 3) It assumes the keys implement or else Equals() and they are cheap and sufficient. + /// + /// + /// 1) This avoids having to do separate lookups ( + /// followed by . + /// There is not currently an API exposed to get a value by ref without adding if the key is not present. + /// 2) This means it can save space by not storing hash codes. + /// 3) This means it can avoid storing a comparer, and avoid the likely virtual call to a comparer. + /// + [DebuggerTypeProxy(typeof(DictionarySlimDebugView<,>))] + [DebuggerDisplay("Count = {Count}")] + public class DictionarySlim : IReadOnlyCollection> where TKey : IEquatable + { + // We want to initialize without allocating arrays. We also want to avoid null checks. + // Array.Empty would give divide by zero in modulo operation. So we use static one element arrays. + // The first add will cause a resize replacing these with real arrays of three elements. + // Arrays are wrapped in a class to avoid being duplicated for each + private static readonly Entry[] InitialEntries = new Entry[1]; + private int _count; + // 0-based index into _entries of head of free chain: -1 means empty + private int _freeList = -1; + // 1-based index into _entries; 0 means empty + private int[] _buckets; + private Entry[] _entries; + + + [DebuggerDisplay("({key}, {value})->{next}")] + private struct Entry + { + public TKey key; + public TValue value; + // 0-based index of next entry in chain: -1 means end of chain + // also encodes whether this entry _itself_ is part of the free list by changing sign and subtracting 3, + // so -2 means end of free list, -3 means index 0 but on free list, -4 means index 1 but on free list, etc. + public int next; + } + + /// + /// Construct with default capacity. + /// + public DictionarySlim() + { + _buckets = HashHelpers.SizeOneIntArray; + _entries = InitialEntries; + } + + /// + /// Construct with at least the specified capacity for + /// entries before resizing must occur. + /// + /// Requested minimum capacity + public DictionarySlim(int capacity) + { + if (capacity < 0) + ThrowHelper.ThrowCapacityArgumentOutOfRangeException(); + if (capacity < 2) + capacity = 2; // 1 would indicate the dummy array + capacity = HashHelpers.PowerOf2(capacity); + _buckets = new int[capacity]; + _entries = new Entry[capacity]; + } + + /// + /// Count of entries in the dictionary. + /// + public int Count => _count; + + /// + /// Clears the dictionary. Note that this invalidates any active enumerators. + /// + public void Clear() + { + _count = 0; + _freeList = -1; + _buckets = HashHelpers.SizeOneIntArray; + _entries = InitialEntries; + } + + /// + /// Looks for the specified key in the dictionary. + /// + /// Key to look for + /// true if the key is present, otherwise false + public bool ContainsKey(TKey key) + { + if (key == null) ThrowHelper.ThrowKeyArgumentNullException(); + Entry[] entries = _entries; + int collisionCount = 0; + for (int i = _buckets[key.GetHashCode() & (_buckets.Length-1)] - 1; + (uint)i < (uint)entries.Length; i = entries[i].next) + { + if (key.Equals(entries[i].key)) + return true; + if (collisionCount == entries.Length) + { + // The chain of entries forms a loop; which means a concurrent update has happened. + // Break out of the loop and throw, rather than looping forever. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + collisionCount++; + } + + return false; + } + + /// + /// Gets the value if present for the specified key. 
+ /// + /// Key to look for + /// Value found, otherwise default(TValue) + /// true if the key is present, otherwise false + public bool TryGetValue(TKey key, out TValue value) + { + if (key == null) ThrowHelper.ThrowKeyArgumentNullException(); + Entry[] entries = _entries; + int collisionCount = 0; + for (int i = _buckets[key.GetHashCode() & (_buckets.Length - 1)] - 1; + (uint)i < (uint)entries.Length; i = entries[i].next) + { + if (key.Equals(entries[i].key)) + { + value = entries[i].value; + return true; + } + if (collisionCount == entries.Length) + { + // The chain of entries forms a loop; which means a concurrent update has happened. + // Break out of the loop and throw, rather than looping forever. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + collisionCount++; + } + + value = default; + return false; + } + + /// + /// Removes the entry if present with the specified key. + /// + /// Key to look for + /// true if the key is present, false if it is not + public bool Remove(TKey key) + { + if (key == null) ThrowHelper.ThrowKeyArgumentNullException(); + Entry[] entries = _entries; + int bucketIndex = key.GetHashCode() & (_buckets.Length - 1); + int entryIndex = _buckets[bucketIndex] - 1; + + int lastIndex = -1; + int collisionCount = 0; + while (entryIndex != -1) + { + Entry candidate = entries[entryIndex]; + if (candidate.key.Equals(key)) + { + if (lastIndex != -1) + { // Fixup preceding element in chain to point to next (if any) + entries[lastIndex].next = candidate.next; + } + else + { // Fixup bucket to new head (if any) + _buckets[bucketIndex] = candidate.next + 1; + } + + entries[entryIndex] = default; + + entries[entryIndex].next = -3 - _freeList; // New head of free list + _freeList = entryIndex; + + _count--; + return true; + } + lastIndex = entryIndex; + entryIndex = candidate.next; + + if (collisionCount == entries.Length) + { + // The chain of entries forms a loop; which means a concurrent update has happened. + // Break out of the loop and throw, rather than looping forever. + ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + collisionCount++; + } + + return false; + } + + // Not safe for concurrent _reads_ (at least, if either of them add) + // For concurrent reads, prefer TryGetValue(key, out value) + /// + /// Gets the value for the specified key, or, if the key is not present, + /// adds an entry and returns the value by ref. This makes it possible to + /// add or update a value in a single look up operation. + /// + /// Key to look for + /// Reference to the new or existing value + public ref TValue GetOrAddValueRef(TKey key) + { + if (key == null) ThrowHelper.ThrowKeyArgumentNullException(); + Entry[] entries = _entries; + int collisionCount = 0; + int bucketIndex = key.GetHashCode() & (_buckets.Length - 1); + for (int i = _buckets[bucketIndex] - 1; + (uint)i < (uint)entries.Length; i = entries[i].next) + { + if (key.Equals(entries[i].key)) + return ref entries[i].value; + if (collisionCount == entries.Length) + { + // The chain of entries forms a loop; which means a concurrent update has happened. + // Break out of the loop and throw, rather than looping forever. 
+ ThrowHelper.ThrowInvalidOperationException_ConcurrentOperationsNotSupported(); + } + collisionCount++; + } + + return ref AddKey(key, bucketIndex); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + private ref TValue AddKey(TKey key, int bucketIndex) + { + Entry[] entries = _entries; + int entryIndex; + if (_freeList != -1) + { + entryIndex = _freeList; + _freeList = -3 - entries[_freeList].next; + } + else + { + if (_count == entries.Length || entries.Length == 1) + { + entries = Resize(); + bucketIndex = key.GetHashCode() & (_buckets.Length - 1); + // entry indexes were not changed by Resize + } + entryIndex = _count; + } + + entries[entryIndex].key = key; + entries[entryIndex].next = _buckets[bucketIndex] - 1; + _buckets[bucketIndex] = entryIndex + 1; + _count++; + return ref entries[entryIndex].value; + } + + private Entry[] Resize() + { + Debug.Assert(_entries.Length == _count || _entries.Length == 1); // We only copy _count, so if it's longer we will miss some + int count = _count; + int newSize = _entries.Length * 2; + if ((uint)newSize > (uint)int.MaxValue) // uint cast handles overflow + throw new InvalidOperationException("Arg_HTCapacityOverflow"); + + var entries = new Entry[newSize]; + Array.Copy(_entries, 0, entries, 0, count); + + var newBuckets = new int[entries.Length]; + while (count-- > 0) + { + int bucketIndex = entries[count].key.GetHashCode() & (newBuckets.Length - 1); + entries[count].next = newBuckets[bucketIndex] - 1; + newBuckets[bucketIndex] = count + 1; + } + + _buckets = newBuckets; + _entries = entries; + + return entries; + } + + /// + /// Gets an enumerator over the dictionary + /// + public Enumerator GetEnumerator() => new Enumerator(this); // avoid boxing + + /// + /// Gets an enumerator over the dictionary + /// + IEnumerator> IEnumerable>.GetEnumerator() => + new Enumerator(this); + + /// + /// Gets an enumerator over the dictionary + /// + IEnumerator IEnumerable.GetEnumerator() => new Enumerator(this); + + /// + /// Enumerator + /// + public struct Enumerator : IEnumerator> + { + private readonly DictionarySlim _dictionary; + private int _index; + private int _count; + private KeyValuePair _current; + + internal Enumerator(DictionarySlim dictionary) + { + _dictionary = dictionary; + _index = 0; + _count = _dictionary._count; + _current = default; + } + + /// + /// Move to next + /// + public bool MoveNext() + { + if (_count == 0) + { + _current = default; + return false; + } + + _count--; + + while (_dictionary._entries[_index].next < -1) + _index++; + + _current = new KeyValuePair( + _dictionary._entries[_index].key, + _dictionary._entries[_index++].value); + return true; + } + + /// + /// Get current value + /// + public KeyValuePair Current => _current; + + object IEnumerator.Current => _current; + + void IEnumerator.Reset() + { + _index = 0; + _count = _dictionary._count; + _current = default; + } + + /// + /// Dispose the enumerator + /// + public void Dispose() { } + } + } + + internal sealed class DictionarySlimDebugView where K : IEquatable + { + private readonly DictionarySlim _dictionary; + + public DictionarySlimDebugView(DictionarySlim dictionary) + { + _dictionary = dictionary ?? 
throw new ArgumentNullException(nameof(dictionary)); + } + + [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)] + public KeyValuePair[] Items + { + get + { + return _dictionary.ToArray(); + } + } + } + + internal static partial class HashHelpers + { + internal static int PowerOf2(int v) + { + if ((v & (v - 1)) == 0) return v; + int i = 2; + while (i < v) i <<= 1; + return i; + } + + // must never be written to + internal static readonly int[] SizeOneIntArray = new int[1]; + + public const int HashCollisionThreshold = 100; + + // This is the maximum prime smaller than Array.MaxArrayLength + public const int MaxPrimeArrayLength = 0x7FEFFFFD; + + public const int HashPrime = 101; + + // Table of prime numbers to use as hash table sizes. + // A typical resize algorithm would pick the smallest prime number in this array + // that is larger than twice the previous capacity. + // Suppose our Hashtable currently has capacity x and enough elements are added + // such that a resize needs to occur. Resizing first computes 2x then finds the + // first prime in the table greater than 2x, i.e. if primes are ordered + // p_1, p_2, ..., p_i, ..., it finds p_n such that p_n-1 < 2x < p_n. + // Doubling is important for preserving the asymptotic complexity of the + // hashtable operations such as add. Having a prime guarantees that double + // hashing does not lead to infinite loops. IE, your hash function will be + // h1(key) + i*h2(key), 0 <= i < size. h2 and the size must be relatively prime. + // We prefer the low computation costs of higher prime numbers over the increased + // memory allocation of a fixed prime number i.e. when right sizing a HashSet. + public static readonly int[] primes = { + 3, 7, 11, 17, 23, 29, 37, 47, 59, 71, 89, 107, 131, 163, 197, 239, 293, 353, 431, 521, 631, 761, 919, + 1103, 1327, 1597, 1931, 2333, 2801, 3371, 4049, 4861, 5839, 7013, 8419, 10103, 12143, 14591, + 17519, 21023, 25229, 30293, 36353, 43627, 52361, 62851, 75431, 90523, 108631, 130363, 156437, + 187751, 225307, 270371, 324449, 389357, 467237, 560689, 672827, 807403, 968897, 1162687, 1395263, + 1674319, 2009191, 2411033, 2893249, 3471899, 4166287, 4999559, 5999471, 7199369 }; + + public static bool IsPrime(int candidate) + { + if ((candidate & 1) != 0) + { + int limit = (int)Math.Sqrt(candidate); + for (int divisor = 3; divisor <= limit; divisor += 2) + { + if ((candidate % divisor) == 0) + return false; + } + return true; + } + return (candidate == 2); + } + + public static int GetPrime(int min) + { + if (min < 0) + throw new ArgumentException("Hashtable's capacity overflowed and went negative. Check load factor, capacity and the current size of the table."); + + for (int i = 0; i < primes.Length; i++) + { + int prime = primes[i]; + if (prime >= min) + return prime; + } + + //outside of our predefined table. + //compute the hard way. + for (int i = (min | 1); i < int.MaxValue; i += 2) + { + if (IsPrime(i) && ((i - 1) % HashPrime != 0)) + return i; + } + return min; + } + + // Returns size of hashtable to grow to. + public static int ExpandPrime(int oldSize) + { + int newSize = 2 * oldSize; + + // Allow the hashtables to grow to maximum possible size (~2G elements) before encountering capacity overflow. 
+ // Note that this check works even when _items.Length overflowed thanks to the (uint) cast + if ((uint)newSize > MaxPrimeArrayLength && MaxPrimeArrayLength > oldSize) + { + Debug.Assert(MaxPrimeArrayLength == GetPrime(MaxPrimeArrayLength), "Invalid MaxPrimeArrayLength"); + return MaxPrimeArrayLength; + } + + return GetPrime(newSize); + } + } + + internal static class ThrowHelper + { + [MethodImpl(MethodImplOptions.NoInlining)] + internal static void ThrowInvalidOperationException_ConcurrentOperationsNotSupported() + { + throw new InvalidOperationException("InvalidOperation_ConcurrentOperationsNotSupported"); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + internal static void ThrowKeyArgumentNullException() + { + throw new ArgumentNullException("key"); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + internal static void ThrowCapacityArgumentOutOfRangeException() + { + throw new ArgumentOutOfRangeException("capacity"); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + internal static bool ThrowNotSupportedException() + { + throw new NotSupportedException(); + } + } +} diff --git a/src/DotnetAddons.cs b/src/DotnetAddons.cs index f2e80f2..bc2d50c 100644 --- a/src/DotnetAddons.cs +++ b/src/DotnetAddons.cs @@ -40,16 +40,74 @@ public CallerArgumentExpressionAttribute(string parameterName) { } #if NETSTANDARD2_1 -internal unsafe readonly ref struct Ref +// internal unsafe readonly ref struct Ref +// { +// private readonly T* _value; +// +// internal Ref(ref T value) +// { +// _value = Unsafe.AsPointer(ref value); +// } +// +// public ref T Value => ref *_value; +// } + +namespace System.Runtime.InteropServices { - private readonly T* _value; + public static class NativeMemory + { + public static unsafe void* Realloc(void* data, nuint size) + { + return (void*)Marshal.ReAllocHGlobal((IntPtr)data, (IntPtr)((UIntPtr)size).ToPointer()); + } + + public static unsafe void Free(void* data) + { + Marshal.FreeHGlobal((IntPtr)data); + } + + public static unsafe void* AllocZeroed(nuint count, nuint typeSize) + { + var data = (void*) Marshal.AllocHGlobal((IntPtr)((UIntPtr)(count * typeSize)).ToPointer()); + + return data; + } + + public static unsafe void* Alloc(nuint count, nuint typeSize) + { + var data = (void*) Marshal.AllocHGlobal((IntPtr)((UIntPtr)(count * typeSize)).ToPointer()); + + return data; + } + } + + public static class CollectionsMarshal + { + public static Span AsSpan(List? list) + { + if (list == null) + return Span.Empty; - internal Ref(ref T value) - { - _value = Unsafe.AsPointer(ref value); - } + return new Span(Unsafe.As>(list).Value, 0, list.Count); + } + } - public ref T Value => ref *_value; + // public static class MemoryMarshal + // { + // // public static ref T GetReference(Span span) + // // { + // // if (span.IsEmpty) + // // return ref Unsafe.NullRef(); + // // return ref span[0]; + // // } + // // + // // public static ref T GetArrayDataReference(T[]? 
array) + // // { + // // if (array == null || array.Length == 0) + // // return ref Unsafe.NullRef(); + // // return ref array[0]; + // // } + // } } public static class SortExtensions @@ -70,5 +128,22 @@ public static void Sort(this Span span) where T : IComparable } } } + + public static void Sort(this Span span, IComparer comparer) + { + for (int i = 0; i < span.Length - 1; i++) + { + for (int j = 0; j < span.Length - i - 1; j++) + { + if (comparer.Compare(span[j], span[j + 1]) > 0) + { + // Swap the elements + T temp = span[j]; + span[j] = span[j + 1]; + span[j + 1] = temp; + } + } + } + } } #endif diff --git a/src/Iterator.cs b/src/Iterator.cs deleted file mode 100644 index d8a6a0e..0000000 --- a/src/Iterator.cs +++ /dev/null @@ -1,89 +0,0 @@ -// namespace TinyEcs; -// -// public delegate void IteratorDelegate(ref Iterator it); -// -// public readonly ref struct Iterator -// { -// private readonly Span _entities; -// private readonly Archetype _archetype; -// private readonly Span _columns; -// -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// internal Iterator( -// Commands commands, -// Archetype archetype, -// object? userData, -// Span columns -// ) -// : this( -// commands, -// archetype.Count, -// archetype, -// archetype.Entities, -// userData, -// columns -// ) { } -// -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// internal Iterator( -// Commands commands, -// int count, -// Archetype archetype, -// Span entities, -// object? userData, -// Span columns -// ) -// { -// Commands = commands; -// World = commands.World; -// UserData = userData; -// _archetype = archetype; -// _entities = entities; -// Count = count; -// DeltaTime = commands.World.DeltaTime; -// _columns = columns; -// } -// -// public readonly Commands Commands { get; } -// public readonly World World { get; } -// public readonly int Count { get; } -// public readonly float DeltaTime { get; } -// public readonly object? 
UserData { get; } -// -// -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// public unsafe Span Field() where T : struct -// { -// return _archetype.ComponentData(); -// } -// -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// public ref T FieldRef() where T : struct -// { -// return ref MemoryMarshal.GetReference(_archetype.ComponentData()); -// } -// -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// public readonly ref EntityView Entity(int row) => ref _entities[row]; -// -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// public readonly CommandEntityView EntityDeferred(int row) => Commands.Entity(_entities[row]); -// } -// -// [SkipLocalsInit] -// public unsafe readonly ref struct FieldIterator where T : struct -// { -// private readonly ref T _firstElement; -// -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// internal FieldIterator(Span elements) -// { -// _firstElement = ref MemoryMarshal.GetReference(elements); -// } -// -// public readonly ref T this[int index] -// { -// [MethodImpl(MethodImplOptions.AggressiveInlining)] -// get => ref Unsafe.Add(ref _firstElement, index); -// } -// } diff --git a/src/Query.cs b/src/Query.cs index e22d0bb..2efbef2 100644 --- a/src/Query.cs +++ b/src/Query.cs @@ -1,15 +1,13 @@ namespace TinyEcs; [SkipLocalsInit] -public sealed unsafe partial class Query : IDisposable +public sealed partial class Query : IDisposable { public const int TERMS_COUNT = 25; private readonly World _world; private readonly Vec _terms = Vec.Init(TERMS_COUNT); - private Span Terms => _terms.Span; - internal Query(World world) { _world = world; @@ -20,7 +18,8 @@ public void Dispose() _terms.Dispose(); } - public Query With() where T : struct => With(_world.Component().ID); + public Query With() where T : struct + => With(_world.Component().ID); private Query With(int id) { @@ -33,7 +32,8 @@ private Query With(int id) return this; } - public Query Without() where T : struct => Without(_world.Component().ID); + public Query Without() where T : struct + => Without(_world.Component().ID); private Query Without(int id) { @@ -59,11 +59,60 @@ private bool Exists(int id) public ArchetypeEnumerator GetEnumerator() { _cachedArchetypes.Clear(); - _world.FindArchetypes(Terms, _cachedArchetypes); + _world.FindArchetypes(_terms.Span, _cachedArchetypes); return new ArchetypeEnumerator(CollectionsMarshal.AsSpan(_cachedArchetypes)); } + static void QueryRec(Archetype root, Span sortedTerms) + { + var result = root.FindMatch(sortedTerms); + if (result < 0) + { + return; + } + + if (result == 0 && root.Count > 0) + { + // found + } + + var span = CollectionsMarshal.AsSpan(root._edgesRight); + if (span.IsEmpty) + { + return; + } + + ref var start = ref MemoryMarshal.GetReference(span); + ref var end = ref Unsafe.Add(ref start, span.Length); + + while (Unsafe.IsAddressLessThan(ref start, ref end)) + { + QueryRec(start.Archetype, sortedTerms); + + start = ref Unsafe.Add(ref start, 1); + } + } + + public delegate void QueryTemplateWithEntity(ref readonly EntityView entity); + public void EachWithEntity(QueryTemplateWithEntity fn) + { + foreach (var archetype in this) + { + foreach (ref readonly var chunk in archetype) + { + ref var firstEnt = ref chunk.Entities[0]; + ref var last = ref Unsafe.Add(ref firstEnt, chunk.Count); + while (Unsafe.IsAddressLessThan(ref firstEnt, ref last)) + { + fn(in firstEnt); + + firstEnt = ref Unsafe.Add(ref firstEnt, 1); + } + } + } + } + public ref struct ArchetypeEnumerator { private readonly Span _list; diff --git 
a/src/SparseSet.cs b/src/SparseSet.cs index f760c7a..0fcbef6 100644 --- a/src/SparseSet.cs +++ b/src/SparseSet.cs @@ -4,7 +4,7 @@ sealed class EntitySparseSet { private struct Chunk { - public int[] Sparse; + public int[]? Sparse; public T[] Values; } @@ -61,7 +61,7 @@ private ulong NewID(int dense) _dense.Add(0); ref var chunk = ref GetChunkOrCreate((int)index >> 12); - EcsAssert.Assert(chunk.Sparse[(int)index & 0xFFF] == 0); + EcsAssert.Assert(chunk.Sparse![(int)index & 0xFFF] == 0); SparseAssignIndex(ref chunk, index, dense); @@ -96,7 +96,7 @@ public ref T Add(ulong outerIdx, T value) var gen = SplitGeneration(ref outerIdx); var realID = (int)outerIdx & 0xFFF; ref var chunk = ref GetChunkOrCreate((int)outerIdx >> 12); - var dense = chunk.Sparse[realID]; + var dense = chunk.Sparse![realID]; if (dense != 0) { @@ -215,7 +215,7 @@ private void SwapDense(ref Chunk chunkA, int a, int b) [MethodImpl(MethodImplOptions.AggressiveInlining)] private void SparseAssignIndex(ref Chunk chunk, ulong index, int dense) { - chunk.Sparse[(int)index & 0xFFF] = dense; + chunk.Sparse![(int)index & 0xFFF] = dense; _dense[dense] = index; } @@ -364,7 +364,7 @@ public void EnsureCapacity(int newCapacity, bool initZero = false) if (newCapacity <= Capacity) return; - T* ptr = (T*) NativeMemory.Realloc(_data, (nuint) newCapacity * (nuint) sizeof(T)); + var ptr = (T*) NativeMemory.Realloc(_data, (nuint) newCapacity * (nuint) sizeof(T)); if (initZero) Unsafe.InitBlock(&ptr[Count], 0, (uint)((newCapacity - Count) * (uint)sizeof(T))); diff --git a/src/Term.cs b/src/Term.cs index a61d66a..d049369 100644 --- a/src/Term.cs +++ b/src/Term.cs @@ -49,3 +49,8 @@ public enum TermOp : byte { public static implicit operator Term(Without _) => Term.Without(Lookup.Entity.Component.ID); } + +public readonly struct Not where T : struct +{ + public static implicit operator Term(Not _) => Term.Without(Lookup.Entity.Component.ID); +} diff --git a/src/TinyEcs.csproj b/src/TinyEcs.csproj index 0556c47..c2ce02c 100644 --- a/src/TinyEcs.csproj +++ b/src/TinyEcs.csproj @@ -2,15 +2,19 @@ Library - net8.0 enable true preview enable + net8.0;netstandard2.1 + + + + diff --git a/src/UnsafeSpan.cs b/src/UnsafeSpan.cs deleted file mode 100644 index cc81cba..0000000 --- a/src/UnsafeSpan.cs +++ /dev/null @@ -1,58 +0,0 @@ -namespace TinyEcs; - -[SkipLocalsInit] -public ref struct UnsafeSpan where T : struct -{ - private ref T _start; - private readonly ref T _end; - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public UnsafeSpan(ref T start, ref T end) - { - _start = ref start; - _end = ref end; - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public UnsafeSpan(Span span) - { - _start = ref MemoryMarshal.GetReference(span); - _end = ref Unsafe.Add(ref _start, span.Length); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public UnsafeSpan(T[] span) - { - _start = ref MemoryMarshal.GetArrayDataReference(span); - _end = ref Unsafe.Add(ref _start, span.Length); - } - - public ref T Value => ref _start; - public readonly ref T End => ref _end; - public readonly Span Span => this; - - public readonly ref T this[int index] - { - [MethodImpl(MethodImplOptions.AggressiveInlining)] - get => ref Unsafe.Add(ref _start, index); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public readonly bool CanAdvance() => Unsafe.IsAddressLessThan(ref _start, ref _end); - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public ref T Advance() => ref _start = ref Unsafe.Add(ref _start, 1); - - 
[MethodImpl(MethodImplOptions.AggressiveInlining)] - public static implicit operator UnsafeSpan(T[] span) => new(span); - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static implicit operator UnsafeSpan(Span span) => new(span); - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static unsafe implicit operator Span(UnsafeSpan span) - { - var size = (T*)Unsafe.AsPointer(ref span.End) - (T*)Unsafe.AsPointer(ref span.Value); - return MemoryMarshal.CreateSpan(ref span.Value, (int)size); - } -} diff --git a/src/World.cs b/src/World.cs index f387511..e9e43f6 100644 --- a/src/World.cs +++ b/src/World.cs @@ -1,3 +1,5 @@ +using Microsoft.Collections.Extensions; + namespace TinyEcs; public sealed partial class World : IDisposable @@ -5,9 +7,9 @@ public sealed partial class World : IDisposable const ulong ECS_MAX_COMPONENT_FAST_ID = 256; const ulong ECS_START_USER_ENTITY_DEFINE = ECS_MAX_COMPONENT_FAST_ID; - private readonly Archetype _archRoot; + internal readonly Archetype _archRoot; private readonly EntitySparseSet _entities = new(); - private readonly Dictionary _typeIndex = new(); + private readonly DictionarySlim _typeIndex = new(); private readonly ComponentComparer _comparer; private readonly Commands _commands; private int _frame; @@ -259,11 +261,21 @@ static int getHash(Span terms, bool checkSize) var hash = getHash(components, false); var exists = false; - ref var arch = ref create ? ref CollectionsMarshal.GetValueRefOrAddDefault( - _typeIndex, - hash, - out exists - ) : ref CollectionsMarshal.GetValueRefOrNullRef(_typeIndex, hash); + ref var arch = ref Unsafe.NullRef(); + if (create) + { + arch = ref _typeIndex.GetOrAddValueRef(hash); + } + else if (_typeIndex.TryGetValue(hash, out arch)) + { + + } + + // ref var arch = ref create ? ref CollectionsMarshal.GetValueRefOrAddDefault( + // _typeIndex, + // hash, + // out exists + // ) : ref CollectionsMarshal.GetValueRefOrNullRef(_typeIndex, hash); return ref arch; @@ -439,6 +451,7 @@ internal static class Lookup private static int _index = -1; private static readonly Dictionary> _arrayCreator = new Dictionary>(); + private static readonly Dictionary _typesConvertion = new(); public static Array? GetArray(int hashcode, int count) { @@ -447,6 +460,13 @@ internal static class Lookup return fn?.Invoke(count) ?? null; } + public static int GetID(Type type) + { + var ok = _typesConvertion.TryGetValue(type, out var id); + EcsAssert.Assert(ok, $"invalid hashcode {type}"); + return id; + } + [SkipLocalsInit] internal static class Entity where T : struct { @@ -459,6 +479,7 @@ internal static class Entity where T : struct static Entity() { _arrayCreator.Add(Component.ID, count => Size > 0 ? new T[count] : Array.Empty()); + _typesConvertion.Add(typeof(T), Component.ID); } private static int GetSize()
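
The World.GetArchetype change above swaps the Dictionary-based _typeIndex plus CollectionsMarshal.GetValueRefOrAddDefault, which netstandard2.1 does not provide, for DictionarySlim.GetOrAddValueRef. A minimal usage sketch of that single-lookup pattern; the int/string key and value types here are illustrative only:

using Microsoft.Collections.Extensions;

var index = new DictionarySlim<int, string>();

// One lookup: a ref to the existing value, or to a freshly added default(TValue) slot.
ref var slot = ref index.GetOrAddValueRef(42);
if (slot is null)
    slot = "created on first access";

// Reads that must not add an entry keep using TryGetValue, mirroring the create == false
// branch of World.GetArchetype.
index.TryGetValue(42, out var existing);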
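
The CollectionsMarshal.AsSpan shim added to DotnetAddons.cs reaches the backing array of a List<T> by reinterpreting the list as a reference type whose first field lines up with the list's internal items array. A self-contained sketch of that trick; ListView<T> is a hypothetical helper, and relying on List<T>'s field layout is an implementation detail of the BCL rather than a public contract:

using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;

// Assumption: List<T> keeps its storage in a leading T[] field (_items), so a class
// with a matching first field can expose it after an Unsafe.As reinterpret cast.
internal sealed class ListView<T>
{
    public T[]? Items;
}

internal static class CollectionsMarshalShim
{
    public static Span<T> AsSpan<T>(List<T>? list)
    {
        if (list is null || list.Count == 0)
            return Span<T>.Empty;

        // No copy: wrap the list's own array, limited to the populated Count elements.
        var items = Unsafe.As<ListView<T>>(list).Items;
        return new Span<T>(items, 0, list.Count);
    }
}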
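
Marshal.AllocHGlobal hands back uninitialized memory, so an AllocZeroed shim built on it has to clear the block itself before returning it. A minimal sketch of such a variant, assuming the System.Runtime.CompilerServices.Unsafe package is available to the netstandard2.1 target and the requested byte count fits in uint:

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

public static unsafe class NativeMemoryShim
{
    public static void* AllocZeroed(nuint count, nuint typeSize)
    {
        nuint byteCount = count * typeSize;
        void* data = (void*)Marshal.AllocHGlobal((IntPtr)(long)byteCount);

        // AllocHGlobal does not zero; clear the whole block before handing it out.
        Unsafe.InitBlockUnaligned(data, 0, (uint)byteCount);
        return data;
    }

    public static void Free(void* data) => Marshal.FreeHGlobal((IntPtr)data);
}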