From 2e8000fe7784b8a78209c4cdd0f277a9beee482e Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 10 Apr 2025 15:38:13 +0200 Subject: [PATCH 01/44] add caches --- src/Compiler/FSharp.Compiler.Service.fsproj | 1 + src/Compiler/Utilities/Caches.fs | 206 ++++++++++++++++++++ 2 files changed, 207 insertions(+) create mode 100644 src/Compiler/Utilities/Caches.fs diff --git a/src/Compiler/FSharp.Compiler.Service.fsproj b/src/Compiler/FSharp.Compiler.Service.fsproj index 74e59954e8f..af43f511094 100644 --- a/src/Compiler/FSharp.Compiler.Service.fsproj +++ b/src/Compiler/FSharp.Compiler.Service.fsproj @@ -146,6 +146,7 @@ + diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs new file mode 100644 index 00000000000..469f56a3b98 --- /dev/null +++ b/src/Compiler/Utilities/Caches.fs @@ -0,0 +1,206 @@ +namespace FSharp.Compiler + +open System +open System.Collections.Concurrent +open System.Threading +open System.Threading.Tasks +open System.Diagnostics + +[] +// Default Seq.* function have one issue - when doing `Seq.sortBy`, it will call a `ToArray` on the collection, +// which is *not* calling `ConcurrentDictionary.ToArray`, but uses a custom one instead (treating it as `ICollection`) +// this leads to and exception when trying to evict without locking (The index is equal to or greater than the length of the array, +// or the number of elements in the dictionary is greater than the available space from index to the end of the destination array.) +// this is casuedby insertions happened between reading the `Count` and doing the `CopyTo`. +// This solution introduces a custom `ConcurrentDictionary.sortBy` which will be calling a proper `CopyTo`, the one on the ConcurrentDictionary itself. +module ConcurrentDictionary = + + open System.Collections + open System.Collections.Generic + + let inline mkSeq f = + { new IEnumerable<'U> with + member _.GetEnumerator() = f () + + interface IEnumerable with + member _.GetEnumerator() = (f () :> IEnumerator) + } + + let inline mkDelayedSeq (f: unit -> IEnumerable<'T>) = mkSeq (fun () -> f().GetEnumerator()) + + let inline sortBy ([] projection) (source: ConcurrentDictionary<_, _>) = + mkDelayedSeq (fun () -> + let array = source.ToArray() + Array.sortInPlaceBy projection array + array :> seq<_>) + +[] +type CachingStrategy = + | LRU + | LFU + +[] +type EvictionMethod = + | Blocking + | Background + +[] +type CacheOptions = + { + MaximumCapacity: int + PercentageToEvict: int + Strategy: CachingStrategy + EvictionMethod: EvictionMethod + LevelOfConcurrency: int + } + + static member Default = + { + MaximumCapacity = 100 + PercentageToEvict = 5 + Strategy = CachingStrategy.LRU + LevelOfConcurrency = Environment.ProcessorCount + EvictionMethod = EvictionMethod.Blocking + } + +[] +type CachedEntity<'Value> = + val Value: 'Value + val mutable LastAccessed: int64 + val mutable AccessCount: int64 + + new(value: 'Value) = + { + Value = value + LastAccessed = DateTimeOffset.Now.Ticks + AccessCount = 0L + } + +[] +[] +type Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) = + + let cacheHit = Event<_ * _>() + let cacheMiss = Event<_>() + let eviction = Event<_>() + + [] + member val CacheHit = cacheHit.Publish + + [] + member val CacheMiss = cacheMiss.Publish + + [] + member val Eviction = eviction.Publish + + // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. 
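+    // As a worked example of that sizing (using the defaults above, so just an illustration):
+    // MaximumCapacity = 100 and PercentageToEvict = 5 give Create an initial capacity of
+    // 100 + (100 * 5 / 100) = 105 entries, so the dictionary should not need to grow before
+    // an eviction pass has a chance to run.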
+ member val Store = ConcurrentDictionary<_, CachedEntity<'Value>>(options.LevelOfConcurrency, capacity) + + static member Create(options: CacheOptions) = + let capacity = + options.MaximumCapacity + + (options.MaximumCapacity * options.PercentageToEvict / 100) + + let cts = new CancellationTokenSource() + let cache = new Cache<'Key, 'Value>(options, capacity, cts) + + if options.EvictionMethod = EvictionMethod.Background then + Task.Run(cache.TryEvictTask, cts.Token) |> ignore + + cache + + member this.GetStats() = + {| + Capacity = options.MaximumCapacity + PercentageToEvict = options.PercentageToEvict + Strategy = options.Strategy + LevelOfConcurrency = options.LevelOfConcurrency + Count = this.Store.Count + MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed + LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed + MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount + LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount + |} + + member private this.CalculateEvictionCount() = + if this.Store.Count >= options.MaximumCapacity then + (this.Store.Count - options.MaximumCapacity) + + (options.MaximumCapacity * options.PercentageToEvict / 100) + else + 0 + + // TODO: All of these are proofs of concept, a very naive implementation of eviction strategies, it will always walk the dictionary to find the items to evict, this is not efficient. + member private this.TryGetPickToEvict() = + this.Store + |> match options.Strategy with + | CachingStrategy.LRU -> ConcurrentDictionary.sortBy _.Value.LastAccessed + | CachingStrategy.LFU -> ConcurrentDictionary.sortBy _.Value.AccessCount + |> Seq.take (this.CalculateEvictionCount()) + |> Seq.map (fun x -> x.Key) + + // TODO: Explore an eviction shortcut, some sort of list of keys to evict first, based on the strategy. + member private this.TryEvictItems() = + if this.CalculateEvictionCount() > 0 then + for key in this.TryGetPickToEvict() do + match this.Store.TryRemove(key) with + | true, _ -> eviction.Trigger(key) + | _ -> () // TODO: We probably want to count eviction misses as well? + + // TODO: Shall this be a safer task, wrapping everything in try .. with, so it's not crashing silently? + member private this.TryEvictTask() = + backgroundTask { + while not cts.Token.IsCancellationRequested do + let evictionCount = this.CalculateEvictionCount() + + if evictionCount > 0 then + this.TryEvictItems() + + let utilization = (this.Store.Count / options.MaximumCapacity) + // So, based on utilization this will scale the delay between 0 and 1 seconds. + // Worst case scenario would be when 1 second delay happens, + // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. + // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. + // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. + let delay = 1000 - (1000 * utilization) + + if delay > 0 then + do! 
Task.Delay(delay) + } + + member this.TryEvict() = + if this.CalculateEvictionCount() > 0 then + match options.EvictionMethod with + | EvictionMethod.Blocking -> this.TryEvictItems() + | EvictionMethod.Background -> () + + member this.TryGet(key, value: outref<'Value>) = + match this.Store.TryGetValue(key) with + | true, cachedEntity -> + // this is fine to be non-atomic, I guess, we are okay with race if the time is within the time of multiple concurrent calls. + cachedEntity.LastAccessed <- DateTimeOffset.Now.Ticks + let _ = Interlocked.Increment(&cachedEntity.AccessCount) + cacheHit.Trigger(key, cachedEntity.Value) + value <- cachedEntity.Value + true + | _ -> + cacheMiss.Trigger(key) + value <- Unchecked.defaultof<'Value> + false + + member this.TryAdd(key, value: 'Value, ?update: bool) = + let update = defaultArg update false + + this.TryEvict() + + let value = CachedEntity<'Value>(value) + + if update then + let _ = this.Store.AddOrUpdate(key, value, (fun _ _ -> value)) + true + else + this.Store.TryAdd(key, value) + + interface IDisposable with + member _.Dispose() = cts.Cancel() + + member this.Dispose() = (this :> IDisposable).Dispose() From 17f2d9325c2792e2c0ba0d199d7ec6552dd893fc Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 10 Apr 2025 15:38:26 +0200 Subject: [PATCH 02/44] plug it in --- src/Compiler/Checking/TypeRelations.fs | 8 ++++---- src/Compiler/Checking/import.fs | 3 ++- src/Compiler/Checking/import.fsi | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/Compiler/Checking/TypeRelations.fs b/src/Compiler/Checking/TypeRelations.fs index 2cb5dd4057a..daa5a656415 100644 --- a/src/Compiler/Checking/TypeRelations.fs +++ b/src/Compiler/Checking/TypeRelations.fs @@ -102,8 +102,8 @@ let TypesFeasiblyEquivStripMeasures g amap m ty1 ty2 = TypesFeasiblyEquivalent true 0 g amap m ty1 ty2 let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = - if g.compilationMode = CompilationMode.OneOff && g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then - match amap.TypeSubsumptionCache.TryGetValue(key) with + if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then + match amap.TypeSubsumptionCache.TryGet(key) with | true, subsumes -> ValueSome subsumes | false, _ -> @@ -112,8 +112,8 @@ let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = ValueNone let inline UpdateCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key subsumes : unit = - if g.compilationMode = CompilationMode.OneOff && g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then - amap.TypeSubsumptionCache[key] <- subsumes + if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then + amap.TypeSubsumptionCache.TryAdd(key, subsumes) |> ignore /// The feasible coercion relation. Part of the language spec. 
let rec TypeFeasiblySubsumesType ndeep (g: TcGlobals) (amap: ImportMap) m (ty1: TType) (canCoerce: CanCoerce) (ty2: TType) = diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index c87d6cdad03..836087f2534 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -106,7 +106,8 @@ type [] TTypeCacheKey = type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = let typeRefToTyconRefCache = ConcurrentDictionary() - let typeSubsumptionCache = ConcurrentDictionary(System.Environment.ProcessorCount, 1024) + let typeSubsumptionCache = + Cache.Create({ CacheOptions.Default with MaximumCapacity = 1024 }) member _.g = g diff --git a/src/Compiler/Checking/import.fsi b/src/Compiler/Checking/import.fsi index c387558fcba..043692ac41c 100644 --- a/src/Compiler/Checking/import.fsi +++ b/src/Compiler/Checking/import.fsi @@ -73,7 +73,7 @@ type ImportMap = member g: TcGlobals /// Type subsumption cache - member TypeSubsumptionCache: ConcurrentDictionary + member TypeSubsumptionCache: Cache module Nullness = From 17d2cbbee54e6d732f04f76c2a488c7639f8586a Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 10 Apr 2025 18:04:17 +0200 Subject: [PATCH 03/44] internal --- src/Compiler/Utilities/Caches.fs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 469f56a3b98..6365aa660f5 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -13,7 +13,7 @@ open System.Diagnostics // or the number of elements in the dictionary is greater than the available space from index to the end of the destination array.) // this is casuedby insertions happened between reading the `Count` and doing the `CopyTo`. // This solution introduces a custom `ConcurrentDictionary.sortBy` which will be calling a proper `CopyTo`, the one on the ConcurrentDictionary itself. 
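 // As a usage sketch (mirroring how Cache.TryGetPickToEvict consumes this helper), eviction
 // candidates are taken from the snapshot-friendly copy, e.g.
 //     store |> ConcurrentDictionary.sortBy _.Value.LastAccessed |> Seq.take evictionCount
 // where `store` is the cache's ConcurrentDictionary<'Key, CachedEntity<'Value>>; this routes
 // through ConcurrentDictionary.ToArray rather than the racy ICollection.CopyTo path described above.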
-module ConcurrentDictionary = +module internal ConcurrentDictionary = open System.Collections open System.Collections.Generic @@ -35,17 +35,17 @@ module ConcurrentDictionary = array :> seq<_>) [] -type CachingStrategy = +type internal CachingStrategy = | LRU | LFU [] -type EvictionMethod = +type internal EvictionMethod = | Blocking | Background [] -type CacheOptions = +type internal CacheOptions = { MaximumCapacity: int PercentageToEvict: int @@ -64,7 +64,7 @@ type CacheOptions = } [] -type CachedEntity<'Value> = +type internal CachedEntity<'Value> = val Value: 'Value val mutable LastAccessed: int64 val mutable AccessCount: int64 @@ -78,7 +78,7 @@ type CachedEntity<'Value> = [] [] -type Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) = +type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) = let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() From ced43c564dd61ac84afed1a2106de460b82846cd Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 10 Apr 2025 18:49:58 +0200 Subject: [PATCH 04/44] ok --- src/Compiler/Utilities/Caches.fs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 6365aa660f5..5e9ea27eb10 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -109,18 +109,18 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) cache - member this.GetStats() = - {| - Capacity = options.MaximumCapacity - PercentageToEvict = options.PercentageToEvict - Strategy = options.Strategy - LevelOfConcurrency = options.LevelOfConcurrency - Count = this.Store.Count - MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed - LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed - MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount - LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount - |} + //member this.GetStats() = + // {| + // Capacity = options.MaximumCapacity + // PercentageToEvict = options.PercentageToEvict + // Strategy = options.Strategy + // LevelOfConcurrency = options.LevelOfConcurrency + // Count = this.Store.Count + // MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed + // LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed + // MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount + // LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount + // |} member private this.CalculateEvictionCount() = if this.Store.Count >= options.MaximumCapacity then From 08d37300fcdcd02a998eb19b2bd84112cb05f56b Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sat, 12 Apr 2025 11:42:09 +0200 Subject: [PATCH 05/44] trace count in incremental use --- src/Compiler/Checking/TypeRelations.fs | 12 ++--- src/Compiler/Checking/import.fs | 7 ++- src/Compiler/Utilities/Caches.fs | 44 +++++++++++++------ .../src/FSharp.Editor/Common/Logging.fs | 31 ++++++++----- .../LanguageService/LanguageService.fs | 8 ++-- 5 files changed, 63 insertions(+), 39 deletions(-) diff --git a/src/Compiler/Checking/TypeRelations.fs b/src/Compiler/Checking/TypeRelations.fs index daa5a656415..ffece895ccd 100644 --- a/src/Compiler/Checking/TypeRelations.fs 
+++ b/src/Compiler/Checking/TypeRelations.fs @@ -101,18 +101,18 @@ let TypesFeasiblyEquiv ndeep g amap m ty1 ty2 = let TypesFeasiblyEquivStripMeasures g amap m ty1 ty2 = TypesFeasiblyEquivalent true 0 g amap m ty1 ty2 -let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = - if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then +let inline TryGetCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key = + //if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then match amap.TypeSubsumptionCache.TryGet(key) with | true, subsumes -> ValueSome subsumes | false, _ -> ValueNone - else - ValueNone + //else + // ValueNone -let inline UpdateCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key subsumes : unit = - if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then +let inline UpdateCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key subsumes : unit = + //if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then amap.TypeSubsumptionCache.TryAdd(key, subsumes) |> ignore /// The feasible coercion relation. Part of the language spec. diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 836087f2534..e86191af86f 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -90,6 +90,8 @@ type [] TTypeCacheKey = combined +let typeSubsumptionCache = lazy Cache.Create({ CacheOptions.Default with EvictionMethod = EvictionMethod.Background }) + //------------------------------------------------------------------------- // Import an IL types as F# types. //------------------------------------------------------------------------- @@ -106,16 +108,13 @@ type [] TTypeCacheKey = type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = let typeRefToTyconRefCache = ConcurrentDictionary() - let typeSubsumptionCache = - Cache.Create({ CacheOptions.Default with MaximumCapacity = 1024 }) - member _.g = g member _.assemblyLoader = assemblyLoader member _.ILTypeRefToTyconRefCache = typeRefToTyconRefCache - member _.TypeSubsumptionCache = typeSubsumptionCache + member _.TypeSubsumptionCache = typeSubsumptionCache.Value let CanImportILScopeRef (env: ImportMap) m scoref = diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 5e9ea27eb10..c0306b4b400 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -6,6 +6,8 @@ open System.Threading open System.Threading.Tasks open System.Diagnostics +open FSharp.Compiler.Diagnostics + [] // Default Seq.* function have one issue - when doing `Seq.sortBy`, it will call a `ToArray` on the collection, // which is *not* calling `ConcurrentDictionary.ToArray`, but uses a custom one instead (treating it as `ICollection`) @@ -56,7 +58,7 @@ type internal CacheOptions = static member Default = { - MaximumCapacity = 100 + MaximumCapacity = 500_000 PercentageToEvict = 5 Strategy = CachingStrategy.LRU LevelOfConcurrency = Environment.ProcessorCount @@ -84,6 +86,8 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let cacheMiss = Event<_>() let eviction = Event<_>() + let mutable maxCount = 0 + [] member val CacheHit = cacheHit.Publish @@ -104,23 +108,25 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) + Task.Run(cache.TraceSize, cts.Token) |> ignore + if options.EvictionMethod = 
EvictionMethod.Background then Task.Run(cache.TryEvictTask, cts.Token) |> ignore cache - //member this.GetStats() = - // {| - // Capacity = options.MaximumCapacity - // PercentageToEvict = options.PercentageToEvict - // Strategy = options.Strategy - // LevelOfConcurrency = options.LevelOfConcurrency - // Count = this.Store.Count - // MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed - // LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed - // MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount - // LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount - // |} + member this.GetStats(): obj = + {| + Capacity = options.MaximumCapacity + PercentageToEvict = options.PercentageToEvict + Strategy = options.Strategy + LevelOfConcurrency = options.LevelOfConcurrency + Count = this.Store.Count + MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed + LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed + MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount + LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount + |} member private this.CalculateEvictionCount() = if this.Store.Count >= options.MaximumCapacity then @@ -146,11 +152,21 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) | true, _ -> eviction.Trigger(key) | _ -> () // TODO: We probably want to count eviction misses as well? + member private this.TraceSize() = + backgroundTask { + while not cts.Token.IsCancellationRequested do + if this.Store.Count > maxCount then + maxCount <- this.Store.Count + use _ = Activity.start "CacheSize" (seq { "size", string maxCount }) + () + do! Task.Delay(1000) + } + // TODO: Shall this be a safer task, wrapping everything in try .. with, so it's not crashing silently? 
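     // A sketch of what that safer variant could look like (Trace from System.Diagnostics is only
     // assumed here as the error sink, not something this change introduces):
     //     try this.TryEvictItems() with e -> Trace.TraceError(string e)
     // so a failing eviction pass gets reported instead of silently ending the background loop.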
member private this.TryEvictTask() = backgroundTask { while not cts.Token.IsCancellationRequested do - let evictionCount = this.CalculateEvictionCount() + let evictionCount = 0 // this.CalculateEvictionCount() if evictionCount > 0 then this.TryEvictItems() diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index b0f56df3234..845c71cc73c 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -135,16 +135,16 @@ module Activity = String.replicate (loop activity 0) " " let collectTags (activity: Activity) = - [ for tag in activity.Tags -> $"{tag.Key}: %A{tag.Value}" ] + [ for tag in activity.Tags -> $"{tag.Key}: {tag.Value}" ] |> String.concat ", " let listener = new ActivityListener( - ShouldListenTo = (fun source -> source.Name = FSharp.Compiler.Diagnostics.ActivityNames.FscSourceName), + ShouldListenTo = (fun source -> source.Name = ActivityNames.FscSourceName), Sample = (fun context -> if context.Name.Contains(filter) then - ActivitySamplingResult.AllDataAndRecorded + ActivitySamplingResult.AllData else ActivitySamplingResult.None), ActivityStarted = (fun a -> logMsg $"{indent a}{a.OperationName} {collectTags a}") @@ -152,13 +152,24 @@ module Activity = ActivitySource.AddActivityListener(listener) - let export () = - OpenTelemetry.Sdk - .CreateTracerProviderBuilder() - .AddSource(ActivityNames.FscSourceName) - .SetResourceBuilder(ResourceBuilder.CreateDefault().AddService(serviceName = "F#", serviceVersion = "1.0.0")) - .AddOtlpExporter() - .Build() + let exportTraces() = + let provider = + // Configure OpenTelemetry export. Traces can be viewed in Jaeger or other compatible tools. + OpenTelemetry.Sdk.CreateTracerProviderBuilder() + .AddSource(ActivityNames.FscSourceName) + .ConfigureResource(fun r -> r.AddService("F#") |> ignore) + .AddOtlpExporter(fun o -> + // Empirical values to ensure no traces are lost and no significant delay at the end of test run. 
+ o.TimeoutMilliseconds <- 200 + o.BatchExportProcessorOptions.MaxQueueSize <- 16384 + o.BatchExportProcessorOptions.ScheduledDelayMilliseconds <- 100 + ) + .Build() + let a = Activity.startNoTags "FSharpPackage" + fun () -> + a.Dispose() + provider.ForceFlush(5000) |> ignore + provider.Dispose() let listenToAll () = listen "" #endif diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index 5cc9cec2943..30f62e2a4df 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -340,11 +340,9 @@ type internal FSharpPackage() as this = let mutable solutionEventsOpt = None -#if DEBUG - let _traceProvider = Logging.Activity.export () - let _logger = Logging.Activity.listenToAll () - // Logging.Activity.listen "IncrementalBuild" -#endif + #if DEBUG + do Logging.Activity.listen "CacheSize" + #endif // FSI-LINKAGE-POINT: unsited init do FSharp.Interactive.Hooks.fsiConsoleWindowPackageCtorUnsited (this :> Package) From ffd764df9ee8428b68dc14e589cd411f30dc4c5e Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sat, 12 Apr 2025 12:23:54 +0200 Subject: [PATCH 06/44] show count in release config too --- vsintegration/src/FSharp.Editor/Common/Logging.fs | 9 ++++----- .../src/FSharp.Editor/LanguageService/LanguageService.fs | 2 -- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 845c71cc73c..65e9f2d12b3 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -118,11 +118,8 @@ module Logging = let logExceptionWithContext (ex: Exception, context) = logErrorf "Context: %s\nException Message: %s\nStack Trace: %s" context ex.Message ex.StackTrace -#if DEBUG -module Activity = - open OpenTelemetry.Resources - open OpenTelemetry.Trace +module Activity = let listen filter = let indent (activity: Activity) = @@ -151,7 +148,9 @@ module Activity = ) ActivitySource.AddActivityListener(listener) - +#if DEBUG + open OpenTelemetry.Resources + open OpenTelemetry.Trace let exportTraces() = let provider = // Configure OpenTelemetry export. Traces can be viewed in Jaeger or other compatible tools. 
diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index 30f62e2a4df..fb4508214d9 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -340,9 +340,7 @@ type internal FSharpPackage() as this = let mutable solutionEventsOpt = None - #if DEBUG do Logging.Activity.listen "CacheSize" - #endif // FSI-LINKAGE-POINT: unsited init do FSharp.Interactive.Hooks.fsiConsoleWindowPackageCtorUnsited (this :> Package) From 5f2c535e617b98c48d902a812416bb1a7428a592 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sat, 12 Apr 2025 19:18:04 +0200 Subject: [PATCH 07/44] tune cache --- src/Compiler/Checking/import.fs | 6 ++--- src/Compiler/Checking/import.fsi | 2 +- src/Compiler/Driver/CompilerImports.fs | 8 ++++++- src/Compiler/Utilities/Caches.fs | 24 +++++++------------ .../LanguageService/LanguageService.fs | 2 +- 5 files changed, 20 insertions(+), 22 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index e86191af86f..f20164e97f2 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -90,8 +90,6 @@ type [] TTypeCacheKey = combined -let typeSubsumptionCache = lazy Cache.Create({ CacheOptions.Default with EvictionMethod = EvictionMethod.Background }) - //------------------------------------------------------------------------- // Import an IL types as F# types. //------------------------------------------------------------------------- @@ -105,7 +103,7 @@ let typeSubsumptionCache = lazy Cache.Create({ CacheOptions /// using tcImports.GetImportMap() if needed, and it is not harmful if multiple instances are used. The object /// serves as an interface through to the tables stored in the primary TcImports structures defined in CompileOps.fs. [] -type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = +type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader, typeSubsumptionCache: Cache) = let typeRefToTyconRefCache = ConcurrentDictionary() member _.g = g @@ -114,7 +112,7 @@ type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = member _.ILTypeRefToTyconRefCache = typeRefToTyconRefCache - member _.TypeSubsumptionCache = typeSubsumptionCache.Value + member _.TypeSubsumptionCache = typeSubsumptionCache let CanImportILScopeRef (env: ImportMap) m scoref = diff --git a/src/Compiler/Checking/import.fsi b/src/Compiler/Checking/import.fsi index 043692ac41c..3a7bd6e8437 100644 --- a/src/Compiler/Checking/import.fsi +++ b/src/Compiler/Checking/import.fsi @@ -64,7 +64,7 @@ type TTypeCacheKey = /// serves as an interface through to the tables stored in the primary TcImports structures defined in CompileOps.fs. 
[] type ImportMap = - new: g: TcGlobals * assemblyLoader: AssemblyLoader -> ImportMap + new: g: TcGlobals * assemblyLoader: AssemblyLoader * typeSubsumptionCache: Cache -> ImportMap /// The AssemblyLoader for the import context member assemblyLoader: AssemblyLoader diff --git a/src/Compiler/Driver/CompilerImports.fs b/src/Compiler/Driver/CompilerImports.fs index 4ab1ca3d7e4..afb067ae6c9 100644 --- a/src/Compiler/Driver/CompilerImports.fs +++ b/src/Compiler/Driver/CompilerImports.fs @@ -1305,6 +1305,12 @@ and [] TcImports | None -> false | None -> false + let typeSubsumptionCache = lazy Cache.Create({ + CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + PercentageToEvict = 20 + MaximumCapacity = 200_000 }) + member internal _.Base = CheckDisposed() importsBase @@ -1704,7 +1710,7 @@ and [] TcImports member _.RecordGeneratedTypeRoot root = tcImports.RecordGeneratedTypeRoot root } #endif - ImportMap(tcImports.GetTcGlobals(), loaderInterface) + ImportMap(tcImports.GetTcGlobals(), loaderInterface, typeSubsumptionCache.Value) // Note the tcGlobals are only available once mscorlib and fslib have been established. For TcImports, // they are logically only needed when converting AbsIL data structures into F# data structures, and diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index c0306b4b400..5203b15b954 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -58,7 +58,7 @@ type internal CacheOptions = static member Default = { - MaximumCapacity = 500_000 + MaximumCapacity = 10_000 PercentageToEvict = 5 Strategy = CachingStrategy.LRU LevelOfConcurrency = Environment.ProcessorCount @@ -86,7 +86,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let cacheMiss = Event<_>() let eviction = Event<_>() - let mutable maxCount = 0 + let mutable currentCapacity = capacity [] member val CacheHit = cacheHit.Publish @@ -105,11 +105,11 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) options.MaximumCapacity + (options.MaximumCapacity * options.PercentageToEvict / 100) + use _ = Activity.start "Cache.Created" (seq { "capacity", string capacity }) + let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) - Task.Run(cache.TraceSize, cts.Token) |> ignore - if options.EvictionMethod = EvictionMethod.Background then Task.Run(cache.TryEvictTask, cts.Token) |> ignore @@ -152,23 +152,17 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) | true, _ -> eviction.Trigger(key) | _ -> () // TODO: We probably want to count eviction misses as well? - member private this.TraceSize() = - backgroundTask { - while not cts.Token.IsCancellationRequested do - if this.Store.Count > maxCount then - maxCount <- this.Store.Count - use _ = Activity.start "CacheSize" (seq { "size", string maxCount }) - () - do! Task.Delay(1000) - } - // TODO: Shall this be a safer task, wrapping everything in try .. with, so it's not crashing silently? member private this.TryEvictTask() = backgroundTask { while not cts.Token.IsCancellationRequested do - let evictionCount = 0 // this.CalculateEvictionCount() + let evictionCount = this.CalculateEvictionCount() if evictionCount > 0 then + let exceeded = this.Store.Count > currentCapacity + if exceeded then + currentCapacity <- this.Store.Count + use _ = Activity.start "Cache.Eviction" (seq { yield "Store.Count", string this.Store.Count; if exceeded then yield "RESIZE", "!" 
}) this.TryEvictItems() let utilization = (this.Store.Count / options.MaximumCapacity) diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index fb4508214d9..f8a3d3e0e32 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -340,7 +340,7 @@ type internal FSharpPackage() as this = let mutable solutionEventsOpt = None - do Logging.Activity.listen "CacheSize" + do Logging.Activity.listen "Cache" // FSI-LINKAGE-POINT: unsited init do FSharp.Interactive.Hooks.fsiConsoleWindowPackageCtorUnsited (this :> Package) From a7d4605f30239f9190c67a2898f71fc7585a4a84 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sat, 12 Apr 2025 19:41:49 +0200 Subject: [PATCH 08/44] fantomas --- src/Compiler/Driver/CompilerImports.fs | 15 ++++++++++----- src/Compiler/Utilities/Caches.fs | 15 +++++++++++++-- vsintegration/src/FSharp.Editor/Common/Logging.fs | 15 ++++++++------- 3 files changed, 31 insertions(+), 14 deletions(-) diff --git a/src/Compiler/Driver/CompilerImports.fs b/src/Compiler/Driver/CompilerImports.fs index afb067ae6c9..12418ad36cb 100644 --- a/src/Compiler/Driver/CompilerImports.fs +++ b/src/Compiler/Driver/CompilerImports.fs @@ -1305,11 +1305,16 @@ and [] TcImports | None -> false | None -> false - let typeSubsumptionCache = lazy Cache.Create({ - CacheOptions.Default with - EvictionMethod = EvictionMethod.Background - PercentageToEvict = 20 - MaximumCapacity = 200_000 }) + let typeSubsumptionCache = + lazy + Cache + .Create( + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + PercentageToEvict = 20 + MaximumCapacity = 200_000 + } + ) member internal _.Base = CheckDisposed() diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 5203b15b954..c2f62538e16 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -115,7 +115,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) cache - member this.GetStats(): obj = + member this.GetStats() : obj = {| Capacity = options.MaximumCapacity PercentageToEvict = options.PercentageToEvict @@ -160,9 +160,20 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) if evictionCount > 0 then let exceeded = this.Store.Count > currentCapacity + if exceeded then currentCapacity <- this.Store.Count - use _ = Activity.start "Cache.Eviction" (seq { yield "Store.Count", string this.Store.Count; if exceeded then yield "RESIZE", "!" }) + + use _ = + Activity.start + "Cache.Eviction" + (seq { + yield "Store.Count", string this.Store.Count + + if exceeded then + yield "RESIZE", "!" 
+ }) + this.TryEvictItems() let utilization = (this.Store.Count / options.MaximumCapacity) diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 65e9f2d12b3..e978d1e19d9 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -118,7 +118,6 @@ module Logging = let logExceptionWithContext (ex: Exception, context) = logErrorf "Context: %s\nException Message: %s\nStack Trace: %s" context ex.Message ex.StackTrace - module Activity = let listen filter = @@ -132,8 +131,7 @@ module Activity = String.replicate (loop activity 0) " " let collectTags (activity: Activity) = - [ for tag in activity.Tags -> $"{tag.Key}: {tag.Value}" ] - |> String.concat ", " + [ for tag in activity.Tags -> $"{tag.Key}: {tag.Value}" ] |> String.concat ", " let listener = new ActivityListener( @@ -151,20 +149,23 @@ module Activity = #if DEBUG open OpenTelemetry.Resources open OpenTelemetry.Trace - let exportTraces() = + + let exportTraces () = let provider = // Configure OpenTelemetry export. Traces can be viewed in Jaeger or other compatible tools. - OpenTelemetry.Sdk.CreateTracerProviderBuilder() + OpenTelemetry.Sdk + .CreateTracerProviderBuilder() .AddSource(ActivityNames.FscSourceName) .ConfigureResource(fun r -> r.AddService("F#") |> ignore) .AddOtlpExporter(fun o -> // Empirical values to ensure no traces are lost and no significant delay at the end of test run. o.TimeoutMilliseconds <- 200 o.BatchExportProcessorOptions.MaxQueueSize <- 16384 - o.BatchExportProcessorOptions.ScheduledDelayMilliseconds <- 100 - ) + o.BatchExportProcessorOptions.ScheduledDelayMilliseconds <- 100) .Build() + let a = Activity.startNoTags "FSharpPackage" + fun () -> a.Dispose() provider.ForceFlush(5000) |> ignore From 7d9746a3b57a7c2066bab8496e22e5cb236164f9 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sat, 12 Apr 2025 20:09:11 +0200 Subject: [PATCH 09/44] ilver --- tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl | 1 + .../ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl | 1 + .../ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl | 1 + 3 files changed, 3 insertions(+) diff --git a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl index 69842b9e059..219a0ec11db 100644 --- a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl +++ b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl @@ -5,6 +5,7 @@ [IL]: Error [UnmanagedPointer]: : FSharp.Compiler.IO.RawByteMemory::.ctor(uint8*, int32, object)][offset 0x00000009] Unmanaged pointers are not a verifiable type. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::get_Item(int32)][offset 0x0000001E][found Native Int] Expected ByRef on the stack. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::set_Item(int32, uint8)][offset 0x00000025][found Native Int] Expected ByRef on the stack. +[IL]: Error [StackUnexpected]: : FSharp.Compiler.Cache`2::TryGetPickToEvict()][offset 0x0000005A][found ref 'object'][expected ref '[S.P.CoreLib]System.Collections.Generic.IEnumerable`1>>'] Unexpected type on the stack. [IL]: Error [ReturnPtrToStack]: : Internal.Utilities.Text.Lexing.LexBuffer`1::get_LexemeView()][offset 0x00000019] Return type is ByRef, TypedReference, ArgHandle, or ArgIterator. 
[IL]: Error [StackUnexpected]: : Internal.Utilities.Text.Lexing.UnicodeTables::scanUntilSentinel([FSharp.Compiler.Service]Internal.Utilities.Text.Lexing.LexBuffer`1, int32)][offset 0x00000079][found Short] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Xml.XmlDoc::processLines([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1)][offset 0x00000031][found Char] Unexpected type on the stack. diff --git a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl index 6e41547cd11..209cabb338b 100644 --- a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl +++ b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl @@ -5,6 +5,7 @@ [IL]: Error [UnmanagedPointer]: : FSharp.Compiler.IO.RawByteMemory::.ctor(uint8*, int32, object)][offset 0x00000009] Unmanaged pointers are not a verifiable type. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::get_Item(int32)][offset 0x0000001E][found Native Int] Expected ByRef on the stack. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::set_Item(int32, uint8)][offset 0x00000025][found Native Int] Expected ByRef on the stack. +[IL]: Error [StackUnexpected]: : FSharp.Compiler.Cache`2::TryGetPickToEvict()][offset 0x0000005A][found ref 'object'][expected ref '[S.P.CoreLib]System.Collections.Generic.IEnumerable`1>>'] Unexpected type on the stack. [IL]: Error [ReturnPtrToStack]: : Internal.Utilities.Text.Lexing.LexBuffer`1::get_LexemeView()][offset 0x00000019] Return type is ByRef, TypedReference, ArgHandle, or ArgIterator. [IL]: Error [StackUnexpected]: : Internal.Utilities.Text.Lexing.UnicodeTables::scanUntilSentinel([FSharp.Compiler.Service]Internal.Utilities.Text.Lexing.LexBuffer`1, int32)][offset 0x00000079][found Short] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Xml.XmlDoc::processLines([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1)][offset 0x00000031][found Char] Unexpected type on the stack. diff --git a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl index 431d4e5512a..8d926f8c113 100644 --- a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl +++ b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl @@ -5,6 +5,7 @@ [IL]: Error [UnmanagedPointer]: : FSharp.Compiler.IO.RawByteMemory::.ctor(uint8*, int32, object)][offset 0x00000009] Unmanaged pointers are not a verifiable type. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::get_Item(int32)][offset 0x0000001A][found Native Int] Expected ByRef on the stack. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::set_Item(int32, uint8)][offset 0x0000001B][found Native Int] Expected ByRef on the stack. +[IL]: Error [StackUnexpected]: : FSharp.Compiler.Cache`2::TryGetPickToEvict()][offset 0x00000034][found ref 'object'][expected ref '[S.P.CoreLib]System.Collections.Generic.IEnumerable`1>>'] Unexpected type on the stack. [IL]: Error [ReturnPtrToStack]: : Internal.Utilities.Text.Lexing.LexBuffer`1::get_LexemeView()][offset 0x00000017] Return type is ByRef, TypedReference, ArgHandle, or ArgIterator. 
[IL]: Error [StackUnexpected]: : Internal.Utilities.Text.Lexing.UnicodeTables::scanUntilSentinel([FSharp.Compiler.Service]Internal.Utilities.Text.Lexing.LexBuffer`1, int32)][offset 0x0000008D][found Short] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Xml.XmlDoc::processLines([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1)][offset 0x0000002C][found Char] Unexpected type on the stack. From 238a92aa359d104b37e64027abcf8327c9cea823 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sun, 13 Apr 2025 10:14:21 +0200 Subject: [PATCH 10/44] fix sa again --- src/Compiler/Utilities/Caches.fs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index c2f62538e16..5b436fb1bee 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -115,18 +115,18 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) cache - member this.GetStats() : obj = - {| - Capacity = options.MaximumCapacity - PercentageToEvict = options.PercentageToEvict - Strategy = options.Strategy - LevelOfConcurrency = options.LevelOfConcurrency - Count = this.Store.Count - MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed - LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed - MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount - LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount - |} + //member this.GetStats() = + // {| + // Capacity = options.MaximumCapacity + // PercentageToEvict = options.PercentageToEvict + // Strategy = options.Strategy + // LevelOfConcurrency = options.LevelOfConcurrency + // Count = this.Store.Count + // MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed + // LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed + // MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount + // LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount + // |} member private this.CalculateEvictionCount() = if this.Store.Count >= options.MaximumCapacity then From 1d21c78818d2074e6a02cb6adc211a4308e51352 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sun, 13 Apr 2025 10:15:56 +0200 Subject: [PATCH 11/44] just CWT for now decide where to attach the cache later --- src/Compiler/Checking/import.fs | 20 ++++++++++++++++++-- src/Compiler/Checking/import.fsi | 2 +- src/Compiler/Driver/CompilerImports.fs | 15 ++------------- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index f20164e97f2..775564e3817 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -6,17 +6,20 @@ module internal FSharp.Compiler.Import open System.Collections.Concurrent open System.Collections.Generic open System.Collections.Immutable -open FSharp.Compiler.Text.Range +open System.Runtime.CompilerServices + open Internal.Utilities.Library open Internal.Utilities.Library.Extras open Internal.Utilities.TypeHashing open Internal.Utilities.TypeHashing.HashTypes + open FSharp.Compiler open FSharp.Compiler.AbstractIL.IL open FSharp.Compiler.CompilerGlobalState open 
FSharp.Compiler.DiagnosticsLogger open FSharp.Compiler.SyntaxTreeOps open FSharp.Compiler.Text +open FSharp.Compiler.Text.Range open FSharp.Compiler.Xml open FSharp.Compiler.TypedTree open FSharp.Compiler.TypedTreeBasics @@ -90,6 +93,8 @@ type [] TTypeCacheKey = combined +let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>() + //------------------------------------------------------------------------- // Import an IL types as F# types. //------------------------------------------------------------------------- @@ -103,9 +108,20 @@ type [] TTypeCacheKey = /// using tcImports.GetImportMap() if needed, and it is not harmful if multiple instances are used. The object /// serves as an interface through to the tables stored in the primary TcImports structures defined in CompileOps.fs. [] -type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader, typeSubsumptionCache: Cache) = +type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = let typeRefToTyconRefCache = ConcurrentDictionary() + let typeSubsumptionCache = + typeSubsumptionCaches.GetValue(g, fun _ -> + Cache.Create( + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + PercentageToEvict = 20 + MaximumCapacity = 200_000 + } + ) + ) + member _.g = g member _.assemblyLoader = assemblyLoader diff --git a/src/Compiler/Checking/import.fsi b/src/Compiler/Checking/import.fsi index 3a7bd6e8437..043692ac41c 100644 --- a/src/Compiler/Checking/import.fsi +++ b/src/Compiler/Checking/import.fsi @@ -64,7 +64,7 @@ type TTypeCacheKey = /// serves as an interface through to the tables stored in the primary TcImports structures defined in CompileOps.fs. [] type ImportMap = - new: g: TcGlobals * assemblyLoader: AssemblyLoader * typeSubsumptionCache: Cache -> ImportMap + new: g: TcGlobals * assemblyLoader: AssemblyLoader -> ImportMap /// The AssemblyLoader for the import context member assemblyLoader: AssemblyLoader diff --git a/src/Compiler/Driver/CompilerImports.fs b/src/Compiler/Driver/CompilerImports.fs index 12418ad36cb..41cffb15506 100644 --- a/src/Compiler/Driver/CompilerImports.fs +++ b/src/Compiler/Driver/CompilerImports.fs @@ -1304,18 +1304,7 @@ and [] TcImports true | None -> false | None -> false - - let typeSubsumptionCache = - lazy - Cache - .Create( - { CacheOptions.Default with - EvictionMethod = EvictionMethod.Background - PercentageToEvict = 20 - MaximumCapacity = 200_000 - } - ) - + member internal _.Base = CheckDisposed() importsBase @@ -1715,7 +1704,7 @@ and [] TcImports member _.RecordGeneratedTypeRoot root = tcImports.RecordGeneratedTypeRoot root } #endif - ImportMap(tcImports.GetTcGlobals(), loaderInterface, typeSubsumptionCache.Value) + ImportMap(tcImports.GetTcGlobals(), loaderInterface) // Note the tcGlobals are only available once mscorlib and fslib have been established. 
For TcImports, // they are logically only needed when converting AbsIL data structures into F# data structures, and From 03dc52b2851560a16c78db0dd1c0be49b31052de Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sun, 13 Apr 2025 11:03:30 +0200 Subject: [PATCH 12/44] add some ids to see whats what --- src/Compiler/Utilities/Caches.fs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 5b436fb1bee..2abf1b5a357 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -80,12 +80,14 @@ type internal CachedEntity<'Value> = [] [] -type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) = +type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts, name: string) = let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() let eviction = Event<_>() + static let mutable cacheId = 0 + let mutable currentCapacity = capacity [] @@ -105,10 +107,12 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) options.MaximumCapacity + (options.MaximumCapacity * options.PercentageToEvict / 100) - use _ = Activity.start "Cache.Created" (seq { "capacity", string capacity }) + let name = $"Cache{Interlocked.Increment &cacheId}" + + use _ = Activity.start "Cache.Created" (seq {"name", name; "capacity", string capacity }) let cts = new CancellationTokenSource() - let cache = new Cache<'Key, 'Value>(options, capacity, cts) + let cache = new Cache<'Key, 'Value>(options, capacity, cts, name) if options.EvictionMethod = EvictionMethod.Background then Task.Run(cache.TryEvictTask, cts.Token) |> ignore @@ -168,6 +172,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) Activity.start "Cache.Eviction" (seq { + yield "name", name yield "Store.Count", string this.Store.Count if exceeded then From d524663bfc885f78a599d167be2f8a6457b9819b Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sun, 13 Apr 2025 12:51:18 +0200 Subject: [PATCH 13/44] for some reason it didnt work --- src/Compiler/Checking/import.fs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 775564e3817..28f9bc1b1d4 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -93,7 +93,18 @@ type [] TTypeCacheKey = combined -let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>() +//let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>() + +let typeSubsumptionCache = + //typeSubsumptionCaches.GetValue(g, fun _ -> + Cache.Create( + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + PercentageToEvict = 15 + MaximumCapacity = 500_000 + } + ) + //) //------------------------------------------------------------------------- // Import an IL types as F# types. 
@@ -111,17 +122,6 @@ let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>( type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = let typeRefToTyconRefCache = ConcurrentDictionary() - let typeSubsumptionCache = - typeSubsumptionCaches.GetValue(g, fun _ -> - Cache.Create( - { CacheOptions.Default with - EvictionMethod = EvictionMethod.Background - PercentageToEvict = 20 - MaximumCapacity = 200_000 - } - ) - ) - member _.g = g member _.assemblyLoader = assemblyLoader From e130e011141e0a8d52a78757d78466ce4b56a723 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 12:10:10 +0200 Subject: [PATCH 14/44] metrics --- src/Compiler/Utilities/Caches.fs | 38 +++++----------- .../src/FSharp.Editor/Common/Logging.fs | 44 ++++++++++++++----- .../LanguageService/LanguageService.fs | 12 ++++- 3 files changed, 53 insertions(+), 41 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 2abf1b5a357..9a35507a6a4 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -5,8 +5,7 @@ open System.Collections.Concurrent open System.Threading open System.Threading.Tasks open System.Diagnostics - -open FSharp.Compiler.Diagnostics +open System.Diagnostics.Metrics [] // Default Seq.* function have one issue - when doing `Seq.sortBy`, it will call a `ToArray` on the collection, @@ -78,18 +77,19 @@ type internal CachedEntity<'Value> = AccessCount = 0L } +module internal CacheMetrics = + let mutable cacheId = 0 + let meter = new Meter("FSharp.Compiler.Caches") + // let _count = meter.CreateObservableCounter("Count", (fun _ -> cache.Store.Count)) + [] [] -type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts, name: string) = +type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) = let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() let eviction = Event<_>() - static let mutable cacheId = 0 - - let mutable currentCapacity = capacity - [] member val CacheHit = cacheHit.Publish @@ -107,12 +107,10 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts, options.MaximumCapacity + (options.MaximumCapacity * options.PercentageToEvict / 100) - let name = $"Cache{Interlocked.Increment &cacheId}" - - use _ = Activity.start "Cache.Created" (seq {"name", name; "capacity", string capacity }) - let cts = new CancellationTokenSource() - let cache = new Cache<'Key, 'Value>(options, capacity, cts, name) + let cache = new Cache<'Key, 'Value>(options, capacity, cts) + + CacheMetrics.meter.CreateObservableGauge($"count-{Interlocked.Increment &CacheMetrics.cacheId}", (fun () -> cache.Store.Count)) |> ignore if options.EvictionMethod = EvictionMethod.Background then Task.Run(cache.TryEvictTask, cts.Token) |> ignore @@ -163,22 +161,6 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts, let evictionCount = this.CalculateEvictionCount() if evictionCount > 0 then - let exceeded = this.Store.Count > currentCapacity - - if exceeded then - currentCapacity <- this.Store.Count - - use _ = - Activity.start - "Cache.Eviction" - (seq { - yield "name", name - yield "Store.Count", string this.Store.Count - - if exceeded then - yield "RESIZE", "!" 
- }) - this.TryEvictItems() let utilization = (this.Store.Count / options.MaximumCapacity) diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index e978d1e19d9..6bc530652fe 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -30,6 +30,7 @@ module Config = let fsharpOutputGuid = Guid fsharpOutputGuidString open Config +open System.Diagnostics.Metrics [] type Logger [] ([)>] serviceProvider: IServiceProvider) = @@ -118,7 +119,7 @@ module Logging = let logExceptionWithContext (ex: Exception, context) = logErrorf "Context: %s\nException Message: %s\nStack Trace: %s" context ex.Message ex.StackTrace -module Activity = +module FSharpServiceTelemetry = let listen filter = let indent (activity: Activity) = @@ -146,30 +147,51 @@ module Activity = ) ActivitySource.AddActivityListener(listener) + + let logCacheMetricsToOutput () = + let listener = new MeterListener( + InstrumentPublished = fun instrument l -> + if instrument.Meter.Name = "FSharp.Compiler.Caches" then + l.EnableMeasurementEvents(instrument) + ) + let callBack = MeasurementCallback(fun instr v _ _ -> logMsg $"{instr.Name}: {v}") + listener.SetMeasurementEventCallback callBack + listener.Start() + + backgroundTask { + while true do + do! System.Threading.Tasks.Task.Delay(1000) + listener.RecordObservableInstruments() + } |> ignore + #if DEBUG open OpenTelemetry.Resources open OpenTelemetry.Trace + open OpenTelemetry.Metrics - let exportTraces () = - let provider = + let export () = + let meterProvider = + // Configure OpenTelemetry metrics. Metrics can be viewed in Prometheus or other compatible tools. + OpenTelemetry.Sdk + .CreateMeterProviderBuilder() + .AddOtlpExporter() + .Build() + let tracerProvider = // Configure OpenTelemetry export. Traces can be viewed in Jaeger or other compatible tools. OpenTelemetry.Sdk .CreateTracerProviderBuilder() .AddSource(ActivityNames.FscSourceName) .ConfigureResource(fun r -> r.AddService("F#") |> ignore) - .AddOtlpExporter(fun o -> - // Empirical values to ensure no traces are lost and no significant delay at the end of test run. 
- o.TimeoutMilliseconds <- 200 - o.BatchExportProcessorOptions.MaxQueueSize <- 16384 - o.BatchExportProcessorOptions.ScheduledDelayMilliseconds <- 100) + .AddOtlpExporter() .Build() let a = Activity.startNoTags "FSharpPackage" fun () -> a.Dispose() - provider.ForceFlush(5000) |> ignore - provider.Dispose() + tracerProvider.ForceFlush(5000) |> ignore + tracerProvider.Dispose() + meterProvider.Dispose() let listenToAll () = listen "" -#endif +#endif \ No newline at end of file diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index f8a3d3e0e32..738cf2f6135 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -340,11 +340,19 @@ type internal FSharpPackage() as this = let mutable solutionEventsOpt = None - do Logging.Activity.listen "Cache" - // FSI-LINKAGE-POINT: unsited init do FSharp.Interactive.Hooks.fsiConsoleWindowPackageCtorUnsited (this :> Package) + do Logging.FSharpServiceTelemetry.logCacheMetricsToOutput() + + #if DEBUG + let flushTelemetry = Logging.FSharpServiceTelemetry.export() + + override this.Dispose (disposing: bool) = + base.Dispose(disposing: bool) + if disposing then flushTelemetry() + #endif + override this.InitializeAsync(cancellationToken: CancellationToken, progress: IProgress) : Tasks.Task = // `base.` methods can't be called in the `async` builder, so we have to cache it let baseInitializeAsync = base.InitializeAsync(cancellationToken, progress) From 47b4165af177ccb61ae5232143c5cd04f22f7f28 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 12:33:56 +0200 Subject: [PATCH 15/44] metrics --- vsintegration/src/FSharp.Editor/Common/Logging.fs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 6bc530652fe..74085e7288e 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -154,10 +154,15 @@ module FSharpServiceTelemetry = if instrument.Meter.Name = "FSharp.Compiler.Caches" then l.EnableMeasurementEvents(instrument) ) - let callBack = MeasurementCallback(fun instr v _ _ -> logMsg $"{instr.Name}: {v}") + + let msg = Event() + + let callBack = MeasurementCallback(fun instr v _ _ -> msg.Trigger $"{instr.Name}: {v}") listener.SetMeasurementEventCallback callBack listener.Start() + msg.Publish |> Event.pairwise |> Event.filter (fun (x, y) -> x <> y) |> Event.map snd |> Event.add logMsg + backgroundTask { while true do do! 
System.Threading.Tasks.Task.Delay(1000) From 90eaa02173bdc668174d4e03540c4c72524ca5bc Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 13:30:19 +0200 Subject: [PATCH 16/44] one cache instance per TcGlobals --- src/Compiler/Checking/import.fs | 27 ++++++++++--------- src/Compiler/Utilities/Caches.fs | 3 +-- .../src/FSharp.Editor/Common/Logging.fs | 21 ++++++++++----- 3 files changed, 29 insertions(+), 22 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 28f9bc1b1d4..24d2c313150 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -93,18 +93,7 @@ type [] TTypeCacheKey = combined -//let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>() - -let typeSubsumptionCache = - //typeSubsumptionCaches.GetValue(g, fun _ -> - Cache.Create( - { CacheOptions.Default with - EvictionMethod = EvictionMethod.Background - PercentageToEvict = 15 - MaximumCapacity = 500_000 - } - ) - //) +let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>() //------------------------------------------------------------------------- // Import an IL types as F# types. @@ -122,13 +111,25 @@ let typeSubsumptionCache = type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = let typeRefToTyconRefCache = ConcurrentDictionary() + let typeSubsumptionCache = + lazy + typeSubsumptionCaches.GetValue(g, fun g -> + Cache.Create( + { CacheOptions.Default with + // EvictionMethod = EvictionMethod.Background + PercentageToEvict = 15 + MaximumCapacity = if g.compilationMode = CompilationMode.OneOff then System.Int32.MaxValue else 500_000 + } + ) + ) + member _.g = g member _.assemblyLoader = assemblyLoader member _.ILTypeRefToTyconRefCache = typeRefToTyconRefCache - member _.TypeSubsumptionCache = typeSubsumptionCache + member _.TypeSubsumptionCache = typeSubsumptionCache.Value let CanImportILScopeRef (env: ImportMap) m scoref = diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 9a35507a6a4..7e5024ca8ca 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -80,7 +80,6 @@ type internal CachedEntity<'Value> = module internal CacheMetrics = let mutable cacheId = 0 let meter = new Meter("FSharp.Compiler.Caches") - // let _count = meter.CreateObservableCounter("Count", (fun _ -> cache.Store.Count)) [] [] @@ -110,7 +109,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) - CacheMetrics.meter.CreateObservableGauge($"count-{Interlocked.Increment &CacheMetrics.cacheId}", (fun () -> cache.Store.Count)) |> ignore + CacheMetrics.meter.CreateObservableGauge($"count{Interlocked.Increment &CacheMetrics.cacheId}", (fun () -> cache.Store.Count)) |> ignore if options.EvictionMethod = EvictionMethod.Background then Task.Run(cache.TryEvictTask, cts.Token) |> ignore diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 74085e7288e..6c5f75eb71b 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -149,26 +149,33 @@ module FSharpServiceTelemetry = ActivitySource.AddActivityListener(listener) let logCacheMetricsToOutput () = + let cacheCounts = Collections.Generic.Dictionary() let listener = new MeterListener( InstrumentPublished = fun instrument l -> if instrument.Meter.Name 
= "FSharp.Compiler.Caches" then - l.EnableMeasurementEvents(instrument) + cacheCounts[instrument.Name] <- 0 + l.EnableMeasurementEvents(instrument) ) - let msg = Event() - - let callBack = MeasurementCallback(fun instr v _ _ -> msg.Trigger $"{instr.Name}: {v}") - listener.SetMeasurementEventCallback callBack + let callBack = MeasurementCallback(fun instr v _ _ -> cacheCounts[instr.Name] <- v) + listener.SetMeasurementEventCallback callBack listener.Start() - - msg.Publish |> Event.pairwise |> Event.filter (fun (x, y) -> x <> y) |> Event.map snd |> Event.add logMsg + + let msg = Event() backgroundTask { while true do do! System.Threading.Tasks.Task.Delay(1000) listener.RecordObservableInstruments() + if cacheCounts.Count > 0 then + let details = + [ for kvp in cacheCounts -> $"{kvp.Key}: {kvp.Value}"] + |> String.concat ", " + msg.Trigger $"total: {cacheCounts.Values |> Seq.sum} | {details}" } |> ignore + msg.Publish |> Event.pairwise |> Event.filter (fun (x, y) -> x <> y) |> Event.map snd |> Event.add logMsg + #if DEBUG open OpenTelemetry.Resources open OpenTelemetry.Trace From 165ea2482287e71f8e472888876a56fd52765fb0 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 15:37:54 +0200 Subject: [PATCH 17/44] fix no eviction when OneOff --- src/Compiler/Checking/import.fs | 22 ++++++++++++---------- src/Compiler/Utilities/Caches.fs | 4 +++- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 24d2c313150..33bd2ca1d0b 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -112,16 +112,18 @@ type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = let typeRefToTyconRefCache = ConcurrentDictionary() let typeSubsumptionCache = - lazy - typeSubsumptionCaches.GetValue(g, fun g -> - Cache.Create( - { CacheOptions.Default with - // EvictionMethod = EvictionMethod.Background - PercentageToEvict = 15 - MaximumCapacity = if g.compilationMode = CompilationMode.OneOff then System.Int32.MaxValue else 500_000 - } - ) - ) + lazy + let options = + if g.compilationMode = CompilationMode.OneOff then + { CacheOptions.Default with + PercentageToEvict = 0 + EvictionMethod = EvictionMethod.KeepAll } + else + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Blocking // EvictionMethod.Background + PercentageToEvict = 15 + MaximumCapacity = 500_000 } + typeSubsumptionCaches.GetValue(g, fun _ -> Cache.Create(options)) member _.g = g diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 7e5024ca8ca..1038b77ef0a 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -44,6 +44,7 @@ type internal CachingStrategy = type internal EvictionMethod = | Blocking | Background + | KeepAll [] type internal CacheOptions = @@ -178,7 +179,8 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) if this.CalculateEvictionCount() > 0 then match options.EvictionMethod with | EvictionMethod.Blocking -> this.TryEvictItems() - | EvictionMethod.Background -> () + | EvictionMethod.Background + | EvictionMethod.KeepAll -> () member this.TryGet(key, value: outref<'Value>) = match this.Store.TryGetValue(key) with From 83208acb2eeaec1dc734cc3c1a1adafc7ced926c Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 19:46:49 +0200 Subject: [PATCH 18/44] restore LanguageFeature --- 
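
The Event.pairwise / Event.filter pipeline in the cache-metrics logging above is what keeps the Output window quiet while the reported numbers do not change between polls: a message is logged only when it differs from the one produced just before it. A small illustration of that operator combination, with made-up strings:

let demoDeduplicatedLogging () =
    let msg = Event<string>()

    msg.Publish
    |> Event.pairwise
    |> Event.filter (fun (previous, current) -> previous <> current)
    |> Event.map snd
    |> Event.add (printfn "%s")

    msg.Trigger "cache1: 10" // no output yet, pairwise needs a second value
    msg.Trigger "cache1: 10" // identical to the previous message, filtered out
    msg.Trigger "cache1: 12" // differs, so "cache1: 12" is logged
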
src/Compiler/Checking/TypeRelations.fs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/Compiler/Checking/TypeRelations.fs b/src/Compiler/Checking/TypeRelations.fs index ffece895ccd..daa5a656415 100644 --- a/src/Compiler/Checking/TypeRelations.fs +++ b/src/Compiler/Checking/TypeRelations.fs @@ -101,18 +101,18 @@ let TypesFeasiblyEquiv ndeep g amap m ty1 ty2 = let TypesFeasiblyEquivStripMeasures g amap m ty1 ty2 = TypesFeasiblyEquivalent true 0 g amap m ty1 ty2 -let inline TryGetCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key = - //if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then +let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = + if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then match amap.TypeSubsumptionCache.TryGet(key) with | true, subsumes -> ValueSome subsumes | false, _ -> ValueNone - //else - // ValueNone + else + ValueNone -let inline UpdateCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key subsumes : unit = - //if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then +let inline UpdateCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key subsumes : unit = + if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then amap.TypeSubsumptionCache.TryAdd(key, subsumes) |> ignore /// The feasible coercion relation. Part of the language spec. From 4659934d7c170e6f573e2542c439b3a32413f3f9 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 19:47:51 +0200 Subject: [PATCH 19/44] singleton, but compilationMode aware --- src/Compiler/Checking/import.fs | 38 +++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 33bd2ca1d0b..5a4e454654f 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -93,7 +93,27 @@ type [] TTypeCacheKey = combined -let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>() +let getOrCreateTypeSubsumptionCache = + let mutable lockObj = obj() + let mutable cache = None + + fun compilationMode -> + lock lockObj <| fun () -> + match cache with + | Some c -> c + | _ -> + let options = + if compilationMode = CompilationMode.OneOff then + { CacheOptions.Default with + PercentageToEvict = 0 + EvictionMethod = EvictionMethod.KeepAll } + else + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + PercentageToEvict = 15 + MaximumCapacity = 100_000 } + cache <- Some (Cache.Create(options)) + cache.Value //------------------------------------------------------------------------- // Import an IL types as F# types. 
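
getOrCreateTypeSubsumptionCache above replaces the per-TcGlobals ConditionalWeakTable with a single shared instance, created on first use under a lock and configured from the compilation mode of whichever caller gets there first. Stripped of the compiler types, the shape is plain lock-guarded memoization; the sketch below is illustrative only and its names are invented:

let createOnFirstUse (create: unit -> 'T) =
    let gate = obj ()
    let mutable instance = None

    fun () ->
        lock gate (fun () ->
            match instance with
            | Some value -> value
            | None ->
                let value = create ()
                instance <- Some value
                value)

// Example wiring, in the spirit of the options split above:
// let getCache = createOnFirstUse (fun () -> Cache.Create CacheOptions.Default)
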
@@ -111,27 +131,13 @@ let typeSubsumptionCaches = ConditionalWeakTable<_, Cache>( type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = let typeRefToTyconRefCache = ConcurrentDictionary() - let typeSubsumptionCache = - lazy - let options = - if g.compilationMode = CompilationMode.OneOff then - { CacheOptions.Default with - PercentageToEvict = 0 - EvictionMethod = EvictionMethod.KeepAll } - else - { CacheOptions.Default with - EvictionMethod = EvictionMethod.Blocking // EvictionMethod.Background - PercentageToEvict = 15 - MaximumCapacity = 500_000 } - typeSubsumptionCaches.GetValue(g, fun _ -> Cache.Create(options)) - member _.g = g member _.assemblyLoader = assemblyLoader member _.ILTypeRefToTyconRefCache = typeRefToTyconRefCache - member _.TypeSubsumptionCache = typeSubsumptionCache.Value + member val TypeSubsumptionCache = getOrCreateTypeSubsumptionCache g.compilationMode let CanImportILScopeRef (env: ImportMap) m scoref = From 5abed612fa658d7c9d8c91063005d8e6ff1478b0 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 19:55:33 +0200 Subject: [PATCH 20/44] fix background eviction --- src/Compiler/Utilities/Caches.fs | 37 ++++++++++++++++---------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 1038b77ef0a..e68e5dbe69e 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -58,7 +58,7 @@ type internal CacheOptions = static member Default = { - MaximumCapacity = 10_000 + MaximumCapacity = 100 PercentageToEvict = 5 Strategy = CachingStrategy.LRU LevelOfConcurrency = Environment.ProcessorCount @@ -80,7 +80,7 @@ type internal CachedEntity<'Value> = module internal CacheMetrics = let mutable cacheId = 0 - let meter = new Meter("FSharp.Compiler.Caches") + let createMeter () = new Meter("FSharp.Compiler.Caches") [] [] @@ -89,6 +89,12 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() let eviction = Event<_>() + + // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. + let store = ConcurrentDictionary<_, CachedEntity<'Value>>(options.LevelOfConcurrency, capacity) + + let meter = CacheMetrics.createMeter() + let _ = meter.CreateObservableGauge($"cache{Interlocked.Increment &CacheMetrics.cacheId}", (fun () -> store.Count)) [] member val CacheHit = cacheHit.Publish @@ -99,9 +105,6 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) [] member val Eviction = eviction.Publish - // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. 
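
The pre-sizing rationale in the comment just above is worth making concrete: Cache.Create asks the ConcurrentDictionary for more room than MaximumCapacity, so entries added while an eviction pass is still catching up do not force a resize. Worked through for the service configuration introduced earlier (MaximumCapacity = 100_000, PercentageToEvict = 15); the bindings are only for illustration:

let maximumCapacity = 100_000
let percentageToEvict = 15

// Same arithmetic as Cache.Create: reserve head-room for the eviction backlog.
let capacity = maximumCapacity + (maximumCapacity * percentageToEvict / 100)
// capacity = 115_000
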
- member val Store = ConcurrentDictionary<_, CachedEntity<'Value>>(options.LevelOfConcurrency, capacity) - static member Create(options: CacheOptions) = let capacity = options.MaximumCapacity @@ -110,8 +113,6 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) - CacheMetrics.meter.CreateObservableGauge($"count{Interlocked.Increment &CacheMetrics.cacheId}", (fun () -> cache.Store.Count)) |> ignore - if options.EvictionMethod = EvictionMethod.Background then Task.Run(cache.TryEvictTask, cts.Token) |> ignore @@ -131,15 +132,15 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) // |} member private this.CalculateEvictionCount() = - if this.Store.Count >= options.MaximumCapacity then - (this.Store.Count - options.MaximumCapacity) + if store.Count >= options.MaximumCapacity then + (store.Count - options.MaximumCapacity) + (options.MaximumCapacity * options.PercentageToEvict / 100) else 0 // TODO: All of these are proofs of concept, a very naive implementation of eviction strategies, it will always walk the dictionary to find the items to evict, this is not efficient. member private this.TryGetPickToEvict() = - this.Store + store |> match options.Strategy with | CachingStrategy.LRU -> ConcurrentDictionary.sortBy _.Value.LastAccessed | CachingStrategy.LFU -> ConcurrentDictionary.sortBy _.Value.AccessCount @@ -150,7 +151,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) member private this.TryEvictItems() = if this.CalculateEvictionCount() > 0 then for key in this.TryGetPickToEvict() do - match this.Store.TryRemove(key) with + match store.TryRemove(key) with | true, _ -> eviction.Trigger(key) | _ -> () // TODO: We probably want to count eviction misses as well? @@ -163,16 +164,16 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) if evictionCount > 0 then this.TryEvictItems() - let utilization = (this.Store.Count / options.MaximumCapacity) + let utilization = (float store.Count / float options.MaximumCapacity) // So, based on utilization this will scale the delay between 0 and 1 seconds. // Worst case scenario would be when 1 second delay happens, // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. - let delay = 1000 - (1000 * utilization) + let delay = 1000.0 - (1000.0 * utilization) - if delay > 0 then - do! Task.Delay(delay) + if delay > 0.0 then + do! Task.Delay(int delay) } member this.TryEvict() = @@ -183,7 +184,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) | EvictionMethod.KeepAll -> () member this.TryGet(key, value: outref<'Value>) = - match this.Store.TryGetValue(key) with + match store.TryGetValue(key) with | true, cachedEntity -> // this is fine to be non-atomic, I guess, we are okay with race if the time is within the time of multiple concurrent calls. 
cachedEntity.LastAccessed <- DateTimeOffset.Now.Ticks @@ -204,10 +205,10 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let value = CachedEntity<'Value>(value) if update then - let _ = this.Store.AddOrUpdate(key, value, (fun _ _ -> value)) + let _ = store.AddOrUpdate(key, value, (fun _ _ -> value)) true else - this.Store.TryAdd(key, value) + store.TryAdd(key, value) interface IDisposable with member _.Dispose() = cts.Cancel() From e1a48ff2ff8184f0da1fa255701735918d709a60 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 22:44:51 +0200 Subject: [PATCH 21/44] wip --- src/Compiler/Utilities/Caches.fs | 36 ++++++++++++------- .../src/FSharp.Editor/Common/Logging.fs | 17 +++++---- 2 files changed, 32 insertions(+), 21 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index e68e5dbe69e..80213e76809 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -80,7 +80,20 @@ type internal CachedEntity<'Value> = module internal CacheMetrics = let mutable cacheId = 0 - let createMeter () = new Meter("FSharp.Compiler.Caches") + let addInstrumentation (store: ConcurrentDictionary<_, CachedEntity<'Value>>) = + let meter = new Meter("FSharp.Compiler.Caches") + let uid = Interlocked.Increment &cacheId + + let orZero f = fun () -> + let vs = store.Values + if vs |> Seq.isEmpty then 0L else f vs + + let _ = meter.CreateObservableGauge($"cache{uid}", (fun () -> int64 store.Count)) + //let _ = meter.CreateObservableGauge($"MRA{uid}", orZero (Seq.map _.LastAccessed >> Seq.max)) + //let _ = meter.CreateObservableGauge($"LRA{uid}", orZero (Seq.map _.LastAccessed >> Seq.min)) + let _ = meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) + let _ = meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) + () [] [] @@ -93,8 +106,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let store = ConcurrentDictionary<_, CachedEntity<'Value>>(options.LevelOfConcurrency, capacity) - let meter = CacheMetrics.createMeter() - let _ = meter.CreateObservableGauge($"cache{Interlocked.Increment &CacheMetrics.cacheId}", (fun () -> store.Count)) + do CacheMetrics.addInstrumentation store [] member val CacheHit = cacheHit.Publish @@ -164,16 +176,16 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) if evictionCount > 0 then this.TryEvictItems() - let utilization = (float store.Count / float options.MaximumCapacity) - // So, based on utilization this will scale the delay between 0 and 1 seconds. - // Worst case scenario would be when 1 second delay happens, - // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. - // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. - // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. - let delay = 1000.0 - (1000.0 * utilization) + let utilization = (float store.Count / float options.MaximumCapacity) + // So, based on utilization this will scale the delay between 0 and 1 seconds. + // Worst case scenario would be when 1 second delay happens, + // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. 
+ // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. + // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. + let delay = 1000.0 - (1000.0 * utilization) - if delay > 0.0 then - do! Task.Delay(int delay) + if delay > 0.0 then + do! Task.Delay(int delay) } member this.TryEvict() = diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 6c5f75eb71b..beddfb8f823 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -149,16 +149,16 @@ module FSharpServiceTelemetry = ActivitySource.AddActivityListener(listener) let logCacheMetricsToOutput () = - let cacheCounts = Collections.Generic.Dictionary() + let instruments = Collections.Generic.Dictionary() let listener = new MeterListener( InstrumentPublished = fun instrument l -> if instrument.Meter.Name = "FSharp.Compiler.Caches" then - cacheCounts[instrument.Name] <- 0 + instruments[instrument.Name] <- 0L l.EnableMeasurementEvents(instrument) ) - let callBack = MeasurementCallback(fun instr v _ _ -> cacheCounts[instr.Name] <- v) - listener.SetMeasurementEventCallback callBack + let callBack = MeasurementCallback(fun instr v _ _ -> instruments[instr.Name] <- v) + listener.SetMeasurementEventCallback callBack listener.Start() let msg = Event() @@ -167,11 +167,10 @@ module FSharpServiceTelemetry = while true do do! System.Threading.Tasks.Task.Delay(1000) listener.RecordObservableInstruments() - if cacheCounts.Count > 0 then - let details = - [ for kvp in cacheCounts -> $"{kvp.Key}: {kvp.Value}"] - |> String.concat ", " - msg.Trigger $"total: {cacheCounts.Values |> Seq.sum} | {details}" + if instruments.Count > 0 then + [ for kvp in instruments -> $"{kvp.Key}: {kvp.Value}"] + |> String.concat ", " + |> msg.Trigger } |> ignore msg.Publish |> Event.pairwise |> Event.filter (fun (x, y) -> x <> y) |> Event.map snd |> Event.add logMsg From e1cd30b56e33893296d32643d1e8b3c5cc3feaa9 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Mon, 14 Apr 2025 23:53:35 +0200 Subject: [PATCH 22/44] more metrics --- src/Compiler/Utilities/Caches.fs | 45 ++++++++++++++++++++------------ 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 80213e76809..255ed46aed8 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -93,7 +93,21 @@ module internal CacheMetrics = //let _ = meter.CreateObservableGauge($"LRA{uid}", orZero (Seq.map _.LastAccessed >> Seq.min)) let _ = meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) let _ = meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) - () + + let mutable evictions = 0L + let mutable hits = 0L + let mutable misses = 0L + + fun eviction hit miss -> + + eviction |> Event.add (fun _ -> Interlocked.Increment &evictions |> ignore) + hit |> Event.add (fun _ -> Interlocked.Increment &hits |> ignore) + miss |> Event.add (fun _ -> Interlocked.Increment &misses |> ignore) + + let _ = meter.CreateObservableGauge($"evicted{uid}", fun () -> Interlocked.Exchange(&evictions, 0L)) + let _ = meter.CreateObservableGauge($"hits{uid}", fun () -> Interlocked.Exchange(&hits, 0L)) + let _ = meter.CreateObservableGauge($"misses{uid}", fun () -> Interlocked.Exchange(&misses, 0L)) + () [] [] @@ -106,7 +120,7 @@ 
type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let store = ConcurrentDictionary<_, CachedEntity<'Value>>(options.LevelOfConcurrency, capacity) - do CacheMetrics.addInstrumentation store + do CacheMetrics.addInstrumentation store eviction.Publish cacheHit.Publish cacheMiss.Publish [] member val CacheHit = cacheHit.Publish @@ -171,21 +185,18 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) member private this.TryEvictTask() = backgroundTask { while not cts.Token.IsCancellationRequested do - let evictionCount = this.CalculateEvictionCount() - - if evictionCount > 0 then - this.TryEvictItems() - - let utilization = (float store.Count / float options.MaximumCapacity) - // So, based on utilization this will scale the delay between 0 and 1 seconds. - // Worst case scenario would be when 1 second delay happens, - // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. - // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. - // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. - let delay = 1000.0 - (1000.0 * utilization) - - if delay > 0.0 then - do! Task.Delay(int delay) + this.TryEvictItems() + + let utilization = (float store.Count / float options.MaximumCapacity) + // So, based on utilization this will scale the delay between 0 and 1 seconds. + // Worst case scenario would be when 1 second delay happens, + // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. + // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. + // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. + let delay = 1000.0 - (1000.0 * utilization) + + if delay > 0.0 then + do! 
Task.Delay(int delay) } member this.TryEvict() = From 7bdde6ad3c69091bd3eefdcafcfa5a1a6521b966 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Tue, 15 Apr 2025 10:27:49 +0200 Subject: [PATCH 23/44] background eviction needs work --- src/Compiler/Checking/import.fs | 2 +- src/Compiler/Utilities/Caches.fs | 11 ++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 5a4e454654f..2b76e9aaa50 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -109,7 +109,7 @@ let getOrCreateTypeSubsumptionCache = EvictionMethod = EvictionMethod.KeepAll } else { CacheOptions.Default with - EvictionMethod = EvictionMethod.Background + EvictionMethod = EvictionMethod.Blocking // EvictionMethod.Background PercentageToEvict = 15 MaximumCapacity = 100_000 } cache <- Some (Cache.Create(options)) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 255ed46aed8..0e327a23b70 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -1,6 +1,7 @@ namespace FSharp.Compiler open System +open System.Collections.Generic open System.Collections.Concurrent open System.Threading open System.Threading.Tasks @@ -95,16 +96,19 @@ module internal CacheMetrics = let _ = meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) let mutable evictions = 0L + let mutable fails = 0L let mutable hits = 0L let mutable misses = 0L - fun eviction hit miss -> + fun eviction hit miss evictionFail -> eviction |> Event.add (fun _ -> Interlocked.Increment &evictions |> ignore) + evictionFail |> Event.add (fun _ -> Interlocked.Increment &fails |> ignore) hit |> Event.add (fun _ -> Interlocked.Increment &hits |> ignore) miss |> Event.add (fun _ -> Interlocked.Increment &misses |> ignore) let _ = meter.CreateObservableGauge($"evicted{uid}", fun () -> Interlocked.Exchange(&evictions, 0L)) + let _ = meter.CreateObservableGauge($"fails{uid}", fun () -> Interlocked.Exchange(&fails, 0L)) let _ = meter.CreateObservableGauge($"hits{uid}", fun () -> Interlocked.Exchange(&hits, 0L)) let _ = meter.CreateObservableGauge($"misses{uid}", fun () -> Interlocked.Exchange(&misses, 0L)) () @@ -116,11 +120,12 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() let eviction = Event<_>() + let evictionFail = Event<_>() // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let store = ConcurrentDictionary<_, CachedEntity<'Value>>(options.LevelOfConcurrency, capacity) - do CacheMetrics.addInstrumentation store eviction.Publish cacheHit.Publish cacheMiss.Publish + do CacheMetrics.addInstrumentation store eviction.Publish cacheHit.Publish cacheMiss.Publish evictionFail.Publish [] member val CacheHit = cacheHit.Publish @@ -179,7 +184,7 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) for key in this.TryGetPickToEvict() do match store.TryRemove(key) with | true, _ -> eviction.Trigger(key) - | _ -> () // TODO: We probably want to count eviction misses as well? + | _ -> evictionFail.Trigger(key) // TODO: We probably want to count eviction misses as well? // TODO: Shall this be a safer task, wrapping everything in try .. with, so it's not crashing silently? 
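
The utilization-scaled pause driving the eviction loop above is easiest to see with the formula pulled out on its own: the sleep shrinks linearly from a full second for an empty cache down to zero once the cache is at or over capacity. The helper name and sample inputs below are invented for illustration:

let delayMilliseconds (count: int) (maximumCapacity: int) =
    let utilization = float count / float maximumCapacity
    let delay = 1000.0 - (1000.0 * utilization)
    if delay > 0.0 then int delay else 0

// delayMilliseconds 25_000 100_000 = 750
// delayMilliseconds 100_000 100_000 = 0, i.e. run the next eviction pass immediately
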
member private this.TryEvictTask() = From e6ba27e77e92bbf7ac5591399c0af481f46991e4 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 17 Apr 2025 23:32:54 +0200 Subject: [PATCH 24/44] fix stampEquals --- src/Compiler/Utilities/TypeHashing.fs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/Compiler/Utilities/TypeHashing.fs b/src/Compiler/Utilities/TypeHashing.fs index bcdface38be..1475635972d 100644 --- a/src/Compiler/Utilities/TypeHashing.fs +++ b/src/Compiler/Utilities/TypeHashing.fs @@ -126,9 +126,13 @@ module HashAccessibility = module rec HashTypes = open Microsoft.FSharp.Core.LanguagePrimitives - let stampEquals g ty1 ty2 = + let rec stampEquals g ty1 ty2 = match (stripTyEqns g ty1), (stripTyEqns g ty2) with - | TType_app(tcref1, _, _), TType_app(tcref2, _, _) -> tcref1.Stamp.Equals(tcref2.Stamp) + | TType_app(tcref1, tinst1, _), TType_app(tcref2, tinst2, _) -> + tcref1.Stamp = tcref2.Stamp && + tinst1.Length = tinst2.Length && + tinst1 |> List.zip tinst2 |> List.forall (fun (t1, t2) -> stampEquals g t1 t2) + | TType_var(r1, _), TType_var(r2, _) -> r1.Stamp.Equals(r2.Stamp) | _ -> false From 5b8735662e8090f528ccfbda17f508c7ae2c0012 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 17 Apr 2025 23:33:29 +0200 Subject: [PATCH 25/44] fix hash --- src/Compiler/Checking/import.fs | 32 +++++++++++++++++++++++--------- 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 2b76e9aaa50..34dea504cab 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -6,6 +6,7 @@ module internal FSharp.Compiler.Import open System.Collections.Concurrent open System.Collections.Generic open System.Collections.Immutable +open System.Diagnostics open System.Runtime.CompilerServices open Internal.Utilities.Library @@ -55,7 +56,8 @@ type CanCoerce = | CanCoerce | NoCoerce -type [] TTypeCacheKey = +[] +type TTypeCacheKey = val ty1: TType val ty2: TType @@ -84,14 +86,26 @@ type [] TTypeCacheKey = | _ -> false override this.GetHashCode() : int = - let g = this.tcGlobals - - let ty1Hash = combineHash (hashStamp g this.ty1) (hashTType g this.ty1) - let ty2Hash = combineHash (hashStamp g this.ty2) (hashTType g this.ty2) - - let combined = combineHash (combineHash ty1Hash ty2Hash) (hash this.canCoerce) - - combined + // TODO: we need reasonable uniformity + // The idea is to keep the illusion of immutability of TType. + // This hash must be stable during compilation, otherwise we won't be able to find the keys in the cache. 
+ let rec simpleTypeHash ty = + match ty with + | TType_ucase (u, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash u.CaseName) + | TType_app(tcref, tinst, _) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash tcref.Stamp) + | TType_anon(info, tys) -> tys |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash info.Stamp) + | TType_tuple(_ , tys) -> tys |> hashListOrderMatters (simpleTypeHash) + | TType_forall(tps, tau) -> tps |> Seq.map _.Stamp |> hashListOrderMatters (hash) |> pipeToHash (simpleTypeHash tau) + | TType_fun (d, r, _) -> simpleTypeHash d |> pipeToHash (simpleTypeHash r) + | TType_var _ + | TType_measure _ -> 0 + + hash this.tcGlobals + |> pipeToHash (simpleTypeHash this.ty1) + |> pipeToHash (simpleTypeHash this.ty2) + |> pipeToHash (hash this.canCoerce) + + override this.ToString () = $"{this.ty1.DebugText}-{this.ty2.DebugText}" let getOrCreateTypeSubsumptionCache = let mutable lockObj = obj() From 039cc9ae0615d5db5f48211c011d53704c17101c Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 17 Apr 2025 23:34:48 +0200 Subject: [PATCH 26/44] improve allocations etc --- src/Compiler/Checking/TypeRelations.fs | 24 +- src/Compiler/Checking/import.fs | 7 +- src/Compiler/Utilities/Caches.fs | 294 +++++++++++++------------ 3 files changed, 172 insertions(+), 153 deletions(-) diff --git a/src/Compiler/Checking/TypeRelations.fs b/src/Compiler/Checking/TypeRelations.fs index daa5a656415..fd6e12bc4d7 100644 --- a/src/Compiler/Checking/TypeRelations.fs +++ b/src/Compiler/Checking/TypeRelations.fs @@ -103,7 +103,7 @@ let TypesFeasiblyEquivStripMeasures g amap m ty1 ty2 = let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then - match amap.TypeSubsumptionCache.TryGet(key) with + match amap.TypeSubsumptionCache.TryGetValue(key) with | true, subsumes -> ValueSome subsumes | false, _ -> @@ -113,7 +113,10 @@ let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = let inline UpdateCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key subsumes : unit = if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then - amap.TypeSubsumptionCache.TryAdd(key, subsumes) |> ignore + amap.TypeSubsumptionCache.AddOrUpdate(key, subsumes) + +[] +type ResultWorthCaching = Yes | No /// The feasible coercion relation. Part of the language spec. 
let rec TypeFeasiblySubsumesType ndeep (g: TcGlobals) (amap: ImportMap) m (ty1: TType) (canCoerce: CanCoerce) (ty2: TType) = @@ -131,32 +134,33 @@ let rec TypeFeasiblySubsumesType ndeep (g: TcGlobals) (amap: ImportMap) m (ty1: | ValueSome subsumes -> subsumes | ValueNone -> - let subsumes = + let subsumes, worthCaching = match ty1, ty2 with | TType_measure _, TType_measure _ | TType_var _, _ | _, TType_var _ -> - true + true, ResultWorthCaching.No | TType_app (tc1, l1, _), TType_app (tc2, l2, _) when tyconRefEq g tc1 tc2 -> - List.lengthsEqAndForall2 (TypesFeasiblyEquiv ndeep g amap m) l1 l2 + List.lengthsEqAndForall2 (TypesFeasiblyEquiv ndeep g amap m) l1 l2, ResultWorthCaching.Yes | TType_tuple _, TType_tuple _ | TType_anon _, TType_anon _ | TType_fun _, TType_fun _ -> - TypesFeasiblyEquiv ndeep g amap m ty1 ty2 + TypesFeasiblyEquiv ndeep g amap m ty1 ty2, ResultWorthCaching.Yes | _ -> // F# reference types are subtypes of type 'obj' if isObjTyAnyNullness g ty1 && (canCoerce = CanCoerce || isRefTy g ty2) then - true + true, ResultWorthCaching.No elif isAppTy g ty2 && (canCoerce = CanCoerce || isRefTy g ty2) && TypeFeasiblySubsumesTypeWithSupertypeCheck g amap m ndeep ty1 ty2 then - true + true, ResultWorthCaching.Yes else let interfaces = GetImmediateInterfacesOfType SkipUnrefInterfaces.Yes g amap m ty2 // See if any interface in type hierarchy of ty2 is a supertype of ty1 - List.exists (TypeFeasiblySubsumesType (ndeep + 1) g amap m ty1 NoCoerce) interfaces + List.exists (TypeFeasiblySubsumesType (ndeep + 1) g amap m ty1 NoCoerce) interfaces, ResultWorthCaching.Yes - UpdateCachedTypeSubsumption g amap key subsumes + if worthCaching = ResultWorthCaching.Yes then + UpdateCachedTypeSubsumption g amap key subsumes subsumes diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 34dea504cab..d96ce8b182a 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -120,11 +120,12 @@ let getOrCreateTypeSubsumptionCache = if compilationMode = CompilationMode.OneOff then { CacheOptions.Default with PercentageToEvict = 0 - EvictionMethod = EvictionMethod.KeepAll } + EvictionMethod = EvictionMethod.NoEviction } else { CacheOptions.Default with - EvictionMethod = EvictionMethod.Blocking // EvictionMethod.Background - PercentageToEvict = 15 + EvictionMethod = EvictionMethod.Background + Strategy = CachingStrategy.LRU + PercentageToEvict = 5 MaximumCapacity = 100_000 } cache <- Some (Cache.Create(options)) cache.Value diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 0e327a23b70..372750de78f 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -1,40 +1,14 @@ +// LinkedList uses nulls, so we need to disable the nullability warnings for this file. +#nowarn 3261 namespace FSharp.Compiler open System open System.Collections.Generic open System.Collections.Concurrent open System.Threading -open System.Threading.Tasks open System.Diagnostics open System.Diagnostics.Metrics - -[] -// Default Seq.* function have one issue - when doing `Seq.sortBy`, it will call a `ToArray` on the collection, -// which is *not* calling `ConcurrentDictionary.ToArray`, but uses a custom one instead (treating it as `ICollection`) -// this leads to and exception when trying to evict without locking (The index is equal to or greater than the length of the array, -// or the number of elements in the dictionary is greater than the available space from index to the end of the destination array.) 
-// this is casuedby insertions happened between reading the `Count` and doing the `CopyTo`. -// This solution introduces a custom `ConcurrentDictionary.sortBy` which will be calling a proper `CopyTo`, the one on the ConcurrentDictionary itself. -module internal ConcurrentDictionary = - - open System.Collections - open System.Collections.Generic - - let inline mkSeq f = - { new IEnumerable<'U> with - member _.GetEnumerator() = f () - - interface IEnumerable with - member _.GetEnumerator() = (f () :> IEnumerator) - } - - let inline mkDelayedSeq (f: unit -> IEnumerable<'T>) = mkSeq (fun () -> f().GetEnumerator()) - - let inline sortBy ([] projection) (source: ConcurrentDictionary<_, _>) = - mkDelayedSeq (fun () -> - let array = source.ToArray() - Array.sortInPlaceBy projection array - array :> seq<_>) +open Internal.Utilities.Library [] type internal CachingStrategy = @@ -45,7 +19,7 @@ type internal CachingStrategy = type internal EvictionMethod = | Blocking | Background - | KeepAll + | NoEviction [] type internal CacheOptions = @@ -67,21 +41,78 @@ type internal CacheOptions = } [] -type internal CachedEntity<'Value> = - val Value: 'Value - val mutable LastAccessed: int64 +[] +type internal CachedEntity<'Key, 'Value> = + val mutable Key: 'Key + val mutable Value: 'Value val mutable AccessCount: int64 + val mutable Node: LinkedListNode> - new(value: 'Value) = - { - Value = value - LastAccessed = DateTimeOffset.Now.Ticks - AccessCount = 0L - } + private new(key, value) = { Key = key; Value = value; AccessCount = 0L; Node = Unchecked.defaultof<_> } + + static member Create(key, value) = + let entity = CachedEntity(key, value) + entity.Node <- LinkedListNode(entity) + entity + + member this.ReUse(key, value) = + this.Key <- key + this.Value <- value + this.AccessCount <- 0L + this + + override this.ToString() = $"{this.Key}" + +type internal EvictionQueue<'Key, 'Value>() = + + let list = LinkedList>() + let pool = Queue>() + + member _.Acquire(key, value) = + lock pool <| fun () -> + if pool.Count > 0 then + pool.Dequeue().ReUse(key, value) + else + CachedEntity.Create<_, _>(key, value) + + member _.Add(entity: CachedEntity<'Key, 'Value>) = + lock list <| fun () -> + if isNull entity.Node.List then + list.AddLast(entity.Node) + + member _.Update(entity: CachedEntity<'Key, 'Value>, strategy: CachingStrategy) = + lock list <| fun () -> + entity.AccessCount <- entity.AccessCount + 1L + + let node = entity.Node + + match strategy with + | CachingStrategy.LRU -> + // Just move this node to the end of the list. + list.Remove(node) + list.AddLast(node) + | CachingStrategy.LFU -> + // Bubble up the node in the list, linear time. + // TODO: frequency list approach would be faster. + while (isNotNull node.Next) && (node.Next.Value.AccessCount < node.Value.AccessCount) do + list.Remove(node) + list.AddAfter(node.Next, node) + + member _.GetKeysToEvict(count) = + lock list <| fun () -> + list |> Seq.map _.Key |> Seq.truncate count |> Seq.toArray + + member _.Remove(entity: CachedEntity<_,_>) = + lock list <| fun () -> list.Remove(entity.Node) + // Return to the pool for reuse. 
+ lock pool <| fun () -> pool.Enqueue(entity) + + member _.Count = list.Count module internal CacheMetrics = + let mutable cacheId = 0 - let addInstrumentation (store: ConcurrentDictionary<_, CachedEntity<'Value>>) = + let addInstrumentation (store: ConcurrentDictionary<_, CachedEntity<_,_>>) = let meter = new Meter("FSharp.Compiler.Caches") let uid = Interlocked.Increment &cacheId @@ -90,8 +121,6 @@ module internal CacheMetrics = if vs |> Seq.isEmpty then 0L else f vs let _ = meter.CreateObservableGauge($"cache{uid}", (fun () -> int64 store.Count)) - //let _ = meter.CreateObservableGauge($"MRA{uid}", orZero (Seq.map _.LastAccessed >> Seq.max)) - //let _ = meter.CreateObservableGauge($"LRA{uid}", orZero (Seq.map _.LastAccessed >> Seq.min)) let _ = meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) let _ = meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) @@ -115,18 +144,92 @@ module internal CacheMetrics = [] [] -type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) = +type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> private (options: CacheOptions, capacity, cts: CancellationTokenSource) = let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() let eviction = Event<_>() let evictionFail = Event<_>() - // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. - let store = ConcurrentDictionary<_, CachedEntity<'Value>>(options.LevelOfConcurrency, capacity) + let store = ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) + + let evictionQueue = EvictionQueue<'Key, 'Value>() + + let tryEvictItems () = + let count = if store.Count > options.MaximumCapacity then store.Count - options.MaximumCapacity else 0 + for key in evictionQueue.GetKeysToEvict(count) do + match store.TryRemove(key) with + | true, removed -> + evictionQueue.Remove(removed) + eviction.Trigger(key) + | _ -> + evictionFail.Trigger(key) + + let rec backgroundEviction () = + async { + tryEvictItems () + + let utilization = (float store.Count / float options.MaximumCapacity) + // So, based on utilization this will scale the delay between 0 and 1 seconds. + // Worst case scenario would be when 1 second delay happens, + // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. + // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. + // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. + let delay = 1000.0 - (1000.0 * utilization) + + if delay > 0.0 then + do! Async.Sleep (int delay) + + return! 
backgroundEviction () + } + + do if options.EvictionMethod = EvictionMethod.Background then + Async.Start(backgroundEviction (), cancellationToken = cts.Token) do CacheMetrics.addInstrumentation store eviction.Publish cacheHit.Publish cacheMiss.Publish evictionFail.Publish + let tryEvict () = + if options.EvictionMethod.IsBlocking then tryEvictItems() + + let tryGet (key: 'Key) = + match store.TryGetValue(key) with + | true, cachedEntity -> + evictionQueue.Update(cachedEntity, options.Strategy) + Some cachedEntity + | _ -> + None + + member _.TryGetValue (key: 'Key, value: outref<'Value>) = + match tryGet key with + | Some cachedEntity -> + cacheHit.Trigger(key, cachedEntity.Value) + value <- cachedEntity.Value + true + | _ -> + cacheMiss.Trigger(key) + value <- Unchecked.defaultof<'Value> + false + + member _.TryAdd(key: 'Key, value: 'Value) = + tryEvict() + + let cachedEntity = evictionQueue.Acquire(key, value) + if store.TryAdd(key, cachedEntity) then + evictionQueue.Add(cachedEntity) + true + else + false + + member _.AddOrUpdate(key: 'Key, value: 'Value) = + tryEvict() + + let entity = store.AddOrUpdate( + key, + (fun _ -> evictionQueue.Acquire(key, value)), + (fun _ (current: CachedEntity<_, _>) -> current.Value <- value; current) + ) + evictionQueue.Add(entity) + [] member val CacheHit = cacheHit.Publish @@ -136,109 +239,20 @@ type internal Cache<'Key, 'Value> private (options: CacheOptions, capacity, cts) [] member val Eviction = eviction.Publish + [] + member val EvictionFail = evictionFail.Publish + static member Create(options: CacheOptions) = + // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let capacity = options.MaximumCapacity + (options.MaximumCapacity * options.PercentageToEvict / 100) let cts = new CancellationTokenSource() - let cache = new Cache<'Key, 'Value>(options, capacity, cts) - - if options.EvictionMethod = EvictionMethod.Background then - Task.Run(cache.TryEvictTask, cts.Token) |> ignore - - cache - - //member this.GetStats() = - // {| - // Capacity = options.MaximumCapacity - // PercentageToEvict = options.PercentageToEvict - // Strategy = options.Strategy - // LevelOfConcurrency = options.LevelOfConcurrency - // Count = this.Store.Count - // MostRecentlyAccesssed = this.Store.Values |> Seq.maxBy _.LastAccessed |> _.LastAccessed - // LeastRecentlyAccesssed = this.Store.Values |> Seq.minBy _.LastAccessed |> _.LastAccessed - // MostFrequentlyAccessed = this.Store.Values |> Seq.maxBy _.AccessCount |> _.AccessCount - // LeastFrequentlyAccessed = this.Store.Values |> Seq.minBy _.AccessCount |> _.AccessCount - // |} - - member private this.CalculateEvictionCount() = - if store.Count >= options.MaximumCapacity then - (store.Count - options.MaximumCapacity) - + (options.MaximumCapacity * options.PercentageToEvict / 100) - else - 0 - - // TODO: All of these are proofs of concept, a very naive implementation of eviction strategies, it will always walk the dictionary to find the items to evict, this is not efficient. - member private this.TryGetPickToEvict() = - store - |> match options.Strategy with - | CachingStrategy.LRU -> ConcurrentDictionary.sortBy _.Value.LastAccessed - | CachingStrategy.LFU -> ConcurrentDictionary.sortBy _.Value.AccessCount - |> Seq.take (this.CalculateEvictionCount()) - |> Seq.map (fun x -> x.Key) - - // TODO: Explore an eviction shortcut, some sort of list of keys to evict first, based on the strategy. 
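
The EvictionQueue introduced above is essentially the shortcut that the TODO just above asks for: entries stay in a linked list with the best eviction candidates at the front, so an eviction pass only takes the first N keys instead of sorting a snapshot of the whole dictionary. Reduced to the core ordering idea (ignoring the locking and node pooling), the LFU case behaves like this sketch:

let evictionOrder (accessCounts: (string * int64) list) count =
    accessCounts
    |> List.sortBy snd // least frequently used first
    |> List.truncate count
    |> List.map fst

// evictionOrder [ "A", 5L; "B", 1L; "C", 3L ] 1 = [ "B" ]
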
- member private this.TryEvictItems() = - if this.CalculateEvictionCount() > 0 then - for key in this.TryGetPickToEvict() do - match store.TryRemove(key) with - | true, _ -> eviction.Trigger(key) - | _ -> evictionFail.Trigger(key) // TODO: We probably want to count eviction misses as well? - - // TODO: Shall this be a safer task, wrapping everything in try .. with, so it's not crashing silently? - member private this.TryEvictTask() = - backgroundTask { - while not cts.Token.IsCancellationRequested do - this.TryEvictItems() - - let utilization = (float store.Count / float options.MaximumCapacity) - // So, based on utilization this will scale the delay between 0 and 1 seconds. - // Worst case scenario would be when 1 second delay happens, - // if the cache will grow rapidly (or in bursts), it will go beyond the maximum capacity. - // In this case underlying dictionary will resize, AND we will have to evict items, which will likely be slow. - // In this case, cache stats should be used to adjust MaximumCapacity and PercentageToEvict. - let delay = 1000.0 - (1000.0 * utilization) - - if delay > 0.0 then - do! Task.Delay(int delay) - } - - member this.TryEvict() = - if this.CalculateEvictionCount() > 0 then - match options.EvictionMethod with - | EvictionMethod.Blocking -> this.TryEvictItems() - | EvictionMethod.Background - | EvictionMethod.KeepAll -> () - - member this.TryGet(key, value: outref<'Value>) = - match store.TryGetValue(key) with - | true, cachedEntity -> - // this is fine to be non-atomic, I guess, we are okay with race if the time is within the time of multiple concurrent calls. - cachedEntity.LastAccessed <- DateTimeOffset.Now.Ticks - let _ = Interlocked.Increment(&cachedEntity.AccessCount) - cacheHit.Trigger(key, cachedEntity.Value) - value <- cachedEntity.Value - true - | _ -> - cacheMiss.Trigger(key) - value <- Unchecked.defaultof<'Value> - false - - member this.TryAdd(key, value: 'Value, ?update: bool) = - let update = defaultArg update false - - this.TryEvict() - - let value = CachedEntity<'Value>(value) - - if update then - let _ = store.AddOrUpdate(key, value, (fun _ _ -> value)) - true - else - store.TryAdd(key, value) + new Cache<'Key, 'Value>(options, capacity, cts) interface IDisposable with member _.Dispose() = cts.Cancel() member this.Dispose() = (this :> IDisposable).Dispose() + From 0e8e6cbf36a2575fbb2d31aa48eefc2efe08ca99 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 17 Apr 2025 23:45:31 +0200 Subject: [PATCH 27/44] format --- src/Compiler/Driver/CompilerImports.fs | 2 +- src/Compiler/Utilities/Caches.fs | 126 +++++++++++------- src/Compiler/Utilities/TypeHashing.fs | 6 +- .../src/FSharp.Editor/Common/Logging.fs | 37 ++--- .../LanguageService/LanguageService.fs | 14 +- 5 files changed, 114 insertions(+), 71 deletions(-) diff --git a/src/Compiler/Driver/CompilerImports.fs b/src/Compiler/Driver/CompilerImports.fs index 41cffb15506..4ab1ca3d7e4 100644 --- a/src/Compiler/Driver/CompilerImports.fs +++ b/src/Compiler/Driver/CompilerImports.fs @@ -1304,7 +1304,7 @@ and [] TcImports true | None -> false | None -> false - + member internal _.Base = CheckDisposed() importsBase diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 372750de78f..0e4be27ecdc 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -48,7 +48,13 @@ type internal CachedEntity<'Key, 'Value> = val mutable AccessCount: int64 val mutable Node: LinkedListNode> - private new(key, value) 
= { Key = key; Value = value; AccessCount = 0L; Node = Unchecked.defaultof<_> } + private new(key, value) = + { + Key = key + Value = value + AccessCount = 0L + Node = Unchecked.defaultof<_> + } static member Create(key, value) = let entity = CachedEntity(key, value) @@ -69,19 +75,22 @@ type internal EvictionQueue<'Key, 'Value>() = let pool = Queue>() member _.Acquire(key, value) = - lock pool <| fun () -> - if pool.Count > 0 then - pool.Dequeue().ReUse(key, value) - else - CachedEntity.Create<_, _>(key, value) + lock pool + <| fun () -> + if pool.Count > 0 then + pool.Dequeue().ReUse(key, value) + else + CachedEntity.Create<_, _>(key, value) member _.Add(entity: CachedEntity<'Key, 'Value>) = - lock list <| fun () -> + lock list + <| fun () -> if isNull entity.Node.List then list.AddLast(entity.Node) member _.Update(entity: CachedEntity<'Key, 'Value>, strategy: CachingStrategy) = - lock list <| fun () -> + lock list + <| fun () -> entity.AccessCount <- entity.AccessCount + 1L let node = entity.Node @@ -95,34 +104,40 @@ type internal EvictionQueue<'Key, 'Value>() = // Bubble up the node in the list, linear time. // TODO: frequency list approach would be faster. while (isNotNull node.Next) && (node.Next.Value.AccessCount < node.Value.AccessCount) do - list.Remove(node) - list.AddAfter(node.Next, node) + list.Remove(node) + list.AddAfter(node.Next, node) member _.GetKeysToEvict(count) = - lock list <| fun () -> - list |> Seq.map _.Key |> Seq.truncate count |> Seq.toArray + lock list + <| fun () -> list |> Seq.map _.Key |> Seq.truncate count |> Seq.toArray - member _.Remove(entity: CachedEntity<_,_>) = + member _.Remove(entity: CachedEntity<_, _>) = lock list <| fun () -> list.Remove(entity.Node) // Return to the pool for reuse. lock pool <| fun () -> pool.Enqueue(entity) - member _.Count = list.Count + member _.Count = list.Count module internal CacheMetrics = let mutable cacheId = 0 - let addInstrumentation (store: ConcurrentDictionary<_, CachedEntity<_,_>>) = + + let addInstrumentation (store: ConcurrentDictionary<_, CachedEntity<_, _>>) = let meter = new Meter("FSharp.Compiler.Caches") let uid = Interlocked.Increment &cacheId - let orZero f = fun () -> - let vs = store.Values - if vs |> Seq.isEmpty then 0L else f vs + let orZero f = + fun () -> + let vs = store.Values + if vs |> Seq.isEmpty then 0L else f vs let _ = meter.CreateObservableGauge($"cache{uid}", (fun () -> int64 store.Count)) - let _ = meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) - let _ = meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) + + let _ = + meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) + + let _ = + meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) let mutable evictions = 0L let mutable fails = 0L @@ -136,36 +151,50 @@ module internal CacheMetrics = hit |> Event.add (fun _ -> Interlocked.Increment &hits |> ignore) miss |> Event.add (fun _ -> Interlocked.Increment &misses |> ignore) - let _ = meter.CreateObservableGauge($"evicted{uid}", fun () -> Interlocked.Exchange(&evictions, 0L)) - let _ = meter.CreateObservableGauge($"fails{uid}", fun () -> Interlocked.Exchange(&fails, 0L)) - let _ = meter.CreateObservableGauge($"hits{uid}", fun () -> Interlocked.Exchange(&hits, 0L)) - let _ = meter.CreateObservableGauge($"misses{uid}", fun () -> Interlocked.Exchange(&misses, 0L)) + let _ = + meter.CreateObservableGauge($"evicted{uid}", fun () -> Interlocked.Exchange(&evictions, 0L)) + 
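
For orientation, this is roughly how a consumer exercises the cache surface settled on in the preceding commits: Create, TryGetValue with an implicit out argument, AddOrUpdate, and the eviction event. The key and value types, the option tweaks and the printfn sinks are invented for the example:

let demoCacheUsage () =
    use cache =
        Cache<string, int>.Create(
            { CacheOptions.Default with
                MaximumCapacity = 1000
                EvictionMethod = EvictionMethod.Blocking })

    cache.Eviction |> Event.add (fun key -> printfn $"evicted: {key}")

    if cache.TryAdd("answer", 42) then
        printfn "added"

    match cache.TryGetValue("answer") with
    | true, value -> printfn $"hit: {value}"
    | false, _ -> printfn "miss"

    cache.AddOrUpdate("answer", 43)
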
+ let _ = + meter.CreateObservableGauge($"fails{uid}", fun () -> Interlocked.Exchange(&fails, 0L)) + + let _ = + meter.CreateObservableGauge($"hits{uid}", fun () -> Interlocked.Exchange(&hits, 0L)) + + let _ = + meter.CreateObservableGauge($"misses{uid}", fun () -> Interlocked.Exchange(&misses, 0L)) + () [] [] -type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> private (options: CacheOptions, capacity, cts: CancellationTokenSource) = +type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> + private (options: CacheOptions, capacity, cts: CancellationTokenSource) = let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() let eviction = Event<_>() let evictionFail = Event<_>() - - let store = ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) + + let store = + ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) let evictionQueue = EvictionQueue<'Key, 'Value>() let tryEvictItems () = - let count = if store.Count > options.MaximumCapacity then store.Count - options.MaximumCapacity else 0 + let count = + if store.Count > options.MaximumCapacity then + store.Count - options.MaximumCapacity + else + 0 + for key in evictionQueue.GetKeysToEvict(count) do match store.TryRemove(key) with | true, removed -> evictionQueue.Remove(removed) eviction.Trigger(key) - | _ -> - evictionFail.Trigger(key) + | _ -> evictionFail.Trigger(key) - let rec backgroundEviction () = + let rec backgroundEviction () = async { tryEvictItems () @@ -178,28 +207,29 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> private let delay = 1000.0 - (1000.0 * utilization) if delay > 0.0 then - do! Async.Sleep (int delay) + do! Async.Sleep(int delay) return! 
backgroundEviction () } - do if options.EvictionMethod = EvictionMethod.Background then - Async.Start(backgroundEviction (), cancellationToken = cts.Token) + do + if options.EvictionMethod = EvictionMethod.Background then + Async.Start(backgroundEviction (), cancellationToken = cts.Token) do CacheMetrics.addInstrumentation store eviction.Publish cacheHit.Publish cacheMiss.Publish evictionFail.Publish let tryEvict () = - if options.EvictionMethod.IsBlocking then tryEvictItems() + if options.EvictionMethod.IsBlocking then + tryEvictItems () let tryGet (key: 'Key) = match store.TryGetValue(key) with | true, cachedEntity -> evictionQueue.Update(cachedEntity, options.Strategy) Some cachedEntity - | _ -> - None + | _ -> None - member _.TryGetValue (key: 'Key, value: outref<'Value>) = + member _.TryGetValue(key: 'Key, value: outref<'Value>) = match tryGet key with | Some cachedEntity -> cacheHit.Trigger(key, cachedEntity.Value) @@ -211,9 +241,10 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> private false member _.TryAdd(key: 'Key, value: 'Value) = - tryEvict() + tryEvict () let cachedEntity = evictionQueue.Acquire(key, value) + if store.TryAdd(key, cachedEntity) then evictionQueue.Add(cachedEntity) true @@ -221,13 +252,17 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> private false member _.AddOrUpdate(key: 'Key, value: 'Value) = - tryEvict() + tryEvict () + + let entity = + store.AddOrUpdate( + key, + (fun _ -> evictionQueue.Acquire(key, value)), + (fun _ (current: CachedEntity<_, _>) -> + current.Value <- value + current) + ) - let entity = store.AddOrUpdate( - key, - (fun _ -> evictionQueue.Acquire(key, value)), - (fun _ (current: CachedEntity<_, _>) -> current.Value <- value; current) - ) evictionQueue.Add(entity) [] @@ -255,4 +290,3 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> private member _.Dispose() = cts.Cancel() member this.Dispose() = (this :> IDisposable).Dispose() - diff --git a/src/Compiler/Utilities/TypeHashing.fs b/src/Compiler/Utilities/TypeHashing.fs index 1475635972d..37c59b48207 100644 --- a/src/Compiler/Utilities/TypeHashing.fs +++ b/src/Compiler/Utilities/TypeHashing.fs @@ -129,9 +129,9 @@ module rec HashTypes = let rec stampEquals g ty1 ty2 = match (stripTyEqns g ty1), (stripTyEqns g ty2) with | TType_app(tcref1, tinst1, _), TType_app(tcref2, tinst2, _) -> - tcref1.Stamp = tcref2.Stamp && - tinst1.Length = tinst2.Length && - tinst1 |> List.zip tinst2 |> List.forall (fun (t1, t2) -> stampEquals g t1 t2) + tcref1.Stamp = tcref2.Stamp + && tinst1.Length = tinst2.Length + && tinst1 |> List.zip tinst2 |> List.forall (fun (t1, t2) -> stampEquals g t1 t2) | TType_var(r1, _), TType_var(r2, _) -> r1.Stamp.Equals(r2.Stamp) | _ -> false diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index beddfb8f823..0ddca9ed070 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -150,30 +150,39 @@ module FSharpServiceTelemetry = let logCacheMetricsToOutput () = let instruments = Collections.Generic.Dictionary() - let listener = new MeterListener( - InstrumentPublished = fun instrument l -> - if instrument.Meter.Name = "FSharp.Compiler.Caches" then - instruments[instrument.Name] <- 0L - l.EnableMeasurementEvents(instrument) - ) + + let listener = + new MeterListener( + InstrumentPublished = + fun instrument l -> + if instrument.Meter.Name = "FSharp.Compiler.Caches" then + 
instruments[instrument.Name] <- 0L + l.EnableMeasurementEvents(instrument) + ) let callBack = MeasurementCallback(fun instr v _ _ -> instruments[instr.Name] <- v) listener.SetMeasurementEventCallback callBack listener.Start() - + let msg = Event() backgroundTask { while true do do! System.Threading.Tasks.Task.Delay(1000) listener.RecordObservableInstruments() + if instruments.Count > 0 then - [ for kvp in instruments -> $"{kvp.Key}: {kvp.Value}"] + [ for kvp in instruments -> $"{kvp.Key}: {kvp.Value}" ] |> String.concat ", " |> msg.Trigger - } |> ignore + } + |> ignore - msg.Publish |> Event.pairwise |> Event.filter (fun (x, y) -> x <> y) |> Event.map snd |> Event.add logMsg + msg.Publish + |> Event.pairwise + |> Event.filter (fun (x, y) -> x <> y) + |> Event.map snd + |> Event.add logMsg #if DEBUG open OpenTelemetry.Resources @@ -183,10 +192,8 @@ module FSharpServiceTelemetry = let export () = let meterProvider = // Configure OpenTelemetry metrics. Metrics can be viewed in Prometheus or other compatible tools. - OpenTelemetry.Sdk - .CreateMeterProviderBuilder() - .AddOtlpExporter() - .Build() + OpenTelemetry.Sdk.CreateMeterProviderBuilder().AddOtlpExporter().Build() + let tracerProvider = // Configure OpenTelemetry export. Traces can be viewed in Jaeger or other compatible tools. OpenTelemetry.Sdk @@ -205,4 +212,4 @@ module FSharpServiceTelemetry = meterProvider.Dispose() let listenToAll () = listen "" -#endif \ No newline at end of file +#endif diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index 738cf2f6135..0c75a92552e 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -343,15 +343,17 @@ type internal FSharpPackage() as this = // FSI-LINKAGE-POINT: unsited init do FSharp.Interactive.Hooks.fsiConsoleWindowPackageCtorUnsited (this :> Package) - do Logging.FSharpServiceTelemetry.logCacheMetricsToOutput() + do Logging.FSharpServiceTelemetry.logCacheMetricsToOutput () - #if DEBUG - let flushTelemetry = Logging.FSharpServiceTelemetry.export() +#if DEBUG + let flushTelemetry = Logging.FSharpServiceTelemetry.export () - override this.Dispose (disposing: bool) = + override this.Dispose(disposing: bool) = base.Dispose(disposing: bool) - if disposing then flushTelemetry() - #endif + + if disposing then + flushTelemetry () +#endif override this.InitializeAsync(cancellationToken: CancellationToken, progress: IProgress) : Tasks.Task = // `base.` methods can't be called in the `async` builder, so we have to cache it From 474081ae669b856d21384c07c25a037bd8b8770d Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 17 Apr 2025 23:46:57 +0200 Subject: [PATCH 28/44] ilverify --- ...lverify_FSharp.Compiler.Service_Debug_net9.0.bsl | 13 ++++++------- ...FSharp.Compiler.Service_Debug_netstandard2.0.bsl | 13 ++++++------- ...erify_FSharp.Compiler.Service_Release_net9.0.bsl | 12 ++++++------ ...harp.Compiler.Service_Release_netstandard2.0.bsl | 13 ++++++------- 4 files changed, 24 insertions(+), 27 deletions(-) diff --git a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl index 219a0ec11db..bd581b3d765 100644 --- a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl +++ b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_net9.0.bsl @@ -5,7 +5,6 @@ [IL]: Error 
[UnmanagedPointer]: : FSharp.Compiler.IO.RawByteMemory::.ctor(uint8*, int32, object)][offset 0x00000009] Unmanaged pointers are not a verifiable type. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::get_Item(int32)][offset 0x0000001E][found Native Int] Expected ByRef on the stack. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::set_Item(int32, uint8)][offset 0x00000025][found Native Int] Expected ByRef on the stack. -[IL]: Error [StackUnexpected]: : FSharp.Compiler.Cache`2::TryGetPickToEvict()][offset 0x0000005A][found ref 'object'][expected ref '[S.P.CoreLib]System.Collections.Generic.IEnumerable`1>>'] Unexpected type on the stack. [IL]: Error [ReturnPtrToStack]: : Internal.Utilities.Text.Lexing.LexBuffer`1::get_LexemeView()][offset 0x00000019] Return type is ByRef, TypedReference, ArgHandle, or ArgIterator. [IL]: Error [StackUnexpected]: : Internal.Utilities.Text.Lexing.UnicodeTables::scanUntilSentinel([FSharp.Compiler.Service]Internal.Utilities.Text.Lexing.LexBuffer`1, int32)][offset 0x00000079][found Short] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Xml.XmlDoc::processLines([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1)][offset 0x00000031][found Char] Unexpected type on the stack. @@ -22,14 +21,14 @@ [IL]: Error [StackUnexpected]: : FSharp.Compiler.CodeAnalysis.Hosted.CompilerHelpers::fscCompile([FSharp.Compiler.Service]FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver, string, string[])][offset 0x00000082][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.CodeAnalysis.Hosted.CompilerHelpers::fscCompile([FSharp.Compiler.Service]FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver, string, string[])][offset 0x0000008B][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+MagicAssemblyResolution::ResolveAssemblyCore([FSharp.Compiler.Service]Internal.Utilities.Library.CompilationThreadToken, [FSharp.Compiler.Service]FSharp.Compiler.Text.Range, [FSharp.Compiler.Service]FSharp.Compiler.CompilerConfig+TcConfigBuilder, [FSharp.Compiler.Service]FSharp.Compiler.CompilerImports+TcImports, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiDynamicCompiler, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiConsoleOutput, string)][offset 0x00000015][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-805::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001E5][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-812::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001E5][found Char] Unexpected type on the stack. [IL]: Error [UnmanagedPointer]: : FSharp.Compiler.Interactive.Shell+Utilities+pointerToNativeInt@110::Invoke(object)][offset 0x00000007] Unmanaged pointers are not a verifiable type. [IL]: Error [StackUnexpected]: : .$FSharpCheckerResults+dataTipOfReferences@2225::Invoke([FSharp.Core]Microsoft.FSharp.Core.Unit)][offset 0x00000084][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. 
-[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000082][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000008B][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000094][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000082][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000008B][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000094][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.StaticLinking+TypeForwarding::followTypeForwardForILTypeRef([FSharp.Compiler.Service]FSharp.Compiler.AbstractIL.IL+ILTypeRef)][offset 0x00000010][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.CompilerOptions::getCompilerOption([FSharp.Compiler.Service]FSharp.Compiler.CompilerOptions+CompilerOption, [FSharp.Core]Microsoft.FSharp.Core.FSharpOption`1)][offset 0x000000E6][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.CompilerOptions::AddPathMapping([FSharp.Compiler.Service]FSharp.Compiler.CompilerConfig+TcConfigBuilder, string)][offset 0x0000000B][found Char] Unexpected type on the stack. diff --git a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl index 209cabb338b..752b1e98415 100644 --- a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl +++ b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Debug_netstandard2.0.bsl @@ -5,7 +5,6 @@ [IL]: Error [UnmanagedPointer]: : FSharp.Compiler.IO.RawByteMemory::.ctor(uint8*, int32, object)][offset 0x00000009] Unmanaged pointers are not a verifiable type. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::get_Item(int32)][offset 0x0000001E][found Native Int] Expected ByRef on the stack. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::set_Item(int32, uint8)][offset 0x00000025][found Native Int] Expected ByRef on the stack. 
-[IL]: Error [StackUnexpected]: : FSharp.Compiler.Cache`2::TryGetPickToEvict()][offset 0x0000005A][found ref 'object'][expected ref '[S.P.CoreLib]System.Collections.Generic.IEnumerable`1>>'] Unexpected type on the stack. [IL]: Error [ReturnPtrToStack]: : Internal.Utilities.Text.Lexing.LexBuffer`1::get_LexemeView()][offset 0x00000019] Return type is ByRef, TypedReference, ArgHandle, or ArgIterator. [IL]: Error [StackUnexpected]: : Internal.Utilities.Text.Lexing.UnicodeTables::scanUntilSentinel([FSharp.Compiler.Service]Internal.Utilities.Text.Lexing.LexBuffer`1, int32)][offset 0x00000079][found Short] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Xml.XmlDoc::processLines([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1)][offset 0x00000031][found Char] Unexpected type on the stack. @@ -29,18 +28,18 @@ [IL]: Error [StackUnexpected]: : FSharp.Compiler.CodeAnalysis.Hosted.CompilerHelpers::fscCompile([FSharp.Compiler.Service]FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver, string, string[])][offset 0x0000008B][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+FsiStdinSyphon::GetLine(string, int32)][offset 0x00000039][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+MagicAssemblyResolution::ResolveAssemblyCore([FSharp.Compiler.Service]Internal.Utilities.Library.CompilationThreadToken, [FSharp.Compiler.Service]FSharp.Compiler.Text.Range, [FSharp.Compiler.Service]FSharp.Compiler.CompilerConfig+TcConfigBuilder, [FSharp.Compiler.Service]FSharp.Compiler.CompilerImports+TcImports, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiDynamicCompiler, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiConsoleOutput, string)][offset 0x00000015][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-805::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001E5][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-812::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001E5][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+FsiInteractionProcessor::CompletionsForPartialLID([FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiDynamicCompilerState, string)][offset 0x0000001B][found Char] Unexpected type on the stack. [IL]: Error [UnmanagedPointer]: : FSharp.Compiler.Interactive.Shell+Utilities+pointerToNativeInt@110::Invoke(object)][offset 0x00000007] Unmanaged pointers are not a verifiable type. [IL]: Error [StackUnexpected]: : .$FSharpCheckerResults+dataTipOfReferences@2225::Invoke([FSharp.Core]Microsoft.FSharp.Core.Unit)][offset 0x00000084][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.EditorServices.AssemblyContent+traverseMemberFunctionAndValues@176::Invoke([FSharp.Compiler.Service]FSharp.Compiler.Symbols.FSharpMemberOrFunctionOrValue)][offset 0x00000059][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.EditorServices.AssemblyContent+traverseEntity@218::GenerateNext([S.P.CoreLib]System.Collections.Generic.IEnumerable`1&)][offset 0x000000DA][found Char] Unexpected type on the stack. 
[IL]: Error [StackUnexpected]: : FSharp.Compiler.EditorServices.ParsedInput+visitor@1424-6::VisitExpr([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1, [FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2>, [FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2>, [FSharp.Compiler.Service]FSharp.Compiler.Syntax.SynExpr)][offset 0x00000605][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000082][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000008B][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-509::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000094][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000082][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000008B][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-516::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000094][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : .$Symbols+fullName@2495-1::Invoke([FSharp.Core]Microsoft.FSharp.Core.Unit)][offset 0x00000015][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.CreateILModule+MainModuleBuilder::ConvertProductVersionToILVersionInfo(string)][offset 0x00000011][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.StaticLinking+TypeForwarding::followTypeForwardForILTypeRef([FSharp.Compiler.Service]FSharp.Compiler.AbstractIL.IL+ILTypeRef)][offset 0x00000010][found Char] Unexpected type on the stack. 
diff --git a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_net9.0.bsl b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_net9.0.bsl index 4e7b5396676..d171cb2277a 100644 --- a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_net9.0.bsl +++ b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_net9.0.bsl @@ -21,13 +21,13 @@ [IL]: Error [StackUnexpected]: : FSharp.Compiler.CodeAnalysis.Hosted.CompilerHelpers::fscCompile([FSharp.Compiler.Service]FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver, string, string[])][offset 0x00000082][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.CodeAnalysis.Hosted.CompilerHelpers::fscCompile([FSharp.Compiler.Service]FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver, string, string[])][offset 0x0000008B][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+MagicAssemblyResolution::ResolveAssemblyCore([FSharp.Compiler.Service]Internal.Utilities.Library.CompilationThreadToken, [FSharp.Compiler.Service]FSharp.Compiler.Text.Range, [FSharp.Compiler.Service]FSharp.Compiler.CompilerConfig+TcConfigBuilder, [FSharp.Compiler.Service]FSharp.Compiler.CompilerImports+TcImports, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiDynamicCompiler, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiConsoleOutput, string)][offset 0x00000015][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-849::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001C7][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-856::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001C7][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : .$FSharpCheckerResults+GetReferenceResolutionStructuredToolTipText@2225::Invoke([FSharp.Core]Microsoft.FSharp.Core.Unit)][offset 0x00000076][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000064][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000006D][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000076][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. 
+[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000064][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000006D][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000076][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Driver+ProcessCommandLineFlags@291-1::Invoke(string)][offset 0x0000000B][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Driver+ProcessCommandLineFlags@291-1::Invoke(string)][offset 0x00000014][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.StaticLinking+TypeForwarding::followTypeForwardForILTypeRef([FSharp.Compiler.Service]FSharp.Compiler.AbstractIL.IL+ILTypeRef)][offset 0x00000010][found Char] Unexpected type on the stack. diff --git a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl index 8d926f8c113..3b30273904b 100644 --- a/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl +++ b/tests/ILVerify/ilverify_FSharp.Compiler.Service_Release_netstandard2.0.bsl @@ -5,7 +5,6 @@ [IL]: Error [UnmanagedPointer]: : FSharp.Compiler.IO.RawByteMemory::.ctor(uint8*, int32, object)][offset 0x00000009] Unmanaged pointers are not a verifiable type. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::get_Item(int32)][offset 0x0000001A][found Native Int] Expected ByRef on the stack. [IL]: Error [StackByRef]: : FSharp.Compiler.IO.RawByteMemory::set_Item(int32, uint8)][offset 0x0000001B][found Native Int] Expected ByRef on the stack. -[IL]: Error [StackUnexpected]: : FSharp.Compiler.Cache`2::TryGetPickToEvict()][offset 0x00000034][found ref 'object'][expected ref '[S.P.CoreLib]System.Collections.Generic.IEnumerable`1>>'] Unexpected type on the stack. [IL]: Error [ReturnPtrToStack]: : Internal.Utilities.Text.Lexing.LexBuffer`1::get_LexemeView()][offset 0x00000017] Return type is ByRef, TypedReference, ArgHandle, or ArgIterator. [IL]: Error [StackUnexpected]: : Internal.Utilities.Text.Lexing.UnicodeTables::scanUntilSentinel([FSharp.Compiler.Service]Internal.Utilities.Text.Lexing.LexBuffer`1, int32)][offset 0x0000008D][found Short] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Xml.XmlDoc::processLines([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1)][offset 0x0000002C][found Char] Unexpected type on the stack. @@ -29,17 +28,17 @@ [IL]: Error [StackUnexpected]: : FSharp.Compiler.CodeAnalysis.Hosted.CompilerHelpers::fscCompile([FSharp.Compiler.Service]FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver, string, string[])][offset 0x0000008B][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+FsiStdinSyphon::GetLine(string, int32)][offset 0x00000032][found Char] Unexpected type on the stack. 
[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+MagicAssemblyResolution::ResolveAssemblyCore([FSharp.Compiler.Service]Internal.Utilities.Library.CompilationThreadToken, [FSharp.Compiler.Service]FSharp.Compiler.Text.Range, [FSharp.Compiler.Service]FSharp.Compiler.CompilerConfig+TcConfigBuilder, [FSharp.Compiler.Service]FSharp.Compiler.CompilerImports+TcImports, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiDynamicCompiler, [FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiConsoleOutput, string)][offset 0x00000015][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-849::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001C7][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+clo@3502-856::Invoke([S.P.CoreLib]System.Tuple`3)][offset 0x000001C7][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Interactive.Shell+FsiInteractionProcessor::CompletionsForPartialLID([FSharp.Compiler.Service]FSharp.Compiler.Interactive.Shell+FsiDynamicCompilerState, string)][offset 0x00000024][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : .$FSharpCheckerResults+GetReferenceResolutionStructuredToolTipText@2225::Invoke([FSharp.Core]Microsoft.FSharp.Core.Unit)][offset 0x00000076][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.EditorServices.AssemblyContent+traverseMemberFunctionAndValues@176::Invoke([FSharp.Compiler.Service]FSharp.Compiler.Symbols.FSharpMemberOrFunctionOrValue)][offset 0x0000002B][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.EditorServices.AssemblyContent+traverseEntity@218::GenerateNext([S.P.CoreLib]System.Collections.Generic.IEnumerable`1&)][offset 0x000000BB][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.EditorServices.ParsedInput+visitor@1424-11::VisitExpr([FSharp.Core]Microsoft.FSharp.Collections.FSharpList`1, [FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2>, [FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2>, [FSharp.Compiler.Service]FSharp.Compiler.Syntax.SynExpr)][offset 0x00000620][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000064][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000006D][found Char] Unexpected type on the stack. -[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-530::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000076][found Char] Unexpected type on the stack. 
+[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000032][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000003B][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000064][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x0000006D][found Char] Unexpected type on the stack. +[IL]: Error [StackUnexpected]: : .$ServiceLexing+clo@921-537::Invoke([FSharp.Core]Microsoft.FSharp.Core.FSharpFunc`2,Microsoft.FSharp.Core.Unit>)][offset 0x00000076][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : .$Symbols+fullName@2495-3::Invoke([FSharp.Core]Microsoft.FSharp.Core.Unit)][offset 0x00000030][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Driver+ProcessCommandLineFlags@291-1::Invoke(string)][offset 0x0000000B][found Char] Unexpected type on the stack. [IL]: Error [StackUnexpected]: : FSharp.Compiler.Driver+ProcessCommandLineFlags@291-1::Invoke(string)][offset 0x00000014][found Char] Unexpected type on the stack. From c0ddd924903869cc964640b64c9c3b4516a26e5c Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Fri, 18 Apr 2025 09:20:03 +0200 Subject: [PATCH 29/44] fix --- src/Compiler/Utilities/Caches.fs | 64 ++++++++++++++++++++------------ 1 file changed, 40 insertions(+), 24 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 0e4be27ecdc..50fbde44db6 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -69,7 +69,7 @@ type internal CachedEntity<'Key, 'Value> = override this.ToString() = $"{this.Key}" -type internal EvictionQueue<'Key, 'Value>() = +type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = let list = LinkedList>() let pool = Queue>() @@ -82,30 +82,46 @@ type internal EvictionQueue<'Key, 'Value>() = else CachedEntity.Create<_, _>(key, value) - member _.Add(entity: CachedEntity<'Key, 'Value>) = + member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = lock list <| fun () -> if isNull entity.Node.List then - list.AddLast(entity.Node) - - member _.Update(entity: CachedEntity<'Key, 'Value>, strategy: CachingStrategy) = + match strategy with + | CachingStrategy.LRU -> + list.AddLast(entity.Node) + | CachingStrategy.LFU -> + list.AddLast(entity.Node) + // list.AddFirst(entity.Node) + + member _.Update(entity: CachedEntity<'Key, 'Value>) = lock list <| fun () -> entity.AccessCount <- entity.AccessCount + 1L let node = entity.Node - match strategy with - | CachingStrategy.LRU -> - // Just move this node to the end of the list. - list.Remove(node) - list.AddLast(node) - | CachingStrategy.LFU -> - // Bubble up the node in the list, linear time. - // TODO: frequency list approach would be faster. - while (isNotNull node.Next) && (node.Next.Value.AccessCount < node.Value.AccessCount) do + // Sync between store and the eviction queue is not atomic. It might be already evicted or not yet added. 
+ if node.List = list then + + match strategy with + | CachingStrategy.LRU -> + // Just move this node to the end of the list. list.Remove(node) - list.AddAfter(node.Next, node) + list.AddLast(node) + | CachingStrategy.LFU -> + // Bubble up the node in the list, linear time. + // TODO: frequency list approach would be faster. + let rec bubbleUp (current: LinkedListNode>) = + if isNotNull current.Next && current.Next.Value.AccessCount < entity.AccessCount then + bubbleUp current.Next + else + current + + let next = bubbleUp node + + if next <> node then + list.Remove(node) + list.AddAfter(next, node) member _.GetKeysToEvict(count) = lock list @@ -126,18 +142,18 @@ module internal CacheMetrics = let meter = new Meter("FSharp.Compiler.Caches") let uid = Interlocked.Increment &cacheId - let orZero f = + let _orZero f = fun () -> let vs = store.Values if vs |> Seq.isEmpty then 0L else f vs let _ = meter.CreateObservableGauge($"cache{uid}", (fun () -> int64 store.Count)) - let _ = - meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) + //let _ = + // meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) - let _ = - meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) + //let _ = + // meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) let mutable evictions = 0L let mutable fails = 0L @@ -178,7 +194,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> let store = ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) - let evictionQueue = EvictionQueue<'Key, 'Value>() + let evictionQueue = EvictionQueue<'Key, 'Value>(options.Strategy) let tryEvictItems () = let count = @@ -225,7 +241,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> let tryGet (key: 'Key) = match store.TryGetValue(key) with | true, cachedEntity -> - evictionQueue.Update(cachedEntity, options.Strategy) + evictionQueue.Update(cachedEntity) Some cachedEntity | _ -> None @@ -246,7 +262,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> let cachedEntity = evictionQueue.Acquire(key, value) if store.TryAdd(key, cachedEntity) then - evictionQueue.Add(cachedEntity) + evictionQueue.Add(cachedEntity, options.Strategy) true else false @@ -263,7 +279,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> current) ) - evictionQueue.Add(entity) + evictionQueue.Add(entity, options.Strategy) [] member val CacheHit = cacheHit.Publish From fdf191640f5dd2b2429dabf8a4a726c5bd6203e6 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Fri, 18 Apr 2025 09:21:25 +0200 Subject: [PATCH 30/44] smaller cache --- src/Compiler/Checking/TypeRelations.fs | 10 +++++----- src/Compiler/Checking/import.fs | 25 +++++++++++++------------ src/Compiler/Checking/import.fsi | 6 +++--- src/Compiler/Utilities/TypeHashing.fs | 6 +++--- 4 files changed, 24 insertions(+), 23 deletions(-) diff --git a/src/Compiler/Checking/TypeRelations.fs b/src/Compiler/Checking/TypeRelations.fs index fd6e12bc4d7..5a183005030 100644 --- a/src/Compiler/Checking/TypeRelations.fs +++ b/src/Compiler/Checking/TypeRelations.fs @@ -101,8 +101,8 @@ let TypesFeasiblyEquiv ndeep g amap m ty1 ty2 = let TypesFeasiblyEquivStripMeasures g amap m ty1 ty2 = TypesFeasiblyEquivalent true 0 g amap m ty1 ty2 -let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = - if 
g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then +let inline TryGetCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key = + if true (* g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache *) then match amap.TypeSubsumptionCache.TryGetValue(key) with | true, subsumes -> ValueSome subsumes @@ -111,8 +111,8 @@ let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = else ValueNone -let inline UpdateCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key subsumes : unit = - if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then +let inline UpdateCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key subsumes : unit = + if true (* g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache *) then amap.TypeSubsumptionCache.AddOrUpdate(key, subsumes) [] @@ -128,7 +128,7 @@ let rec TypeFeasiblySubsumesType ndeep (g: TcGlobals) (amap: ImportMap) m (ty1: let ty2 = stripTyEqns g ty2 // Check if language feature supported - let key = TTypeCacheKey.FromStrippedTypes (ty1, ty2, canCoerce, g) + let key = TTypeCacheKey.FromStrippedTypes (ty1, ty2, canCoerce) match TryGetCachedTypeSubsumption g amap key with | ValueSome subsumes -> diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index d96ce8b182a..79849458ba2 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -62,13 +62,13 @@ type TTypeCacheKey = val ty1: TType val ty2: TType val canCoerce: CanCoerce - val tcGlobals: TcGlobals + //val tcGlobals: TcGlobals - private new (ty1, ty2, canCoerce, tcGlobals) = - { ty1 = ty1; ty2 = ty2; canCoerce = canCoerce; tcGlobals = tcGlobals } + private new (ty1, ty2, canCoerce) = + { ty1 = ty1; ty2 = ty2; canCoerce = canCoerce } - static member FromStrippedTypes (ty1, ty2, canCoerce, tcGlobals) = - TTypeCacheKey(ty1, ty2, canCoerce, tcGlobals) + static member FromStrippedTypes (ty1, ty2, canCoerce) = + TTypeCacheKey(ty1, ty2, canCoerce) interface System.IEquatable with member this.Equals other = @@ -77,8 +77,8 @@ type TTypeCacheKey = elif this.ty1 === other.ty1 && this.ty2 === other.ty2 then true else - stampEquals this.tcGlobals this.ty1 other.ty1 - && stampEquals this.tcGlobals this.ty2 other.ty2 + stampEquals this.ty1 other.ty1 + && stampEquals this.ty2 other.ty2 override this.Equals(other:objnull) = match other with @@ -100,8 +100,7 @@ type TTypeCacheKey = | TType_var _ | TType_measure _ -> 0 - hash this.tcGlobals - |> pipeToHash (simpleTypeHash this.ty1) + simpleTypeHash this.ty1 |> pipeToHash (simpleTypeHash this.ty2) |> pipeToHash (hash this.canCoerce) @@ -119,14 +118,16 @@ let getOrCreateTypeSubsumptionCache = let options = if compilationMode = CompilationMode.OneOff then { CacheOptions.Default with - PercentageToEvict = 0 - EvictionMethod = EvictionMethod.NoEviction } + PercentageToEvict = 5 + Strategy = CachingStrategy.LRU + MaximumCapacity = 8192 + EvictionMethod = EvictionMethod.Background } else { CacheOptions.Default with EvictionMethod = EvictionMethod.Background Strategy = CachingStrategy.LRU PercentageToEvict = 5 - MaximumCapacity = 100_000 } + MaximumCapacity = 8192 } cache <- Some (Cache.Create(options)) cache.Value diff --git a/src/Compiler/Checking/import.fsi b/src/Compiler/Checking/import.fsi index 043692ac41c..4ce9b25f755 100644 --- a/src/Compiler/Checking/import.fsi +++ b/src/Compiler/Checking/import.fsi @@ -45,15 +45,15 @@ type CanCoerce = [] type TTypeCacheKey = interface System.IEquatable - private new: ty1: TType * ty2: 
TType * canCoerce: CanCoerce * tcGlobals: TcGlobals -> TTypeCacheKey + private new: ty1: TType * ty2: TType * canCoerce: CanCoerce -> TTypeCacheKey static member FromStrippedTypes: - ty1: TType * ty2: TType * canCoerce: CanCoerce * tcGlobals: TcGlobals -> TTypeCacheKey + ty1: TType * ty2: TType * canCoerce: CanCoerce -> TTypeCacheKey val ty1: TType val ty2: TType val canCoerce: CanCoerce - val tcGlobals: TcGlobals + //val tcGlobals: TcGlobals override GetHashCode: unit -> int /// Represents a context used for converting AbstractIL .NET and provided types to F# internal compiler data structures. diff --git a/src/Compiler/Utilities/TypeHashing.fs b/src/Compiler/Utilities/TypeHashing.fs index 37c59b48207..e1e3a927617 100644 --- a/src/Compiler/Utilities/TypeHashing.fs +++ b/src/Compiler/Utilities/TypeHashing.fs @@ -126,12 +126,12 @@ module HashAccessibility = module rec HashTypes = open Microsoft.FSharp.Core.LanguagePrimitives - let rec stampEquals g ty1 ty2 = - match (stripTyEqns g ty1), (stripTyEqns g ty2) with + let rec stampEquals ty1 ty2 = + match ty1, ty2 with | TType_app(tcref1, tinst1, _), TType_app(tcref2, tinst2, _) -> tcref1.Stamp = tcref2.Stamp && tinst1.Length = tinst2.Length - && tinst1 |> List.zip tinst2 |> List.forall (fun (t1, t2) -> stampEquals g t1 t2) + && tinst1 |> List.zip tinst2 |> List.forall (fun (t1, t2) -> stampEquals t1 t2) | TType_var(r1, _), TType_var(r2, _) -> r1.Stamp.Equals(r2.Stamp) | _ -> false From 520c7707be933bafa4a699fcaa15e6005037f25a Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Fri, 18 Apr 2025 18:11:48 +0200 Subject: [PATCH 31/44] wip --- src/Compiler/Checking/TypeRelations.fs | 8 ++-- src/Compiler/Checking/import.fs | 45 ++++++++----------- src/Compiler/Utilities/Caches.fs | 20 +++++---- .../src/FSharp.Editor/Common/Logging.fs | 2 +- 4 files changed, 35 insertions(+), 40 deletions(-) diff --git a/src/Compiler/Checking/TypeRelations.fs b/src/Compiler/Checking/TypeRelations.fs index 5a183005030..7cfcb4900a5 100644 --- a/src/Compiler/Checking/TypeRelations.fs +++ b/src/Compiler/Checking/TypeRelations.fs @@ -101,8 +101,8 @@ let TypesFeasiblyEquiv ndeep g amap m ty1 ty2 = let TypesFeasiblyEquivStripMeasures g amap m ty1 ty2 = TypesFeasiblyEquivalent true 0 g amap m ty1 ty2 -let inline TryGetCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key = - if true (* g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache *) then +let inline TryGetCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key = + if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then match amap.TypeSubsumptionCache.TryGetValue(key) with | true, subsumes -> ValueSome subsumes @@ -111,8 +111,8 @@ let inline TryGetCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key = else ValueNone -let inline UpdateCachedTypeSubsumption (_g: TcGlobals) (amap: ImportMap) key subsumes : unit = - if true (* g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache *) then +let inline UpdateCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key subsumes : unit = + if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then amap.TypeSubsumptionCache.AddOrUpdate(key, subsumes) [] diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 79849458ba2..478da38998e 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -91,7 +91,7 @@ type TTypeCacheKey = // This hash must be stable during compilation, otherwise we won't be 
able to find the keys in the cache. let rec simpleTypeHash ty = match ty with - | TType_ucase (u, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash u.CaseName) + | TType_ucase (_, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) // |> pipeToHash (hash u.CaseName) | TType_app(tcref, tinst, _) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash tcref.Stamp) | TType_anon(info, tys) -> tys |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash info.Stamp) | TType_tuple(_ , tys) -> tys |> hashListOrderMatters (simpleTypeHash) @@ -106,30 +106,23 @@ type TTypeCacheKey = override this.ToString () = $"{this.ty1.DebugText}-{this.ty2.DebugText}" -let getOrCreateTypeSubsumptionCache = - let mutable lockObj = obj() - let mutable cache = None - - fun compilationMode -> - lock lockObj <| fun () -> - match cache with - | Some c -> c - | _ -> - let options = - if compilationMode = CompilationMode.OneOff then - { CacheOptions.Default with - PercentageToEvict = 5 - Strategy = CachingStrategy.LRU - MaximumCapacity = 8192 - EvictionMethod = EvictionMethod.Background } - else - { CacheOptions.Default with - EvictionMethod = EvictionMethod.Background - Strategy = CachingStrategy.LRU - PercentageToEvict = 5 - MaximumCapacity = 8192 } - cache <- Some (Cache.Create(options)) - cache.Value +let createTypeSubsumptionCache (g: TcGlobals) = + let options = + if g.compilationMode = CompilationMode.OneOff then + { CacheOptions.Default with + PercentageToEvict = 5 + Strategy = CachingStrategy.LRU + MaximumCapacity = 8192 + EvictionMethod = EvictionMethod.Background } + else + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + Strategy = CachingStrategy.LRU + PercentageToEvict = 5 + MaximumCapacity = 8192 } + Cache.Create(options) + +let typeSubsumptionCaches = ConditionalWeakTable>() //------------------------------------------------------------------------- // Import an IL types as F# types. @@ -153,7 +146,7 @@ type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = member _.ILTypeRefToTyconRefCache = typeRefToTyconRefCache - member val TypeSubsumptionCache = getOrCreateTypeSubsumptionCache g.compilationMode + member _.TypeSubsumptionCache = typeSubsumptionCaches.GetValue(g, createTypeSubsumptionCache) // getOrCreateTypeSubsumptionCache g.compilationMode let CanImportILScopeRef (env: ImportMap) m scoref = diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 50fbde44db6..595890630ed 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -72,15 +72,12 @@ type internal CachedEntity<'Key, 'Value> = type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = let list = LinkedList>() - let pool = Queue>() + let pool = ConcurrentBag>() member _.Acquire(key, value) = - lock pool - <| fun () -> - if pool.Count > 0 then - pool.Dequeue().ReUse(key, value) - else - CachedEntity.Create<_, _>(key, value) + match pool.TryTake() with + | true , entity -> entity.ReUse(key, value) + | _ -> CachedEntity.Create<_, _>(key, value) member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = lock list @@ -130,7 +127,7 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = member _.Remove(entity: CachedEntity<_, _>) = lock list <| fun () -> list.Remove(entity.Node) // Return to the pool for reuse. 
- lock pool <| fun () -> pool.Enqueue(entity) + pool.Add(entity) member _.Count = list.Count @@ -303,6 +300,11 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> new Cache<'Key, 'Value>(options, capacity, cts) interface IDisposable with - member _.Dispose() = cts.Cancel() + member this.Dispose() = + cts.Cancel() + GC.SuppressFinalize(this) member this.Dispose() = (this :> IDisposable).Dispose() + + override this.Finalize (): unit = + this.Dispose() diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 0ddca9ed070..2b7c09e8c77 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -172,7 +172,7 @@ module FSharpServiceTelemetry = listener.RecordObservableInstruments() if instruments.Count > 0 then - [ for kvp in instruments -> $"{kvp.Key}: {kvp.Value}" ] + [ for kvp in instruments do if kvp.Value > 0L then $"{kvp.Key}: {kvp.Value}" ] |> String.concat ", " |> msg.Trigger } From f01ac19e7963440facca8d1d7c26f4fd1ee544ee Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Fri, 18 Apr 2025 22:46:23 +0200 Subject: [PATCH 32/44] hit ratio --- src/Compiler/Checking/import.fs | 9 +- src/Compiler/Utilities/Caches.fs | 125 +++++++++--------- .../src/FSharp.Editor/Common/Logging.fs | 42 +++--- .../LanguageService/LanguageService.fs | 2 +- 4 files changed, 88 insertions(+), 90 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 478da38998e..201458b48c7 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -110,17 +110,16 @@ let createTypeSubsumptionCache (g: TcGlobals) = let options = if g.compilationMode = CompilationMode.OneOff then { CacheOptions.Default with - PercentageToEvict = 5 - Strategy = CachingStrategy.LRU - MaximumCapacity = 8192 - EvictionMethod = EvictionMethod.Background } + PercentageToEvict = 0 + MaximumCapacity = 100_000 + EvictionMethod = EvictionMethod.NoEviction } else { CacheOptions.Default with EvictionMethod = EvictionMethod.Background Strategy = CachingStrategy.LRU PercentageToEvict = 5 MaximumCapacity = 8192 } - Cache.Create(options) + Cache.Create(options) let typeSubsumptionCaches = ConditionalWeakTable>() diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 595890630ed..240cf24ac9f 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -131,57 +131,10 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = member _.Count = list.Count -module internal CacheMetrics = - - let mutable cacheId = 0 - - let addInstrumentation (store: ConcurrentDictionary<_, CachedEntity<_, _>>) = - let meter = new Meter("FSharp.Compiler.Caches") - let uid = Interlocked.Increment &cacheId - - let _orZero f = - fun () -> - let vs = store.Values - if vs |> Seq.isEmpty then 0L else f vs - - let _ = meter.CreateObservableGauge($"cache{uid}", (fun () -> int64 store.Count)) - - //let _ = - // meter.CreateObservableGauge($"MFA{uid}", orZero (Seq.map _.AccessCount >> Seq.max)) - - //let _ = - // meter.CreateObservableGauge($"LFA{uid}", orZero (Seq.map _.AccessCount >> Seq.min)) - - let mutable evictions = 0L - let mutable fails = 0L - let mutable hits = 0L - let mutable misses = 0L - - fun eviction hit miss evictionFail -> - - eviction |> Event.add (fun _ -> Interlocked.Increment &evictions |> ignore) - evictionFail |> Event.add (fun _ -> 
Interlocked.Increment &fails |> ignore) - hit |> Event.add (fun _ -> Interlocked.Increment &hits |> ignore) - miss |> Event.add (fun _ -> Interlocked.Increment &misses |> ignore) - - let _ = - meter.CreateObservableGauge($"evicted{uid}", fun () -> Interlocked.Exchange(&evictions, 0L)) - - let _ = - meter.CreateObservableGauge($"fails{uid}", fun () -> Interlocked.Exchange(&fails, 0L)) - - let _ = - meter.CreateObservableGauge($"hits{uid}", fun () -> Interlocked.Exchange(&hits, 0L)) - - let _ = - meter.CreateObservableGauge($"misses{uid}", fun () -> Interlocked.Exchange(&misses, 0L)) - - () - [] [] type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> - private (options: CacheOptions, capacity, cts: CancellationTokenSource) = + internal (options: CacheOptions, capacity, cts: CancellationTokenSource) = let cacheHit = Event<_ * _>() let cacheMiss = Event<_>() @@ -229,8 +182,6 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> if options.EvictionMethod = EvictionMethod.Background then Async.Start(backgroundEviction (), cancellationToken = cts.Token) - do CacheMetrics.addInstrumentation store eviction.Publish cacheHit.Publish cacheMiss.Publish evictionFail.Publish - let tryEvict () = if options.EvictionMethod.IsBlocking then tryEvictItems () @@ -290,15 +241,6 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> [] member val EvictionFail = evictionFail.Publish - static member Create(options: CacheOptions) = - // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. - let capacity = - options.MaximumCapacity - + (options.MaximumCapacity * options.PercentageToEvict / 100) - - let cts = new CancellationTokenSource() - new Cache<'Key, 'Value>(options, capacity, cts) - interface IDisposable with member this.Dispose() = cts.Cancel() @@ -308,3 +250,68 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> override this.Finalize (): unit = this.Dispose() + + +module internal CacheMetrics = + + let mutable cacheId = 0 + + [] + let cachesMetricsName = "FSharp.Compiler.Caches" + + let addInstrumentation (cache: Cache<_, _>) = + let meter = new Meter(cachesMetricsName) + let cacheId = Interlocked.Increment &cacheId + + let mutable evictions = 0L + let mutable fails = 0L + let mutable hits = 0L + let mutable misses = 0L + + let mutable allEvictions = 0L + let mutable allFails = 0L + let mutable allHits = 0L + let mutable allMisses = 0L + + cache.CacheHit |> Event.add (fun _ -> + Interlocked.Increment &hits |> ignore + Interlocked.Increment &allHits |> ignore + ) + + cache.CacheMiss |> Event.add (fun _ -> + Interlocked.Increment &misses |> ignore + Interlocked.Increment &allMisses |> ignore + ) + + cache.Eviction |> Event.add (fun _ -> + Interlocked.Increment &evictions |> ignore + Interlocked.Increment &allEvictions |> ignore + ) + + cache.EvictionFail |> Event.add (fun _ -> + Interlocked.Increment &fails |> ignore + Interlocked.Increment &allFails |> ignore + ) + + + let hitRatio () = + let misses = Interlocked.Exchange(&misses, 0L) + let hits = Interlocked.Exchange(&hits, 0L) + float hits / float (hits + misses) + + meter.CreateObservableGauge($"hit ratio {cacheId}", hitRatio) |> ignore + +module internal Cache = + let Create<'Key, 'Value when 'Key: not null and 'Key: equality>(options: CacheOptions) = + // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. 
+ let capacity = + options.MaximumCapacity + + (options.MaximumCapacity * options.PercentageToEvict / 100) + + let cts = new CancellationTokenSource() + let cache = new Cache<'Key, 'Value>(options, capacity, cts) + #if DEBUG + CacheMetrics.addInstrumentation cache + #endif + cache + diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 2b7c09e8c77..a3ee7648bf3 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -31,6 +31,7 @@ module Config = open Config open System.Diagnostics.Metrics +open System.Text [] type Logger [] ([)>] serviceProvider: IServiceProvider) = @@ -148,43 +149,34 @@ module FSharpServiceTelemetry = ActivitySource.AddActivityListener(listener) +#if DEBUG let logCacheMetricsToOutput () = - let instruments = Collections.Generic.Dictionary() - let listener = new MeterListener( InstrumentPublished = fun instrument l -> if instrument.Meter.Name = "FSharp.Compiler.Caches" then - instruments[instrument.Name] <- 0L l.EnableMeasurementEvents(instrument) ) - - let callBack = MeasurementCallback(fun instr v _ _ -> instruments[instr.Name] <- v) + let measurements = Collections.Generic.Dictionary<_, _>() + let changed = ResizeArray() + let callBack = MeasurementCallback(fun i v _ _ -> + let v = if Double.IsNaN v then "-" else $"%.1f{v * 100.}%%" + if measurements.ContainsKey(i.Name) && measurements[i.Name] = v then () + else + measurements[i.Name] <- v + changed.Add i.Name) listener.SetMeasurementEventCallback callBack listener.Start() - let msg = Event() - - backgroundTask { - while true do - do! System.Threading.Tasks.Task.Delay(1000) - listener.RecordObservableInstruments() + let timer = new System.Timers.Timer(1000.0, AutoReset = true) + timer.Elapsed.Add (fun _ -> + changed.Clear() + listener.RecordObservableInstruments() + let msg = seq { for k in changed -> $"{k}: {measurements[k]}" } |> String.concat ", " + if msg <> "" then logMsg msg) + timer.Start() - if instruments.Count > 0 then - [ for kvp in instruments do if kvp.Value > 0L then $"{kvp.Key}: {kvp.Value}" ] - |> String.concat ", " - |> msg.Trigger - } - |> ignore - - msg.Publish - |> Event.pairwise - |> Event.filter (fun (x, y) -> x <> y) - |> Event.map snd - |> Event.add logMsg - -#if DEBUG open OpenTelemetry.Resources open OpenTelemetry.Trace open OpenTelemetry.Metrics diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index 0c75a92552e..12d534b4b8e 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -343,9 +343,9 @@ type internal FSharpPackage() as this = // FSI-LINKAGE-POINT: unsited init do FSharp.Interactive.Hooks.fsiConsoleWindowPackageCtorUnsited (this :> Package) +#if DEBUG do Logging.FSharpServiceTelemetry.logCacheMetricsToOutput () -#if DEBUG let flushTelemetry = Logging.FSharpServiceTelemetry.export () override this.Dispose(disposing: bool) = From 43cbdceecdc509d9b4df98cf9b812badfa4e4137 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Sat, 19 Apr 2025 22:39:37 +0200 Subject: [PATCH 33/44] wip --- src/Compiler/Checking/import.fs | 11 ++++--- src/Compiler/Facilities/LanguageFeatures.fs | 4 ++- src/Compiler/Utilities/Caches.fs | 32 +++++++++---------- .../src/FSharp.Editor/Common/Logging.fs | 9 +++++- 4 files changed, 32 insertions(+), 
24 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 201458b48c7..7e5f08e01ce 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -91,13 +91,13 @@ type TTypeCacheKey = // This hash must be stable during compilation, otherwise we won't be able to find the keys in the cache. let rec simpleTypeHash ty = match ty with - | TType_ucase (_, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) // |> pipeToHash (hash u.CaseName) + | TType_ucase (_, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) | TType_app(tcref, tinst, _) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash tcref.Stamp) | TType_anon(info, tys) -> tys |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash info.Stamp) | TType_tuple(_ , tys) -> tys |> hashListOrderMatters (simpleTypeHash) | TType_forall(tps, tau) -> tps |> Seq.map _.Stamp |> hashListOrderMatters (hash) |> pipeToHash (simpleTypeHash tau) | TType_fun (d, r, _) -> simpleTypeHash d |> pipeToHash (simpleTypeHash r) - | TType_var _ + | TType_var (r, _) -> hash r.Stamp | TType_measure _ -> 0 simpleTypeHash this.ty1 @@ -110,9 +110,10 @@ let createTypeSubsumptionCache (g: TcGlobals) = let options = if g.compilationMode = CompilationMode.OneOff then { CacheOptions.Default with - PercentageToEvict = 0 - MaximumCapacity = 100_000 - EvictionMethod = EvictionMethod.NoEviction } + EvictionMethod = EvictionMethod.Blocking + Strategy = CachingStrategy.LRU + PercentageToEvict = 5 + MaximumCapacity = 8192 } else { CacheOptions.Default with EvictionMethod = EvictionMethod.Background diff --git a/src/Compiler/Facilities/LanguageFeatures.fs b/src/Compiler/Facilities/LanguageFeatures.fs index 7a9a14b8602..a17c030ae0b 100644 --- a/src/Compiler/Facilities/LanguageFeatures.fs +++ b/src/Compiler/Facilities/LanguageFeatures.fs @@ -220,7 +220,6 @@ type LanguageVersion(versionText) = // F# preview LanguageFeature.EnforceAttributeTargets, previewVersion // Not enabled due to a number of external library dependencies on unenforced attributes - LanguageFeature.UseTypeSubsumptionCache, previewVersion LanguageFeature.UnmanagedConstraintCsharpInterop, previewVersion // not enabled because: https://github.com/dotnet/fsharp/issues/17509 LanguageFeature.FromEndSlicing, previewVersion // Unfinished features --- needs work LanguageFeature.AllowAccessModifiersToAutoPropertiesGettersAndSetters, previewVersion @@ -229,6 +228,9 @@ type LanguageVersion(versionText) = LanguageFeature.DeprecatePlacesWhereSeqCanBeOmitted, previewVersion LanguageFeature.SupportValueOptionsAsOptionalParameters, previewVersion LanguageFeature.WarnWhenUnitPassedToObjArg, previewVersion + + // Just to see if it works. 
+ LanguageFeature.UseTypeSubsumptionCache, languageVersion46 ] static let defaultLanguageVersion = LanguageVersion("default") diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 240cf24ac9f..77818beaca5 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -252,56 +252,54 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> this.Dispose() -module internal CacheMetrics = - +module internal Cache = let mutable cacheId = 0 - - [] - let cachesMetricsName = "FSharp.Compiler.Caches" + [] + let MeterName = "FSharp.Compiler.Caches" + let addInstrumentation (cache: Cache<_, _>) = - let meter = new Meter(cachesMetricsName) + let meter = new Meter(MeterName) let cacheId = Interlocked.Increment &cacheId - + let mutable evictions = 0L let mutable fails = 0L let mutable hits = 0L let mutable misses = 0L - + let mutable allEvictions = 0L let mutable allFails = 0L let mutable allHits = 0L let mutable allMisses = 0L - + cache.CacheHit |> Event.add (fun _ -> Interlocked.Increment &hits |> ignore Interlocked.Increment &allHits |> ignore ) - + cache.CacheMiss |> Event.add (fun _ -> Interlocked.Increment &misses |> ignore Interlocked.Increment &allMisses |> ignore ) - + cache.Eviction |> Event.add (fun _ -> Interlocked.Increment &evictions |> ignore Interlocked.Increment &allEvictions |> ignore ) - + cache.EvictionFail |> Event.add (fun _ -> Interlocked.Increment &fails |> ignore Interlocked.Increment &allFails |> ignore ) - - + + let hitRatio () = let misses = Interlocked.Exchange(&misses, 0L) let hits = Interlocked.Exchange(&hits, 0L) float hits / float (hits + misses) - + meter.CreateObservableGauge($"hit ratio {cacheId}", hitRatio) |> ignore -module internal Cache = let Create<'Key, 'Value when 'Key: not null and 'Key: equality>(options: CacheOptions) = // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let capacity = @@ -311,7 +309,7 @@ module internal Cache = let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) #if DEBUG - CacheMetrics.addInstrumentation cache + addInstrumentation cache #endif cache diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index a3ee7648bf3..051fc17fdc5 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -184,7 +184,14 @@ module FSharpServiceTelemetry = let export () = let meterProvider = // Configure OpenTelemetry metrics. Metrics can be viewed in Prometheus or other compatible tools. - OpenTelemetry.Sdk.CreateMeterProviderBuilder().AddOtlpExporter().Build() + OpenTelemetry.Sdk.CreateMeterProviderBuilder() + .ConfigureResource(fun r -> r.AddService("F#") |> ignore) + .AddMeter(FSharp.Compiler.Cache.MeterName) + .AddOtlpExporter(fun _e m -> + m.PeriodicExportingMetricReaderOptions.ExportIntervalMilliseconds <- 1000 + m.TemporalityPreference <- MetricReaderTemporalityPreference.Cumulative + ) + .Build() let tracerProvider = // Configure OpenTelemetry export. Traces can be viewed in Jaeger or other compatible tools. 
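Note (illustrative, not part of the patch series): a minimal sketch of how a consumer inside the compiler might exercise the cache introduced above, assuming the CacheOptions/Cache API as it appears across these diffs (CacheOptions.Default, Cache.Create, TryGetValue with an outref, AddOrUpdate). The string/int key and value types and the lookupOrCompute helper are placeholders for illustration only; the real caller in this series is the type-subsumption cache, which stores bool results keyed by TTypeCacheKey.

module CacheUsageSketch =

    open FSharp.Compiler

    // Blocking LRU configuration, similar to the one used for one-off compilation in this patch.
    let options =
        { CacheOptions.Default with
            MaximumCapacity = 8192
            PercentageToEvict = 5
            Strategy = CachingStrategy.LRU
            EvictionMethod = EvictionMethod.Blocking }

    // Placeholder key/value types; the compiler's instance maps TTypeCacheKey to bool.
    let cache: Cache<string, int> = Cache.Create(options)

    // Read-through pattern: try the cache first, compute and store on a miss.
    let lookupOrCompute (key: string) =
        match cache.TryGetValue(key) with
        | true, value -> value
        | false, _ ->
            let value = key.Length // stand-in for an expensive computation
            cache.AddOrUpdate(key, value)
            value
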
From 8677d4f246882bbbf9979133a481b6c93a9f80af Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Tue, 22 Apr 2025 01:16:46 +0200 Subject: [PATCH 34/44] wip --- src/Compiler/Checking/import.fs | 57 ++++--- src/Compiler/Facilities/LanguageFeatures.fs | 4 +- src/Compiler/Utilities/Caches.fs | 166 +++++++++++--------- src/Compiler/Utilities/TypeHashing.fs | 20 --- 4 files changed, 126 insertions(+), 121 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 7e5f08e01ce..1fafe1b958b 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -56,6 +56,33 @@ type CanCoerce = | CanCoerce | NoCoerce +module StampHashing = + let rec stampEquals ty1 ty2 = + match ty1, ty2 with + | TType_app(tcref1, tinst1, _), TType_app(tcref2, tinst2, _) -> + tcref1.Stamp = tcref2.Stamp + && tinst1.Length = tinst2.Length + && (tinst1, tinst2) ||> Seq.zip |> Seq.forall (fun (t1, t2) -> stampEquals t1 t2) + + | TType_var(r1, _), TType_var(r2, _) -> r1.Stamp = r2.Stamp + | _ -> false + + let inline hashStamp (x: int64) = + uint x * 2654435761u |> int + + // The idea is to keep the illusion of immutability of TType. + // This hash must be stable during compilation, otherwise we won't be able to find keys or evict from the cache. + let rec simpleTypeHash ty = + match ty with + | TType_ucase (_, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) + | TType_app(tcref, tinst, _) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hashStamp tcref.Stamp) + | TType_anon(info, tys) -> tys |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hashStamp info.Stamp) + | TType_tuple(_ , tys) -> tys |> hashListOrderMatters (simpleTypeHash) + | TType_forall(tps, tau) -> tps |> Seq.map _.Stamp |> hashListOrderMatters (hashStamp) |> pipeToHash (simpleTypeHash tau) + | TType_fun (d, r, _) -> simpleTypeHash d |> pipeToHash (simpleTypeHash r) + | TType_var (r, _) -> hashStamp r.Stamp + | TType_measure _ -> 0 + [] type TTypeCacheKey = @@ -77,8 +104,8 @@ type TTypeCacheKey = elif this.ty1 === other.ty1 && this.ty2 === other.ty2 then true else - stampEquals this.ty1 other.ty1 - && stampEquals this.ty2 other.ty2 + StampHashing.stampEquals this.ty1 other.ty1 + && StampHashing.stampEquals this.ty2 other.ty2 override this.Equals(other:objnull) = match other with @@ -86,22 +113,8 @@ type TTypeCacheKey = | _ -> false override this.GetHashCode() : int = - // TODO: we need reasonable uniformity - // The idea is to keep the illusion of immutability of TType. - // This hash must be stable during compilation, otherwise we won't be able to find the keys in the cache. 
- let rec simpleTypeHash ty = - match ty with - | TType_ucase (_, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) - | TType_app(tcref, tinst, _) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash tcref.Stamp) - | TType_anon(info, tys) -> tys |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hash info.Stamp) - | TType_tuple(_ , tys) -> tys |> hashListOrderMatters (simpleTypeHash) - | TType_forall(tps, tau) -> tps |> Seq.map _.Stamp |> hashListOrderMatters (hash) |> pipeToHash (simpleTypeHash tau) - | TType_fun (d, r, _) -> simpleTypeHash d |> pipeToHash (simpleTypeHash r) - | TType_var (r, _) -> hash r.Stamp - | TType_measure _ -> 0 - - simpleTypeHash this.ty1 - |> pipeToHash (simpleTypeHash this.ty2) + StampHashing.simpleTypeHash this.ty1 + |> pipeToHash (StampHashing.simpleTypeHash this.ty2) |> pipeToHash (hash this.canCoerce) override this.ToString () = $"{this.ty1.DebugText}-{this.ty2.DebugText}" @@ -110,16 +123,14 @@ let createTypeSubsumptionCache (g: TcGlobals) = let options = if g.compilationMode = CompilationMode.OneOff then { CacheOptions.Default with - EvictionMethod = EvictionMethod.Blocking - Strategy = CachingStrategy.LRU - PercentageToEvict = 5 - MaximumCapacity = 8192 } + MaximumCapacity = 8192 + EvictionMethod = EvictionMethod.NoEviction } else { CacheOptions.Default with EvictionMethod = EvictionMethod.Background Strategy = CachingStrategy.LRU PercentageToEvict = 5 - MaximumCapacity = 8192 } + MaximumCapacity = 32_000 } // 8192 } Cache.Create(options) let typeSubsumptionCaches = ConditionalWeakTable>() diff --git a/src/Compiler/Facilities/LanguageFeatures.fs b/src/Compiler/Facilities/LanguageFeatures.fs index a17c030ae0b..7a9a14b8602 100644 --- a/src/Compiler/Facilities/LanguageFeatures.fs +++ b/src/Compiler/Facilities/LanguageFeatures.fs @@ -220,6 +220,7 @@ type LanguageVersion(versionText) = // F# preview LanguageFeature.EnforceAttributeTargets, previewVersion // Not enabled due to a number of external library dependencies on unenforced attributes + LanguageFeature.UseTypeSubsumptionCache, previewVersion LanguageFeature.UnmanagedConstraintCsharpInterop, previewVersion // not enabled because: https://github.com/dotnet/fsharp/issues/17509 LanguageFeature.FromEndSlicing, previewVersion // Unfinished features --- needs work LanguageFeature.AllowAccessModifiersToAutoPropertiesGettersAndSetters, previewVersion @@ -228,9 +229,6 @@ type LanguageVersion(versionText) = LanguageFeature.DeprecatePlacesWhereSeqCanBeOmitted, previewVersion LanguageFeature.SupportValueOptionsAsOptionalParameters, previewVersion LanguageFeature.WarnWhenUnitPassedToObjArg, previewVersion - - // Just to see if it works. 
- LanguageFeature.UseTypeSubsumptionCache, languageVersion46 ] static let defaultLanguageVersion = LanguageVersion("default") diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 77818beaca5..985653ebdfa 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -33,7 +33,7 @@ type internal CacheOptions = static member Default = { - MaximumCapacity = 100 + MaximumCapacity = 1024 PercentageToEvict = 5 Strategy = CachingStrategy.LRU LevelOfConcurrency = Environment.ProcessorCount @@ -48,7 +48,7 @@ type internal CachedEntity<'Key, 'Value> = val mutable AccessCount: int64 val mutable Node: LinkedListNode> - private new(key, value) = + new(key, value) = { Key = key Value = value @@ -56,10 +56,10 @@ type internal CachedEntity<'Key, 'Value> = Node = Unchecked.defaultof<_> } - static member Create(key, value) = - let entity = CachedEntity(key, value) - entity.Node <- LinkedListNode(entity) - entity + member this.WithNode() = + if isNull this.Node then + this.Node <- LinkedListNode(this) + this member this.ReUse(key, value) = this.Key <- key @@ -69,68 +69,87 @@ type internal CachedEntity<'Key, 'Value> = override this.ToString() = $"{this.Key}" +type internal IEvictionQueue<'Key, 'Value> = interface + abstract member Acquire : 'Key * 'Value -> CachedEntity<'Key, 'Value> + abstract member Add : CachedEntity<'Key, 'Value> * CachingStrategy -> unit + abstract member Update : CachedEntity<'Key, 'Value> -> unit + abstract member GetKeysToEvict : int -> 'Key[] + abstract member Remove : CachedEntity<'Key, 'Value> -> unit +end + type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = let list = LinkedList>() let pool = ConcurrentBag>() - member _.Acquire(key, value) = - match pool.TryTake() with - | true , entity -> entity.ReUse(key, value) - | _ -> CachedEntity.Create<_, _>(key, value) - - member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = - lock list - <| fun () -> - if isNull entity.Node.List then - match strategy with - | CachingStrategy.LRU -> - list.AddLast(entity.Node) - | CachingStrategy.LFU -> - list.AddLast(entity.Node) - // list.AddFirst(entity.Node) - - member _.Update(entity: CachedEntity<'Key, 'Value>) = - lock list - <| fun () -> - entity.AccessCount <- entity.AccessCount + 1L - - let node = entity.Node - - // Sync between store and the eviction queue is not atomic. It might be already evicted or not yet added. - if node.List = list then - - match strategy with - | CachingStrategy.LRU -> - // Just move this node to the end of the list. - list.Remove(node) - list.AddLast(node) - | CachingStrategy.LFU -> - // Bubble up the node in the list, linear time. - // TODO: frequency list approach would be faster. 
- let rec bubbleUp (current: LinkedListNode>) = - if isNotNull current.Next && current.Next.Value.AccessCount < entity.AccessCount then - bubbleUp current.Next - else - current - - let next = bubbleUp node - - if next <> node then + interface IEvictionQueue<'Key, 'Value> with + + member _.Acquire(key, value) = + match pool.TryTake() with + | true , entity -> entity.ReUse(key, value) + | _ -> + CachedEntity(key, value).WithNode() + + member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = + lock list + <| fun () -> + if isNull entity.Node.List then + match strategy with + | CachingStrategy.LRU -> + list.AddLast(entity.Node) + | CachingStrategy.LFU -> + list.AddLast(entity.Node) + // list.AddFirst(entity.Node) + + member _.Update(entity: CachedEntity<'Key, 'Value>) = + lock list + <| fun () -> + Interlocked.Increment(&entity.AccessCount) |> ignore + + let node = entity.Node + + // Sync between store and the eviction queue is not atomic. It might be already evicted or not yet added. + if node.List = list then + + match strategy with + | CachingStrategy.LRU -> + // Just move this node to the end of the list. list.Remove(node) - list.AddAfter(next, node) - - member _.GetKeysToEvict(count) = - lock list - <| fun () -> list |> Seq.map _.Key |> Seq.truncate count |> Seq.toArray - - member _.Remove(entity: CachedEntity<_, _>) = - lock list <| fun () -> list.Remove(entity.Node) - // Return to the pool for reuse. - pool.Add(entity) + list.AddLast(node) + | CachingStrategy.LFU -> + // Bubble up the node in the list, linear time. + // TODO: frequency list approach would be faster. + let rec bubbleUp (current: LinkedListNode>) = + if isNotNull current.Next && current.Next.Value.AccessCount < entity.AccessCount then + bubbleUp current.Next + else + current + + let next = bubbleUp node + + if next <> node then + list.Remove(node) + list.AddAfter(next, node) + + member _.GetKeysToEvict(count) = + lock list + <| fun () -> list |> Seq.map _.Key |> Seq.truncate count |> Seq.toArray + + member this.Remove(entity: CachedEntity<_, _>) = + lock list <| fun () -> list.Remove(entity.Node) + // Return to the pool for reuse. 
+ pool.Add(entity) member _.Count = list.Count + static member NoEviction = + { new IEvictionQueue<'Key, 'Value> with + member _.Acquire(key, value) = CachedEntity(key, value) + member _.Add(_, _) = () + member _.Update(entity) = Interlocked.Increment(&entity.AccessCount) |> ignore + member _.GetKeysToEvict(_) = [||] + member _.Remove(_) = () } + [] [] type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> @@ -144,7 +163,10 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> let store = ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) - let evictionQueue = EvictionQueue<'Key, 'Value>(options.Strategy) + let evictionQueue : IEvictionQueue<'Key, 'Value> = + match options.EvictionMethod with + | EvictionMethod.NoEviction -> EvictionQueue.NoEviction + | _ -> EvictionQueue(options.Strategy) let tryEvictItems () = let count = @@ -158,7 +180,9 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> | true, removed -> evictionQueue.Remove(removed) eviction.Trigger(key) - | _ -> evictionFail.Trigger(key) + | _ -> + failwith "eviction fail" + evictionFail.Trigger(key) let rec backgroundEviction () = async { @@ -182,21 +206,11 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> if options.EvictionMethod = EvictionMethod.Background then Async.Start(backgroundEviction (), cancellationToken = cts.Token) - let tryEvict () = - if options.EvictionMethod.IsBlocking then - tryEvictItems () - - let tryGet (key: 'Key) = + member _.TryGetValue(key: 'Key, value: outref<'Value>) = match store.TryGetValue(key) with | true, cachedEntity -> - evictionQueue.Update(cachedEntity) - Some cachedEntity - | _ -> None - - member _.TryGetValue(key: 'Key, value: outref<'Value>) = - match tryGet key with - | Some cachedEntity -> cacheHit.Trigger(key, cachedEntity.Value) + evictionQueue.Update(cachedEntity) value <- cachedEntity.Value true | _ -> @@ -205,7 +219,8 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> false member _.TryAdd(key: 'Key, value: 'Value) = - tryEvict () + if options.EvictionMethod.IsBlocking then + tryEvictItems () let cachedEntity = evictionQueue.Acquire(key, value) @@ -216,7 +231,8 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> false member _.AddOrUpdate(key: 'Key, value: 'Value) = - tryEvict () + if options.EvictionMethod.IsBlocking then + tryEvictItems () let entity = store.AddOrUpdate( diff --git a/src/Compiler/Utilities/TypeHashing.fs b/src/Compiler/Utilities/TypeHashing.fs index e1e3a927617..cd1d0ac9b95 100644 --- a/src/Compiler/Utilities/TypeHashing.fs +++ b/src/Compiler/Utilities/TypeHashing.fs @@ -126,26 +126,6 @@ module HashAccessibility = module rec HashTypes = open Microsoft.FSharp.Core.LanguagePrimitives - let rec stampEquals ty1 ty2 = - match ty1, ty2 with - | TType_app(tcref1, tinst1, _), TType_app(tcref2, tinst2, _) -> - tcref1.Stamp = tcref2.Stamp - && tinst1.Length = tinst2.Length - && tinst1 |> List.zip tinst2 |> List.forall (fun (t1, t2) -> stampEquals t1 t2) - - | TType_var(r1, _), TType_var(r2, _) -> r1.Stamp.Equals(r2.Stamp) - | _ -> false - - /// Get has for Stamp for TType_app tyconref and TType_var typar - let hashStamp g ty = - let v: Stamp = - match (stripTyEqns g ty) with - | TType_app(tcref, _, _) -> tcref.Stamp - | TType_var(r, _) -> r.Stamp - | _ -> GenericZero - - hash v - /// Hash a reference to a type let hashTyconRef tcref = hashTyconRefImpl tcref From 
245460f33148721cb8243053188b605bca578ba7 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Tue, 22 Apr 2025 16:26:33 +0200 Subject: [PATCH 35/44] some cleanup --- src/Compiler/Checking/TypeRelations.fs | 20 ++-- src/Compiler/Checking/import.fs | 36 +------ src/Compiler/Checking/import.fsi | 3 +- src/Compiler/Utilities/Caches.fs | 97 +++++++++---------- src/Compiler/Utilities/TypeHashing.fs | 30 ++++++ .../src/FSharp.Editor/Common/Logging.fs | 36 ++++--- 6 files changed, 115 insertions(+), 107 deletions(-) diff --git a/src/Compiler/Checking/TypeRelations.fs b/src/Compiler/Checking/TypeRelations.fs index 7cfcb4900a5..fa900069508 100644 --- a/src/Compiler/Checking/TypeRelations.fs +++ b/src/Compiler/Checking/TypeRelations.fs @@ -115,9 +115,6 @@ let inline UpdateCachedTypeSubsumption (g: TcGlobals) (amap: ImportMap) key subs if g.langVersion.SupportsFeature LanguageFeature.UseTypeSubsumptionCache then amap.TypeSubsumptionCache.AddOrUpdate(key, subsumes) -[] -type ResultWorthCaching = Yes | No - /// The feasible coercion relation. Part of the language spec. let rec TypeFeasiblySubsumesType ndeep (g: TcGlobals) (amap: ImportMap) m (ty1: TType) (canCoerce: CanCoerce) (ty2: TType) = @@ -134,33 +131,32 @@ let rec TypeFeasiblySubsumesType ndeep (g: TcGlobals) (amap: ImportMap) m (ty1: | ValueSome subsumes -> subsumes | ValueNone -> - let subsumes, worthCaching = + let subsumes = match ty1, ty2 with | TType_measure _, TType_measure _ | TType_var _, _ | _, TType_var _ -> - true, ResultWorthCaching.No + true | TType_app (tc1, l1, _), TType_app (tc2, l2, _) when tyconRefEq g tc1 tc2 -> - List.lengthsEqAndForall2 (TypesFeasiblyEquiv ndeep g amap m) l1 l2, ResultWorthCaching.Yes + List.lengthsEqAndForall2 (TypesFeasiblyEquiv ndeep g amap m) l1 l2 | TType_tuple _, TType_tuple _ | TType_anon _, TType_anon _ | TType_fun _, TType_fun _ -> - TypesFeasiblyEquiv ndeep g amap m ty1 ty2, ResultWorthCaching.Yes + TypesFeasiblyEquiv ndeep g amap m ty1 ty2 | _ -> // F# reference types are subtypes of type 'obj' if isObjTyAnyNullness g ty1 && (canCoerce = CanCoerce || isRefTy g ty2) then - true, ResultWorthCaching.No + true elif isAppTy g ty2 && (canCoerce = CanCoerce || isRefTy g ty2) && TypeFeasiblySubsumesTypeWithSupertypeCheck g amap m ndeep ty1 ty2 then - true, ResultWorthCaching.Yes + true else let interfaces = GetImmediateInterfacesOfType SkipUnrefInterfaces.Yes g amap m ty2 // See if any interface in type hierarchy of ty2 is a supertype of ty1 - List.exists (TypeFeasiblySubsumesType (ndeep + 1) g amap m ty1 NoCoerce) interfaces, ResultWorthCaching.Yes + List.exists (TypeFeasiblySubsumesType (ndeep + 1) g amap m ty1 NoCoerce) interfaces - if worthCaching = ResultWorthCaching.Yes then - UpdateCachedTypeSubsumption g amap key subsumes + UpdateCachedTypeSubsumption g amap key subsumes subsumes diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 1fafe1b958b..d17dd466731 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -56,40 +56,12 @@ type CanCoerce = | CanCoerce | NoCoerce -module StampHashing = - let rec stampEquals ty1 ty2 = - match ty1, ty2 with - | TType_app(tcref1, tinst1, _), TType_app(tcref2, tinst2, _) -> - tcref1.Stamp = tcref2.Stamp - && tinst1.Length = tinst2.Length - && (tinst1, tinst2) ||> Seq.zip |> Seq.forall (fun (t1, t2) -> stampEquals t1 t2) - - | TType_var(r1, _), TType_var(r2, _) -> r1.Stamp = r2.Stamp - | _ -> false - - let inline hashStamp (x: int64) = - uint x * 2654435761u |> 
int - - // The idea is to keep the illusion of immutability of TType. - // This hash must be stable during compilation, otherwise we won't be able to find keys or evict from the cache. - let rec simpleTypeHash ty = - match ty with - | TType_ucase (_, tinst) -> tinst |> hashListOrderMatters (simpleTypeHash) - | TType_app(tcref, tinst, _) -> tinst |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hashStamp tcref.Stamp) - | TType_anon(info, tys) -> tys |> hashListOrderMatters (simpleTypeHash) |> pipeToHash (hashStamp info.Stamp) - | TType_tuple(_ , tys) -> tys |> hashListOrderMatters (simpleTypeHash) - | TType_forall(tps, tau) -> tps |> Seq.map _.Stamp |> hashListOrderMatters (hashStamp) |> pipeToHash (simpleTypeHash tau) - | TType_fun (d, r, _) -> simpleTypeHash d |> pipeToHash (simpleTypeHash r) - | TType_var (r, _) -> hashStamp r.Stamp - | TType_measure _ -> 0 - [] type TTypeCacheKey = val ty1: TType val ty2: TType val canCoerce: CanCoerce - //val tcGlobals: TcGlobals private new (ty1, ty2, canCoerce) = { ty1 = ty1; ty2 = ty2; canCoerce = canCoerce } @@ -104,8 +76,8 @@ type TTypeCacheKey = elif this.ty1 === other.ty1 && this.ty2 === other.ty2 then true else - StampHashing.stampEquals this.ty1 other.ty1 - && StampHashing.stampEquals this.ty2 other.ty2 + HashStamps.stampEquals this.ty1 other.ty1 + && HashStamps.stampEquals this.ty2 other.ty2 override this.Equals(other:objnull) = match other with @@ -113,8 +85,8 @@ type TTypeCacheKey = | _ -> false override this.GetHashCode() : int = - StampHashing.simpleTypeHash this.ty1 - |> pipeToHash (StampHashing.simpleTypeHash this.ty2) + HashStamps.hashTType this.ty1 + |> pipeToHash (HashStamps.hashTType this.ty2) |> pipeToHash (hash this.canCoerce) override this.ToString () = $"{this.ty1.DebugText}-{this.ty2.DebugText}" diff --git a/src/Compiler/Checking/import.fsi b/src/Compiler/Checking/import.fsi index 4ce9b25f755..0ba2a635ec0 100644 --- a/src/Compiler/Checking/import.fsi +++ b/src/Compiler/Checking/import.fsi @@ -47,8 +47,7 @@ type TTypeCacheKey = interface System.IEquatable private new: ty1: TType * ty2: TType * canCoerce: CanCoerce -> TTypeCacheKey - static member FromStrippedTypes: - ty1: TType * ty2: TType * canCoerce: CanCoerce -> TTypeCacheKey + static member FromStrippedTypes: ty1: TType * ty2: TType * canCoerce: CanCoerce -> TTypeCacheKey val ty1: TType val ty2: TType diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 985653ebdfa..2059fe490f0 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -59,6 +59,7 @@ type internal CachedEntity<'Key, 'Value> = member this.WithNode() = if isNull this.Node then this.Node <- LinkedListNode(this) + this member this.ReUse(key, value) = @@ -69,13 +70,14 @@ type internal CachedEntity<'Key, 'Value> = override this.ToString() = $"{this.Key}" -type internal IEvictionQueue<'Key, 'Value> = interface - abstract member Acquire : 'Key * 'Value -> CachedEntity<'Key, 'Value> - abstract member Add : CachedEntity<'Key, 'Value> * CachingStrategy -> unit - abstract member Update : CachedEntity<'Key, 'Value> -> unit - abstract member GetKeysToEvict : int -> 'Key[] - abstract member Remove : CachedEntity<'Key, 'Value> -> unit -end +type internal IEvictionQueue<'Key, 'Value> = + interface + abstract member Acquire: 'Key * 'Value -> CachedEntity<'Key, 'Value> + abstract member Add: CachedEntity<'Key, 'Value> * CachingStrategy -> unit + abstract member Update: CachedEntity<'Key, 'Value> -> unit + abstract member GetKeysToEvict: int -> 'Key[] + 
abstract member Remove: CachedEntity<'Key, 'Value> -> unit + end type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = @@ -86,20 +88,17 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = member _.Acquire(key, value) = match pool.TryTake() with - | true , entity -> entity.ReUse(key, value) - | _ -> - CachedEntity(key, value).WithNode() + | true, entity -> entity.ReUse(key, value) + | _ -> CachedEntity(key, value).WithNode() member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = lock list <| fun () -> if isNull entity.Node.List then match strategy with - | CachingStrategy.LRU -> - list.AddLast(entity.Node) - | CachingStrategy.LFU -> - list.AddLast(entity.Node) - // list.AddFirst(entity.Node) + | CachingStrategy.LRU -> list.AddLast(entity.Node) + | CachingStrategy.LFU -> list.AddLast(entity.Node) + // list.AddFirst(entity.Node) member _.Update(entity: CachedEntity<'Key, 'Value>) = lock list @@ -146,9 +145,13 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = { new IEvictionQueue<'Key, 'Value> with member _.Acquire(key, value) = CachedEntity(key, value) member _.Add(_, _) = () - member _.Update(entity) = Interlocked.Increment(&entity.AccessCount) |> ignore + + member _.Update(entity) = + Interlocked.Increment(&entity.AccessCount) |> ignore + member _.GetKeysToEvict(_) = [||] - member _.Remove(_) = () } + member _.Remove(_) = () + } [] [] @@ -163,7 +166,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> let store = ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) - let evictionQueue : IEvictionQueue<'Key, 'Value> = + let evictionQueue: IEvictionQueue<'Key, 'Value> = match options.EvictionMethod with | EvictionMethod.NoEviction -> EvictionQueue.NoEviction | _ -> EvictionQueue(options.Strategy) @@ -264,59 +267,56 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> member this.Dispose() = (this :> IDisposable).Dispose() - override this.Finalize (): unit = - this.Dispose() + override this.Finalize() : unit = this.Dispose() - -module internal Cache = +module internal Cache = let mutable cacheId = 0 - + [] let MeterName = "FSharp.Compiler.Caches" - + let addInstrumentation (cache: Cache<_, _>) = let meter = new Meter(MeterName) let cacheId = Interlocked.Increment &cacheId - + let mutable evictions = 0L let mutable fails = 0L let mutable hits = 0L let mutable misses = 0L - + let mutable allEvictions = 0L let mutable allFails = 0L let mutable allHits = 0L let mutable allMisses = 0L - - cache.CacheHit |> Event.add (fun _ -> + + cache.CacheHit + |> Event.add (fun _ -> Interlocked.Increment &hits |> ignore - Interlocked.Increment &allHits |> ignore - ) - - cache.CacheMiss |> Event.add (fun _ -> + Interlocked.Increment &allHits |> ignore) + + cache.CacheMiss + |> Event.add (fun _ -> Interlocked.Increment &misses |> ignore - Interlocked.Increment &allMisses |> ignore - ) - - cache.Eviction |> Event.add (fun _ -> + Interlocked.Increment &allMisses |> ignore) + + cache.Eviction + |> Event.add (fun _ -> Interlocked.Increment &evictions |> ignore - Interlocked.Increment &allEvictions |> ignore - ) - - cache.EvictionFail |> Event.add (fun _ -> + Interlocked.Increment &allEvictions |> ignore) + + cache.EvictionFail + |> Event.add (fun _ -> Interlocked.Increment &fails |> ignore - Interlocked.Increment &allFails |> ignore - ) - - + Interlocked.Increment &allFails |> ignore) + let hitRatio () = let misses = Interlocked.Exchange(&misses, 0L) let hits = 
Interlocked.Exchange(&hits, 0L) float hits / float (hits + misses) - + meter.CreateObservableGauge($"hit ratio {cacheId}", hitRatio) |> ignore - let Create<'Key, 'Value when 'Key: not null and 'Key: equality>(options: CacheOptions) = + let Create<'Key, 'Value when 'Key: not null and 'Key: equality> (options: CacheOptions) = // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let capacity = options.MaximumCapacity @@ -324,8 +324,7 @@ module internal Cache = let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) - #if DEBUG +#if DEBUG addInstrumentation cache - #endif +#endif cache - diff --git a/src/Compiler/Utilities/TypeHashing.fs b/src/Compiler/Utilities/TypeHashing.fs index cd1d0ac9b95..7639f2dd679 100644 --- a/src/Compiler/Utilities/TypeHashing.fs +++ b/src/Compiler/Utilities/TypeHashing.fs @@ -328,3 +328,33 @@ module HashTastMemberOrVals = hashNonMemberVal (g, obs) (tps, vref.Deref, tau, cxs) | Some _ -> hashMember (g, obs) emptyTyparInst vref.Deref + +module HashStamps = + let rec stampEquals ty1 ty2 = + match ty1, ty2 with + | TType_app(tcref1, tinst1, _), TType_app(tcref2, tinst2, _) -> + tcref1.Stamp = tcref2.Stamp + && tinst1.Length = tinst2.Length + && (tinst1, tinst2) ||> Seq.zip |> Seq.forall (fun (t1, t2) -> stampEquals t1 t2) + + | TType_var(r1, _), TType_var(r2, _) -> r1.Stamp = r2.Stamp + | _ -> false + + let inline hashStamp (x: int64) = uint x * 2654435761u |> int + + // The idea is to keep the illusion of immutability of TType. + // This hash must be stable during compilation, otherwise we won't be able to find keys or evict from the cache. + let rec hashTType ty = + match ty with + | TType_ucase(_, tinst) -> tinst |> hashListOrderMatters (hashTType) + | TType_app(tcref, tinst, _) -> tinst |> hashListOrderMatters (hashTType) |> pipeToHash (hashStamp tcref.Stamp) + | TType_anon(info, tys) -> tys |> hashListOrderMatters (hashTType) |> pipeToHash (hashStamp info.Stamp) + | TType_tuple(_, tys) -> tys |> hashListOrderMatters (hashTType) + | TType_forall(tps, tau) -> + tps + |> Seq.map _.Stamp + |> hashListOrderMatters (hashStamp) + |> pipeToHash (hashTType tau) + | TType_fun(d, r, _) -> hashTType d |> pipeToHash (hashTType r) + | TType_var(r, _) -> hashStamp r.Stamp + | TType_measure _ -> 0 diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 051fc17fdc5..875877e212e 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -158,23 +158,35 @@ module FSharpServiceTelemetry = if instrument.Meter.Name = "FSharp.Compiler.Caches" then l.EnableMeasurementEvents(instrument) ) + let measurements = Collections.Generic.Dictionary<_, _>() let changed = ResizeArray() - let callBack = MeasurementCallback(fun i v _ _ -> - let v = if Double.IsNaN v then "-" else $"%.1f{v * 100.}%%" - if measurements.ContainsKey(i.Name) && measurements[i.Name] = v then () - else - measurements[i.Name] <- v - changed.Add i.Name) + + let callBack = + MeasurementCallback(fun i v _ _ -> + let v = if Double.IsNaN v then "-" else $"%.1f{v * 100.}%%" + + if measurements.ContainsKey(i.Name) && measurements[i.Name] = v then + () + else + measurements[i.Name] <- v + changed.Add i.Name) + listener.SetMeasurementEventCallback callBack listener.Start() let timer = new System.Timers.Timer(1000.0, AutoReset = true) - timer.Elapsed.Add (fun _ -> + + timer.Elapsed.Add(fun _ -> changed.Clear() 
listener.RecordObservableInstruments() - let msg = seq { for k in changed -> $"{k}: {measurements[k]}" } |> String.concat ", " - if msg <> "" then logMsg msg) + + let msg = + seq { for k in changed -> $"{k}: {measurements[k]}" } |> String.concat ", " + + if msg <> "" then + logMsg msg) + timer.Start() open OpenTelemetry.Resources @@ -184,13 +196,13 @@ module FSharpServiceTelemetry = let export () = let meterProvider = // Configure OpenTelemetry metrics. Metrics can be viewed in Prometheus or other compatible tools. - OpenTelemetry.Sdk.CreateMeterProviderBuilder() + OpenTelemetry.Sdk + .CreateMeterProviderBuilder() .ConfigureResource(fun r -> r.AddService("F#") |> ignore) .AddMeter(FSharp.Compiler.Cache.MeterName) .AddOtlpExporter(fun _e m -> m.PeriodicExportingMetricReaderOptions.ExportIntervalMilliseconds <- 1000 - m.TemporalityPreference <- MetricReaderTemporalityPreference.Cumulative - ) + m.TemporalityPreference <- MetricReaderTemporalityPreference.Cumulative) .Build() let tracerProvider = From 56eab5ae76f6fc39b5c9e88c008e5b6fa42f52e2 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Wed, 23 Apr 2025 15:01:06 +0200 Subject: [PATCH 36/44] metrics --- src/Compiler/Checking/import.fs | 2 +- src/Compiler/Utilities/Caches.fs | 201 +++++++++++------- .../src/FSharp.Editor/Common/Logging.fs | 35 +-- 3 files changed, 131 insertions(+), 107 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index d17dd466731..d87e13b47ec 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -105,7 +105,7 @@ let createTypeSubsumptionCache (g: TcGlobals) = MaximumCapacity = 32_000 } // 8192 } Cache.Create(options) -let typeSubsumptionCaches = ConditionalWeakTable>() +let typeSubsumptionCaches = ConditionalWeakTable>() //------------------------------------------------------------------------- // Import an IL types as F# types. 
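Note (illustrative, not part of the patch series): a standalone sketch of the stamp-based hashing idea behind HashStamps and TTypeCacheKey above. Only the multiplicative hash of the int64 stamp is taken directly from the diff (hashStamp); combine and hashStamps below are hypothetical stand-ins for the compiler's pipeToHash and hashListOrderMatters helpers, shown only to make the order-sensitive combination concrete.

module StampHashSketch =

    // Multiplicative hash of an int64 stamp, as in HashStamps.hashStamp above:
    // multiplying by 2654435761 spreads sequential stamps across the 32-bit hash space.
    let inline hashStamp (stamp: int64) = uint stamp * 2654435761u |> int

    // Hypothetical stand-in for pipeToHash: order-sensitive combination of an
    // accumulated hash with the next component.
    let combine (next: int) (acc: int) = (acc * 31) ^^^ next

    // Hypothetical stand-in for hashListOrderMatters: fold so that element order matters.
    let hashStamps (stamps: int64 list) =
        stamps |> List.fold (fun acc s -> combine (hashStamp s) acc) 0

    // Example inputs: stamp lists that differ only in order generally hash differently,
    // which is what makes the scheme usable for keys built from nested type structure.
    let forward = hashStamps [ 1L; 2L; 3L ]
    let reversed = hashStamps [ 3L; 2L; 1L ]
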
diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 2059fe490f0..6638ad7ca60 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -9,6 +9,7 @@ open System.Threading open System.Diagnostics open System.Diagnostics.Metrics open Internal.Utilities.Library +open System.Runtime.CompilerServices [] type internal CachingStrategy = @@ -71,25 +72,27 @@ type internal CachedEntity<'Key, 'Value> = override this.ToString() = $"{this.Key}" type internal IEvictionQueue<'Key, 'Value> = - interface - abstract member Acquire: 'Key * 'Value -> CachedEntity<'Key, 'Value> - abstract member Add: CachedEntity<'Key, 'Value> * CachingStrategy -> unit - abstract member Update: CachedEntity<'Key, 'Value> -> unit - abstract member GetKeysToEvict: int -> 'Key[] - abstract member Remove: CachedEntity<'Key, 'Value> -> unit - end + abstract member Acquire: 'Key * 'Value -> CachedEntity<'Key, 'Value> + abstract member Add: CachedEntity<'Key, 'Value> * CachingStrategy -> unit + abstract member Update: CachedEntity<'Key, 'Value> -> unit + abstract member GetKeysToEvict: int -> 'Key[] + abstract member Remove: CachedEntity<'Key, 'Value> -> unit -type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = +type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapacity, overCapacity: Event<_>) = let list = LinkedList>() let pool = ConcurrentBag>() + let mutable created = 0 interface IEvictionQueue<'Key, 'Value> with member _.Acquire(key, value) = match pool.TryTake() with | true, entity -> entity.ReUse(key, value) - | _ -> CachedEntity(key, value).WithNode() + | _ -> + if Interlocked.Increment &created > maximumCapacity then + overCapacity.Trigger() + CachedEntity(key, value).WithNode() member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = lock list @@ -137,7 +140,7 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = member this.Remove(entity: CachedEntity<_, _>) = lock list <| fun () -> list.Remove(entity.Node) // Return to the pool for reuse. 
- pool.Add(entity) + if pool.Count < maximumCapacity then pool.Add(entity) member _.Count = list.Count @@ -153,15 +156,32 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = member _.Remove(_) = () } +type ICacheEvents = + [] + abstract member CacheHit: IEvent + + [] + abstract member CacheMiss: IEvent + + [] + abstract member Eviction: IEvent + + [] + abstract member EvictionFail: IEvent + + [] + abstract member OverCapacity: IEvent + [] [] type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (options: CacheOptions, capacity, cts: CancellationTokenSource) = - let cacheHit = Event<_ * _>() - let cacheMiss = Event<_>() - let eviction = Event<_>() - let evictionFail = Event<_>() + let cacheHit = Event() + let cacheMiss = Event() + let eviction = Event() + let evictionFail = Event() + let overCapacity = Event() let store = ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) @@ -169,7 +189,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> let evictionQueue: IEvictionQueue<'Key, 'Value> = match options.EvictionMethod with | EvictionMethod.NoEviction -> EvictionQueue.NoEviction - | _ -> EvictionQueue(options.Strategy) + | _ -> EvictionQueue(options.Strategy, options.MaximumCapacity, overCapacity) let tryEvictItems () = let count = @@ -182,10 +202,10 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> match store.TryRemove(key) with | true, removed -> evictionQueue.Remove(removed) - eviction.Trigger(key) + eviction.Trigger() | _ -> failwith "eviction fail" - evictionFail.Trigger(key) + evictionFail.Trigger() let rec backgroundEviction () = async { @@ -212,12 +232,12 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> member _.TryGetValue(key: 'Key, value: outref<'Value>) = match store.TryGetValue(key) with | true, cachedEntity -> - cacheHit.Trigger(key, cachedEntity.Value) + cacheHit.Trigger() evictionQueue.Update(cachedEntity) value <- cachedEntity.Value true | _ -> - cacheMiss.Trigger(key) + cacheMiss.Trigger() value <- Unchecked.defaultof<'Value> false @@ -248,83 +268,116 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> evictionQueue.Add(entity, options.Strategy) - [] - member val CacheHit = cacheHit.Publish + interface ICacheEvents with - [] - member val CacheMiss = cacheMiss.Publish + [] + member val CacheHit = cacheHit.Publish - [] - member val Eviction = eviction.Publish + [] + member val CacheMiss = cacheMiss.Publish - [] - member val EvictionFail = evictionFail.Publish + [] + member val Eviction = eviction.Publish + + [] + member val EvictionFail = evictionFail.Publish + + [] + member val OverCapacity = overCapacity.Publish interface IDisposable with member this.Dispose() = cts.Cancel() + CacheInstrumentation.RemoveInstrumentation(this) GC.SuppressFinalize(this) member this.Dispose() = (this :> IDisposable).Dispose() override this.Finalize() : unit = this.Dispose() -module internal Cache = - let mutable cacheId = 0 + static member Create<'Key, 'Value when 'Key: not null and 'Key: equality> (options: CacheOptions) = + // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. 
+ let capacity = + options.MaximumCapacity + + (options.MaximumCapacity * options.PercentageToEvict / 100) - [] - let MeterName = "FSharp.Compiler.Caches" + let cts = new CancellationTokenSource() + let cache = new Cache<'Key, 'Value>(options, capacity, cts) + CacheInstrumentation.AddInstrumentation cache |> ignore + cache + + member this.GetStats() = + CacheInstrumentation.GetStats(this) - let addInstrumentation (cache: Cache<_, _>) = - let meter = new Meter(MeterName) - let cacheId = Interlocked.Increment &cacheId +and internal CacheInstrumentation (cache: ICacheEvents) = + static let mutable cacheId = 0 - let mutable evictions = 0L - let mutable fails = 0L - let mutable hits = 0L - let mutable misses = 0L + static let instrumentedCaches = ConcurrentDictionary() - let mutable allEvictions = 0L - let mutable allFails = 0L - let mutable allHits = 0L - let mutable allMisses = 0L + static let meter = new Meter(nameof CacheInstrumentation) + let hits = meter.CreateCounter("hits") + let misses = meter.CreateCounter("misses") + let evictions = meter.CreateCounter("evictions") + let evictionFails = meter.CreateCounter("eviction-fails") + let overCapacity = meter.CreateCounter("over-capacity") - cache.CacheHit - |> Event.add (fun _ -> - Interlocked.Increment &hits |> ignore - Interlocked.Increment &allHits |> ignore) + do + cache.CacheHit.Add <| fun _ -> hits.Add(1L) + cache.CacheMiss.Add <| fun _ -> misses.Add(1L) + cache.Eviction.Add <| fun _ -> evictions.Add(1L) + cache.EvictionFail.Add <| fun _ -> evictionFails.Add(1L) + cache.OverCapacity.Add <| fun _ -> overCapacity.Add(1L) - cache.CacheMiss - |> Event.add (fun _ -> - Interlocked.Increment &misses |> ignore - Interlocked.Increment &allMisses |> ignore) + let current = ConcurrentDictionary() - cache.Eviction - |> Event.add (fun _ -> - Interlocked.Increment &evictions |> ignore - Interlocked.Increment &allEvictions |> ignore) +#if DEBUG + let listener = + new MeterListener( + InstrumentPublished = fun i l -> if i.Meter = meter then l.EnableMeasurementEvents(i)) - cache.EvictionFail - |> Event.add (fun _ -> - Interlocked.Increment &fails |> ignore - Interlocked.Increment &allFails |> ignore) + do + listener.SetMeasurementEventCallback(fun k v _ _ -> + Interlocked.Add(current.GetOrAdd(k, ref 0L), v) |> ignore) + listener.Start() +#endif - let hitRatio () = - let misses = Interlocked.Exchange(&misses, 0L) - let hits = Interlocked.Exchange(&hits, 0L) - float hits / float (hits + misses) + member val CacheId = $"cache-{Interlocked.Increment(&cacheId)}" - meter.CreateObservableGauge($"hit ratio {cacheId}", hitRatio) |> ignore + member val RecentStats = "-" with get, set + + member this.TryUpdateStats(clearCounts) = + let stats = + try + let ratio = float current[hits].Value / float (current[hits].Value + current[misses].Value) + [ for i in current.Keys -> $"{i.Name}: {current[i].Value}"] + |> String.concat ", " + |> sprintf "%s | ratio: %.2f %s" this.CacheId ratio + with _ -> "!" 
+ + if clearCounts then + for r in current.Values do Interlocked.Exchange(r, 0L) |> ignore + + if stats <> this.RecentStats then + this.RecentStats <- stats + true + else + false + + static member GetStats(cache: ICacheEvents) = + instrumentedCaches[cache].TryUpdateStats(false) |> ignore + instrumentedCaches[cache].RecentStats + + static member GetStatsUpdateForAllCaches(clearCounts) = + [ + for i in instrumentedCaches.Values do + if i.TryUpdateStats(clearCounts) then + i.RecentStats + ] + + static member AddInstrumentation(cache: ICacheEvents) = + instrumentedCaches[cache] <- CacheInstrumentation(cache) + + static member RemoveInstrumentation(cache: ICacheEvents) = + instrumentedCaches.TryRemove(cache) |> ignore - let Create<'Key, 'Value when 'Key: not null and 'Key: equality> (options: CacheOptions) = - // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. - let capacity = - options.MaximumCapacity - + (options.MaximumCapacity * options.PercentageToEvict / 100) - let cts = new CancellationTokenSource() - let cache = new Cache<'Key, 'Value>(options, capacity, cts) -#if DEBUG - addInstrumentation cache -#endif - cache diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 875877e212e..342a1df9efa 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -151,41 +151,12 @@ module FSharpServiceTelemetry = #if DEBUG let logCacheMetricsToOutput () = - let listener = - new MeterListener( - InstrumentPublished = - fun instrument l -> - if instrument.Meter.Name = "FSharp.Compiler.Caches" then - l.EnableMeasurementEvents(instrument) - ) - - let measurements = Collections.Generic.Dictionary<_, _>() - let changed = ResizeArray() - - let callBack = - MeasurementCallback(fun i v _ _ -> - let v = if Double.IsNaN v then "-" else $"%.1f{v * 100.}%%" - - if measurements.ContainsKey(i.Name) && measurements[i.Name] = v then - () - else - measurements[i.Name] <- v - changed.Add i.Name) - - listener.SetMeasurementEventCallback callBack - listener.Start() let timer = new System.Timers.Timer(1000.0, AutoReset = true) timer.Elapsed.Add(fun _ -> - changed.Clear() - listener.RecordObservableInstruments() - - let msg = - seq { for k in changed -> $"{k}: {measurements[k]}" } |> String.concat ", " - - if msg <> "" then - logMsg msg) + FSharp.Compiler.CacheInstrumentation.GetStatsUpdateForAllCaches(clearCounts = true) + |> Seq.iter logMsg) timer.Start() @@ -199,7 +170,7 @@ module FSharpServiceTelemetry = OpenTelemetry.Sdk .CreateMeterProviderBuilder() .ConfigureResource(fun r -> r.AddService("F#") |> ignore) - .AddMeter(FSharp.Compiler.Cache.MeterName) + .AddMeter(nameof FSharp.Compiler.CacheInstrumentation) .AddOtlpExporter(fun _e m -> m.PeriodicExportingMetricReaderOptions.ExportIntervalMilliseconds <- 1000 m.TemporalityPreference <- MetricReaderTemporalityPreference.Cumulative) From 64b6107d0dc02efa1a0531a5a893319cddbcc10d Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:32:00 +0200 Subject: [PATCH 37/44] add signature --- src/Compiler/FSharp.Compiler.Service.fsproj | 1 + src/Compiler/Utilities/Caches.fs | 74 +++++++++--------- src/Compiler/Utilities/Caches.fsi | 87 +++++++++++++++++++++ 3 files changed, 125 insertions(+), 37 deletions(-) create mode 100644 src/Compiler/Utilities/Caches.fsi diff --git a/src/Compiler/FSharp.Compiler.Service.fsproj 
b/src/Compiler/FSharp.Compiler.Service.fsproj index af43f511094..9754dd25a11 100644 --- a/src/Compiler/FSharp.Compiler.Service.fsproj +++ b/src/Compiler/FSharp.Compiler.Service.fsproj @@ -146,6 +146,7 @@ + diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 6638ad7ca60..d0624d36bdf 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -1,5 +1,4 @@ // LinkedList uses nulls, so we need to disable the nullability warnings for this file. -#nowarn 3261 namespace FSharp.Compiler open System @@ -8,22 +7,20 @@ open System.Collections.Concurrent open System.Threading open System.Diagnostics open System.Diagnostics.Metrics -open Internal.Utilities.Library -open System.Runtime.CompilerServices [] -type internal CachingStrategy = +type CachingStrategy = | LRU | LFU [] -type internal EvictionMethod = +type EvictionMethod = | Blocking | Background | NoEviction [] -type internal CacheOptions = +type CacheOptions = { MaximumCapacity: int PercentageToEvict: int @@ -43,7 +40,7 @@ type internal CacheOptions = [] [] -type internal CachedEntity<'Key, 'Value> = +type CachedEntity<'Key, 'Value> = val mutable Key: 'Key val mutable Value: 'Value val mutable AccessCount: int64 @@ -58,9 +55,7 @@ type internal CachedEntity<'Key, 'Value> = } member this.WithNode() = - if isNull this.Node then - this.Node <- LinkedListNode(this) - + this.Node <- LinkedListNode(this) this member this.ReUse(key, value) = @@ -71,14 +66,15 @@ type internal CachedEntity<'Key, 'Value> = override this.ToString() = $"{this.Key}" -type internal IEvictionQueue<'Key, 'Value> = +type IEvictionQueue<'Key, 'Value> = abstract member Acquire: 'Key * 'Value -> CachedEntity<'Key, 'Value> abstract member Add: CachedEntity<'Key, 'Value> * CachingStrategy -> unit abstract member Update: CachedEntity<'Key, 'Value> -> unit abstract member GetKeysToEvict: int -> 'Key[] abstract member Remove: CachedEntity<'Key, 'Value> -> unit -type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapacity, overCapacity: Event<_>) = +[] +type EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapacity, overCapacity: Event<_>) = let list = LinkedList>() let pool = ConcurrentBag>() @@ -92,6 +88,7 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapa | _ -> if Interlocked.Increment &created > maximumCapacity then overCapacity.Trigger() + CachedEntity(key, value).WithNode() member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = @@ -122,10 +119,9 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapa // Bubble up the node in the list, linear time. // TODO: frequency list approach would be faster. let rec bubbleUp (current: LinkedListNode>) = - if isNotNull current.Next && current.Next.Value.AccessCount < entity.AccessCount then - bubbleUp current.Next - else - current + match current.Next with + | NonNull next when next.Value.AccessCount < entity.AccessCount -> bubbleUp next + | _ -> current let next = bubbleUp node @@ -140,7 +136,8 @@ type internal EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapa member this.Remove(entity: CachedEntity<_, _>) = lock list <| fun () -> list.Remove(entity.Node) // Return to the pool for reuse. 
- if pool.Count < maximumCapacity then pool.Add(entity) + if pool.Count < maximumCapacity then + pool.Add(entity) member _.Count = list.Count @@ -167,15 +164,14 @@ type ICacheEvents = abstract member Eviction: IEvent [] - abstract member EvictionFail: IEvent + abstract member EvictionFail: IEvent [] abstract member OverCapacity: IEvent [] [] -type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> - internal (options: CacheOptions, capacity, cts: CancellationTokenSource) = +type Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (options: CacheOptions, capacity, cts: CancellationTokenSource) = let cacheHit = Event() let cacheMiss = Event() @@ -295,7 +291,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> override this.Finalize() : unit = this.Dispose() - static member Create<'Key, 'Value when 'Key: not null and 'Key: equality> (options: CacheOptions) = + static member Create<'Key, 'Value when 'Key: not null and 'Key: equality>(options: CacheOptions) = // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let capacity = options.MaximumCapacity @@ -304,12 +300,11 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) CacheInstrumentation.AddInstrumentation cache |> ignore - cache - - member this.GetStats() = - CacheInstrumentation.GetStats(this) + cache + + member this.GetStats() = CacheInstrumentation.GetStats(this) -and internal CacheInstrumentation (cache: ICacheEvents) = +and CacheInstrumentation(cache: ICacheEvents) = static let mutable cacheId = 0 static let instrumentedCaches = ConcurrentDictionary() @@ -333,11 +328,14 @@ and internal CacheInstrumentation (cache: ICacheEvents) = #if DEBUG let listener = new MeterListener( - InstrumentPublished = fun i l -> if i.Meter = meter then l.EnableMeasurementEvents(i)) + InstrumentPublished = + fun i l -> + if i.Meter = meter then + l.EnableMeasurementEvents(i) + ) do - listener.SetMeasurementEventCallback(fun k v _ _ -> - Interlocked.Add(current.GetOrAdd(k, ref 0L), v) |> ignore) + listener.SetMeasurementEventCallback(fun k v _ _ -> Interlocked.Add(current.GetOrAdd(k, ref 0L), v) |> ignore) listener.Start() #endif @@ -347,15 +345,19 @@ and internal CacheInstrumentation (cache: ICacheEvents) = member this.TryUpdateStats(clearCounts) = let stats = - try - let ratio = float current[hits].Value / float (current[hits].Value + current[misses].Value) - [ for i in current.Keys -> $"{i.Name}: {current[i].Value}"] + try + let ratio = + float current[hits].Value / float (current[hits].Value + current[misses].Value) + + [ for i in current.Keys -> $"{i.Name}: {current[i].Value}" ] |> String.concat ", " |> sprintf "%s | ratio: %.2f %s" this.CacheId ratio - with _ -> "!" + with _ -> + "!" 
- if clearCounts then - for r in current.Values do Interlocked.Exchange(r, 0L) |> ignore + if clearCounts then + for r in current.Values do + Interlocked.Exchange(r, 0L) |> ignore if stats <> this.RecentStats then this.RecentStats <- stats @@ -379,5 +381,3 @@ and internal CacheInstrumentation (cache: ICacheEvents) = static member RemoveInstrumentation(cache: ICacheEvents) = instrumentedCaches.TryRemove(cache) |> ignore - - diff --git a/src/Compiler/Utilities/Caches.fsi b/src/Compiler/Utilities/Caches.fsi new file mode 100644 index 00000000000..c8a7068c3d1 --- /dev/null +++ b/src/Compiler/Utilities/Caches.fsi @@ -0,0 +1,87 @@ +namespace FSharp.Compiler + +open System +open System.Threading + +[] +type internal CachingStrategy = + | LRU + | LFU + +[] +type internal EvictionMethod = + | Blocking + | Background + | NoEviction + +[] +type internal CacheOptions = + { MaximumCapacity: int + PercentageToEvict: int + Strategy: CachingStrategy + EvictionMethod: EvictionMethod + LevelOfConcurrency: int } + + static member Default: CacheOptions + +[] +type internal CachedEntity<'Key, 'Value> = + new: key: 'Key * value: 'Value -> CachedEntity<'Key, 'Value> + member WithNode: unit -> CachedEntity<'Key, 'Value> + member ReUse: key: 'Key * value: 'Value -> CachedEntity<'Key, 'Value> + override ToString: unit -> string + +type internal IEvictionQueue<'Key, 'Value> = + abstract member Acquire: 'Key * 'Value -> CachedEntity<'Key, 'Value> + abstract member Add: CachedEntity<'Key, 'Value> * CachingStrategy -> unit + abstract member Update: CachedEntity<'Key, 'Value> -> unit + abstract member GetKeysToEvict: int -> 'Key[] + abstract member Remove: CachedEntity<'Key, 'Value> -> unit + +[] +type internal EvictionQueue<'Key, 'Value> = + new: strategy: CachingStrategy * maximumCapacity: int * overCapacity: Event -> EvictionQueue<'Key, 'Value> + member Count: int + static member NoEviction: IEvictionQueue<'Key, 'Value> + interface IEvictionQueue<'Key, 'Value> + +type internal ICacheEvents = + [] + abstract member CacheHit: IEvent + + [] + abstract member CacheMiss: IEvent + + [] + abstract member Eviction: IEvent + + [] + abstract member EvictionFail: IEvent + + [] + abstract member OverCapacity: IEvent + +[] +type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> = + new: options: CacheOptions * capacity: int * cts: CancellationTokenSource -> Cache<'Key, 'Value> + member TryGetValue: key: 'Key * value: outref<'Value> -> bool + member TryAdd: key: 'Key * value: 'Value -> bool + member AddOrUpdate: key: 'Key * value: 'Value -> unit + member Dispose: unit -> unit + member GetStats: unit -> string + + static member Create<'Key, 'Value when 'Key: not null and 'Key: equality> : + options: CacheOptions -> Cache<'Key, 'Value> + + interface ICacheEvents + interface IDisposable + +type internal CacheInstrumentation = + new: cache: ICacheEvents -> CacheInstrumentation + member CacheId: string + member RecentStats: string + member TryUpdateStats: clearCounts: bool -> bool + static member GetStats: cache: ICacheEvents -> string + static member GetStatsUpdateForAllCaches: clearCounts: bool -> string list + static member AddInstrumentation: cache: ICacheEvents -> unit + static member RemoveInstrumentation: cache: ICacheEvents -> unit From 905fe5272f0e162da3f11a6c94187576cd24f112 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Wed, 23 Apr 2025 19:08:08 +0200 Subject: [PATCH 38/44] fix --- src/Compiler/Utilities/Caches.fs | 10 ++++++---- 
src/Compiler/Utilities/Caches.fsi | 2 +- vsintegration/src/FSharp.Editor/Common/Logging.fs | 2 +- .../FSharp.Editor/LanguageService/LanguageService.fs | 3 ++- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index d0624d36bdf..814165454a8 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -291,7 +291,7 @@ type Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (option override this.Finalize() : unit = this.Dispose() - static member Create<'Key, 'Value when 'Key: not null and 'Key: equality>(options: CacheOptions) = + static member Create<'Key, 'Value>(options: CacheOptions) = // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let capacity = options.MaximumCapacity @@ -325,7 +325,6 @@ and CacheInstrumentation(cache: ICacheEvents) = let current = ConcurrentDictionary() -#if DEBUG let listener = new MeterListener( InstrumentPublished = @@ -337,7 +336,6 @@ and CacheInstrumentation(cache: ICacheEvents) = do listener.SetMeasurementEventCallback(fun k v _ _ -> Interlocked.Add(current.GetOrAdd(k, ref 0L), v) |> ignore) listener.Start() -#endif member val CacheId = $"cache-{Interlocked.Increment(&cacheId)}" @@ -365,6 +363,9 @@ and CacheInstrumentation(cache: ICacheEvents) = else false + member this.Dispose() = + listener.Dispose() + static member GetStats(cache: ICacheEvents) = instrumentedCaches[cache].TryUpdateStats(false) |> ignore instrumentedCaches[cache].RecentStats @@ -377,7 +378,8 @@ and CacheInstrumentation(cache: ICacheEvents) = ] static member AddInstrumentation(cache: ICacheEvents) = - instrumentedCaches[cache] <- CacheInstrumentation(cache) + instrumentedCaches[cache] <- new CacheInstrumentation(cache) static member RemoveInstrumentation(cache: ICacheEvents) = + instrumentedCaches[cache].Dispose() instrumentedCaches.TryRemove(cache) |> ignore diff --git a/src/Compiler/Utilities/Caches.fsi b/src/Compiler/Utilities/Caches.fsi index c8a7068c3d1..87f261c7aee 100644 --- a/src/Compiler/Utilities/Caches.fsi +++ b/src/Compiler/Utilities/Caches.fsi @@ -70,7 +70,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> = member Dispose: unit -> unit member GetStats: unit -> string - static member Create<'Key, 'Value when 'Key: not null and 'Key: equality> : + static member Create<'Key, 'Value> : options: CacheOptions -> Cache<'Key, 'Value> interface ICacheEvents diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 342a1df9efa..d74ca91ceb4 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -149,7 +149,6 @@ module FSharpServiceTelemetry = ActivitySource.AddActivityListener(listener) -#if DEBUG let logCacheMetricsToOutput () = let timer = new System.Timers.Timer(1000.0, AutoReset = true) @@ -160,6 +159,7 @@ module FSharpServiceTelemetry = timer.Start() +#if DEBUG open OpenTelemetry.Resources open OpenTelemetry.Trace open OpenTelemetry.Metrics diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index 12d534b4b8e..ea3d122f48b 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -343,9 +343,10 @@ type internal FSharpPackage() as this = // FSI-LINKAGE-POINT: unsited 
init do FSharp.Interactive.Hooks.fsiConsoleWindowPackageCtorUnsited (this :> Package) -#if DEBUG do Logging.FSharpServiceTelemetry.logCacheMetricsToOutput () +#if DEBUG + let flushTelemetry = Logging.FSharpServiceTelemetry.export () override this.Dispose(disposing: bool) = From 6e21777ab784fe33a879c8a841ef4180346cd8e8 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Wed, 23 Apr 2025 20:16:56 +0200 Subject: [PATCH 39/44] eviction --- src/Compiler/Checking/import.fs | 6 +++--- src/Compiler/Utilities/Caches.fs | 16 ++++++++++------ src/Compiler/Utilities/Caches.fsi | 2 +- .../src/FSharp.Editor/Common/Logging.fs | 4 ++-- 4 files changed, 16 insertions(+), 12 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index d87e13b47ec..74a06213146 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -95,14 +95,14 @@ let createTypeSubsumptionCache (g: TcGlobals) = let options = if g.compilationMode = CompilationMode.OneOff then { CacheOptions.Default with - MaximumCapacity = 8192 + MaximumCapacity = 100_000 EvictionMethod = EvictionMethod.NoEviction } else { CacheOptions.Default with EvictionMethod = EvictionMethod.Background Strategy = CachingStrategy.LRU - PercentageToEvict = 5 - MaximumCapacity = 32_000 } // 8192 } + PercentageToEvict = 20 + MaximumCapacity = 100_000 } Cache.Create(options) let typeSubsumptionCaches = ConditionalWeakTable>() diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index 814165454a8..e0c574e83f2 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -185,12 +185,13 @@ type Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (option let evictionQueue: IEvictionQueue<'Key, 'Value> = match options.EvictionMethod with | EvictionMethod.NoEviction -> EvictionQueue.NoEviction - | _ -> EvictionQueue(options.Strategy, options.MaximumCapacity, overCapacity) + | _ -> EvictionQueue(options.Strategy, capacity, overCapacity) let tryEvictItems () = let count = if store.Count > options.MaximumCapacity then - store.Count - options.MaximumCapacity + (store.Count - options.MaximumCapacity) + + int (float options.MaximumCapacity * float options.PercentageToEvict / 100.0) else 0 @@ -295,7 +296,7 @@ type Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (option // Increase expected capacity by the percentage to evict, since we want to not resize the dictionary. let capacity = options.MaximumCapacity - + (options.MaximumCapacity * options.PercentageToEvict / 100) + + int (float options.MaximumCapacity * float options.PercentageToEvict / 100.0) let cts = new CancellationTokenSource() let cache = new Cache<'Key, 'Value>(options, capacity, cts) @@ -345,11 +346,13 @@ and CacheInstrumentation(cache: ICacheEvents) = let stats = try let ratio = - float current[hits].Value / float (current[hits].Value + current[misses].Value) + float current[hits].Value / float (current[hits].Value + current[misses].Value) * 100.0 - [ for i in current.Keys -> $"{i.Name}: {current[i].Value}" ] + [ for i in current.Keys do + let v = current[i].Value + if v > 0 then $"{i.Name}: {v}" ] |> String.concat ", " - |> sprintf "%s | ratio: %.2f %s" this.CacheId ratio + |> sprintf "%s | hit ratio: %s %s" this.CacheId (if Double.IsNaN(ratio) then "-" else $"%.1f{ratio}%%") with _ -> "!" 
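To make the revised eviction arithmetic in this patch concrete: once the store exceeds `MaximumCapacity`, the count now adds an extra `PercentageToEvict` worth of headroom on top of the overshoot, so a single pass brings the store well below the limit instead of just back to it. A worked sketch follows (illustrative only, not part of the patch, using the incremental-mode options set above: `MaximumCapacity = 100_000`, `PercentageToEvict = 20`):

    // Mirrors the tryEvictItems arithmetic above; standalone sketch, not part of the patch.
    let evictionCount (storeCount: int) (maximumCapacity: int) (percentageToEvict: int) =
        if storeCount > maximumCapacity then
            (storeCount - maximumCapacity)
            + int (float maximumCapacity * float percentageToEvict / 100.0)
        else
            0

    // A store that has grown to 100_500 entries evicts (100_500 - 100_000) + 20_000 = 20_500
    // in one pass, leaving it at roughly 80_000, i.e. about 20_000 insertions of headroom
    // before the next pass is needed.
    let demo = evictionCount 100_500 100_000 20 // 20_500

The same percentage is what `Create` now folds into the expected dictionary capacity, so that headroom is pre-sized instead of forcing a mid-flight resize.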
@@ -376,6 +379,7 @@ and CacheInstrumentation(cache: ICacheEvents) = if i.TryUpdateStats(clearCounts) then i.RecentStats ] + |> String.concat "\n" static member AddInstrumentation(cache: ICacheEvents) = instrumentedCaches[cache] <- new CacheInstrumentation(cache) diff --git a/src/Compiler/Utilities/Caches.fsi b/src/Compiler/Utilities/Caches.fsi index 87f261c7aee..71ad76df14f 100644 --- a/src/Compiler/Utilities/Caches.fsi +++ b/src/Compiler/Utilities/Caches.fsi @@ -82,6 +82,6 @@ type internal CacheInstrumentation = member RecentStats: string member TryUpdateStats: clearCounts: bool -> bool static member GetStats: cache: ICacheEvents -> string - static member GetStatsUpdateForAllCaches: clearCounts: bool -> string list + static member GetStatsUpdateForAllCaches: clearCounts: bool -> string static member AddInstrumentation: cache: ICacheEvents -> unit static member RemoveInstrumentation: cache: ICacheEvents -> unit diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index d74ca91ceb4..7a93e3d0d40 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -154,8 +154,8 @@ module FSharpServiceTelemetry = let timer = new System.Timers.Timer(1000.0, AutoReset = true) timer.Elapsed.Add(fun _ -> - FSharp.Compiler.CacheInstrumentation.GetStatsUpdateForAllCaches(clearCounts = true) - |> Seq.iter logMsg) + let stats = FSharp.Compiler.CacheInstrumentation.GetStatsUpdateForAllCaches(clearCounts = true) + if stats <> "" then logMsg $"\n{stats}") timer.Start() From 83d8122e27ed16e9d001e00b68724bd6917acaf1 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Wed, 23 Apr 2025 21:44:28 +0200 Subject: [PATCH 40/44] output --- src/Compiler/Utilities/Caches.fs | 36 ++++++++++++------- src/Compiler/Utilities/Caches.fsi | 3 +- .../src/FSharp.Editor/Common/Logging.fs | 7 ++-- 3 files changed, 29 insertions(+), 17 deletions(-) diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index e0c574e83f2..f88ca251d57 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -311,11 +311,17 @@ and CacheInstrumentation(cache: ICacheEvents) = static let instrumentedCaches = ConcurrentDictionary() static let meter = new Meter(nameof CacheInstrumentation) - let hits = meter.CreateCounter("hits") - let misses = meter.CreateCounter("misses") - let evictions = meter.CreateCounter("evictions") - let evictionFails = meter.CreateCounter("eviction-fails") - let overCapacity = meter.CreateCounter("over-capacity") + + let instanceId = $"cache-{Interlocked.Increment(&cacheId)}" + + let hits = meter.CreateCounter("hits", "count", instanceId) + let misses = meter.CreateCounter("misses", "count", instanceId) + let evictions = meter.CreateCounter("evictions", "count", instanceId) + + let evictionFails = + meter.CreateCounter("eviction-fails", "count", instanceId) + + let overCapacity = meter.CreateCounter("over-capacity", "count", instanceId) do cache.CacheHit.Add <| fun _ -> hits.Add(1L) @@ -330,7 +336,7 @@ and CacheInstrumentation(cache: ICacheEvents) = new MeterListener( InstrumentPublished = fun i l -> - if i.Meter = meter then + if i.Meter = meter && i.Description = instanceId then l.EnableMeasurementEvents(i) ) @@ -338,7 +344,7 @@ and CacheInstrumentation(cache: ICacheEvents) = listener.SetMeasurementEventCallback(fun k v _ _ -> Interlocked.Add(current.GetOrAdd(k, ref 0L), v) |> ignore) listener.Start() - member val 
CacheId = $"cache-{Interlocked.Increment(&cacheId)}" + member val CacheId = instanceId member val RecentStats = "-" with get, set @@ -346,11 +352,16 @@ and CacheInstrumentation(cache: ICacheEvents) = let stats = try let ratio = - float current[hits].Value / float (current[hits].Value + current[misses].Value) * 100.0 + float current[hits].Value / float (current[hits].Value + current[misses].Value) + * 100.0 + + [ + for i in current.Keys do + let v = current[i].Value - [ for i in current.Keys do - let v = current[i].Value - if v > 0 then $"{i.Name}: {v}" ] + if v > 0 then + $"{i.Name}: {v}" + ] |> String.concat ", " |> sprintf "%s | hit ratio: %s %s" this.CacheId (if Double.IsNaN(ratio) then "-" else $"%.1f{ratio}%%") with _ -> @@ -366,8 +377,7 @@ and CacheInstrumentation(cache: ICacheEvents) = else false - member this.Dispose() = - listener.Dispose() + member this.Dispose() = listener.Dispose() static member GetStats(cache: ICacheEvents) = instrumentedCaches[cache].TryUpdateStats(false) |> ignore diff --git a/src/Compiler/Utilities/Caches.fsi b/src/Compiler/Utilities/Caches.fsi index 71ad76df14f..ab37575b70d 100644 --- a/src/Compiler/Utilities/Caches.fsi +++ b/src/Compiler/Utilities/Caches.fsi @@ -70,8 +70,7 @@ type internal Cache<'Key, 'Value when 'Key: not null and 'Key: equality> = member Dispose: unit -> unit member GetStats: unit -> string - static member Create<'Key, 'Value> : - options: CacheOptions -> Cache<'Key, 'Value> + static member Create<'Key, 'Value> : options: CacheOptions -> Cache<'Key, 'Value> interface ICacheEvents interface IDisposable diff --git a/vsintegration/src/FSharp.Editor/Common/Logging.fs b/vsintegration/src/FSharp.Editor/Common/Logging.fs index 7a93e3d0d40..c9615a7ae57 100644 --- a/vsintegration/src/FSharp.Editor/Common/Logging.fs +++ b/vsintegration/src/FSharp.Editor/Common/Logging.fs @@ -154,8 +154,11 @@ module FSharpServiceTelemetry = let timer = new System.Timers.Timer(1000.0, AutoReset = true) timer.Elapsed.Add(fun _ -> - let stats = FSharp.Compiler.CacheInstrumentation.GetStatsUpdateForAllCaches(clearCounts = true) - if stats <> "" then logMsg $"\n{stats}") + let stats = + FSharp.Compiler.CacheInstrumentation.GetStatsUpdateForAllCaches(clearCounts = true) + + if stats <> "" then + logMsg $"\n{stats}") timer.Start() From 2c81e17b23e81cb0adab61dce296c76387ee3fc2 Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 24 Apr 2025 12:38:45 +0200 Subject: [PATCH 41/44] back to singleton --- src/Compiler/Checking/import.fs | 41 ++++++++++++++++---------- src/Compiler/Utilities/Caches.fs | 49 ++++++++++++++++++------------- src/Compiler/Utilities/Caches.fsi | 3 +- 3 files changed, 56 insertions(+), 37 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 74a06213146..0362496f31e 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -29,6 +29,7 @@ open FSharp.Compiler.TcGlobals #if !NO_TYPEPROVIDERS open FSharp.Compiler.TypeProviders +open System.Threading #endif /// Represents an interface to some of the functionality of TcImports, for loading assemblies @@ -91,21 +92,31 @@ type TTypeCacheKey = override this.ToString () = $"{this.ty1.DebugText}-{this.ty2.DebugText}" -let createTypeSubsumptionCache (g: TcGlobals) = - let options = - if g.compilationMode = CompilationMode.OneOff then - { CacheOptions.Default with - MaximumCapacity = 100_000 - EvictionMethod = EvictionMethod.NoEviction } - else - { CacheOptions.Default with - EvictionMethod = 
EvictionMethod.Background - Strategy = CachingStrategy.LRU - PercentageToEvict = 20 - MaximumCapacity = 100_000 } - Cache.Create(options) +let getOrCreateTypeSubsumptionCache = + let mutable latch = 0 + let mutable cache = None + + fun (g: TcGlobals) -> + // Single execution latch. We create a singleton assuming compilationMode will not change during the lifetime of the process. + if Interlocked.CompareExchange(&latch, 1, 0) = 0 then + let options = + match g.compilationMode with + | CompilationMode.OneOff -> + // This is a one-off compilation, so we don't need to worry about eviction. + { CacheOptions.Default with + MaximumCapacity = 200_000 + EvictionMethod = EvictionMethod.NoEviction } + | _ -> + // Oncremental use, so we need to set up the cache with eviction. + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + Strategy = CachingStrategy.LRU + PercentageToEvict = 5 + MaximumCapacity = 4 * 32768 } + cache <- Some (Cache.Create(options)) + cache.Value -let typeSubsumptionCaches = ConditionalWeakTable>() +let _typeSubsumptionCaches = ConditionalWeakTable>() //------------------------------------------------------------------------- // Import an IL types as F# types. @@ -129,7 +140,7 @@ type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = member _.ILTypeRefToTyconRefCache = typeRefToTyconRefCache - member _.TypeSubsumptionCache = typeSubsumptionCaches.GetValue(g, createTypeSubsumptionCache) // getOrCreateTypeSubsumptionCache g.compilationMode + member val TypeSubsumptionCache: Cache = getOrCreateTypeSubsumptionCache g let CanImportILScopeRef (env: ImportMap) m scoref = diff --git a/src/Compiler/Utilities/Caches.fs b/src/Compiler/Utilities/Caches.fs index f88ca251d57..97f2b08d8af 100644 --- a/src/Compiler/Utilities/Caches.fs +++ b/src/Compiler/Utilities/Caches.fs @@ -66,31 +66,36 @@ type CachedEntity<'Key, 'Value> = override this.ToString() = $"{this.Key}" +type EntityPool<'Key, 'Value>(maximumCapacity, overCapacity: Event<_>) = + let pool = ConcurrentBag>() + let mutable created = 0 + + member _.Acquire(key, value) = + match pool.TryTake() with + | true, entity -> entity.ReUse(key, value) + | _ -> + if Interlocked.Increment &created > maximumCapacity then + overCapacity.Trigger() + + CachedEntity(key, value).WithNode() + + member _.Reclaim(entity: CachedEntity<'Key, 'Value>) = + if pool.Count < maximumCapacity then + pool.Add(entity) + type IEvictionQueue<'Key, 'Value> = - abstract member Acquire: 'Key * 'Value -> CachedEntity<'Key, 'Value> abstract member Add: CachedEntity<'Key, 'Value> * CachingStrategy -> unit abstract member Update: CachedEntity<'Key, 'Value> -> unit abstract member GetKeysToEvict: int -> 'Key[] abstract member Remove: CachedEntity<'Key, 'Value> -> unit [] -type EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapacity, overCapacity: Event<_>) = +type EvictionQueue<'Key, 'Value>(strategy: CachingStrategy) = let list = LinkedList>() - let pool = ConcurrentBag>() - let mutable created = 0 interface IEvictionQueue<'Key, 'Value> with - member _.Acquire(key, value) = - match pool.TryTake() with - | true, entity -> entity.ReUse(key, value) - | _ -> - if Interlocked.Increment &created > maximumCapacity then - overCapacity.Trigger() - - CachedEntity(key, value).WithNode() - member _.Add(entity: CachedEntity<'Key, 'Value>, strategy) = lock list <| fun () -> @@ -135,15 +140,11 @@ type EvictionQueue<'Key, 'Value>(strategy: CachingStrategy, maximumCapacity, ove member this.Remove(entity: CachedEntity<_, _>) = lock list <| fun () -> 
list.Remove(entity.Node) - // Return to the pool for reuse. - if pool.Count < maximumCapacity then - pool.Add(entity) member _.Count = list.Count static member NoEviction = { new IEvictionQueue<'Key, 'Value> with - member _.Acquire(key, value) = CachedEntity(key, value) member _.Add(_, _) = () member _.Update(entity) = @@ -179,13 +180,15 @@ type Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (option let evictionFail = Event() let overCapacity = Event() + let pool = EntityPool<'Key, 'Value>(capacity, overCapacity) + let store = ConcurrentDictionary<'Key, CachedEntity<'Key, 'Value>>(options.LevelOfConcurrency, capacity) let evictionQueue: IEvictionQueue<'Key, 'Value> = match options.EvictionMethod with | EvictionMethod.NoEviction -> EvictionQueue.NoEviction - | _ -> EvictionQueue(options.Strategy, capacity, overCapacity) + | _ -> EvictionQueue(options.Strategy) let tryEvictItems () = let count = @@ -199,6 +202,7 @@ type Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (option match store.TryRemove(key) with | true, removed -> evictionQueue.Remove(removed) + pool.Reclaim(removed) eviction.Trigger() | _ -> failwith "eviction fail" @@ -242,24 +246,29 @@ type Cache<'Key, 'Value when 'Key: not null and 'Key: equality> internal (option if options.EvictionMethod.IsBlocking then tryEvictItems () - let cachedEntity = evictionQueue.Acquire(key, value) + let cachedEntity = pool.Acquire(key, value) if store.TryAdd(key, cachedEntity) then evictionQueue.Add(cachedEntity, options.Strategy) true else + pool.Reclaim(cachedEntity) false member _.AddOrUpdate(key: 'Key, value: 'Value) = if options.EvictionMethod.IsBlocking then tryEvictItems () + let aquired = pool.Acquire(key, value) + let entity = store.AddOrUpdate( key, - (fun _ -> evictionQueue.Acquire(key, value)), + (fun _ -> aquired), (fun _ (current: CachedEntity<_, _>) -> + pool.Reclaim aquired current.Value <- value + evictionQueue.Remove(current) current) ) diff --git a/src/Compiler/Utilities/Caches.fsi b/src/Compiler/Utilities/Caches.fsi index ab37575b70d..c2dd99fe041 100644 --- a/src/Compiler/Utilities/Caches.fsi +++ b/src/Compiler/Utilities/Caches.fsi @@ -32,7 +32,6 @@ type internal CachedEntity<'Key, 'Value> = override ToString: unit -> string type internal IEvictionQueue<'Key, 'Value> = - abstract member Acquire: 'Key * 'Value -> CachedEntity<'Key, 'Value> abstract member Add: CachedEntity<'Key, 'Value> * CachingStrategy -> unit abstract member Update: CachedEntity<'Key, 'Value> -> unit abstract member GetKeysToEvict: int -> 'Key[] @@ -40,7 +39,7 @@ type internal IEvictionQueue<'Key, 'Value> = [] type internal EvictionQueue<'Key, 'Value> = - new: strategy: CachingStrategy * maximumCapacity: int * overCapacity: Event -> EvictionQueue<'Key, 'Value> + new: strategy: CachingStrategy -> EvictionQueue<'Key, 'Value> member Count: int static member NoEviction: IEvictionQueue<'Key, 'Value> interface IEvictionQueue<'Key, 'Value> From f9ab164652b491e0190b14a92edef74e217aae8b Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 24 Apr 2025 12:39:16 +0200 Subject: [PATCH 42/44] otel --- tests/FSharp.Test.Utilities/XunitHelpers.fs | 18 +++++++++++++++++- .../FSharp.Editor.Tests.fsproj | 3 +++ .../tests/Salsa/VisualFSharp.Salsa.fsproj | 3 +++ .../UnitTests/VisualFSharp.UnitTests.fsproj | 3 +++ 4 files changed, 26 insertions(+), 1 deletion(-) diff --git a/tests/FSharp.Test.Utilities/XunitHelpers.fs b/tests/FSharp.Test.Utilities/XunitHelpers.fs index 34a44df17ed..c68b2f29a14 
100644 --- a/tests/FSharp.Test.Utilities/XunitHelpers.fs +++ b/tests/FSharp.Test.Utilities/XunitHelpers.fs @@ -12,9 +12,9 @@ open TestFramework open FSharp.Compiler.Diagnostics -open OpenTelemetry open OpenTelemetry.Resources open OpenTelemetry.Trace +open OpenTelemetry.Metrics /// Disables custom internal parallelization added with XUNIT_EXTRAS. /// Execute test cases in a class or a module one by one instead of all at once. Allow other collections to run simultaneously. @@ -146,12 +146,17 @@ type FSharpXunitFramework(sink: IMessageSink) = AssemblyResolver.addResolver () #endif + // On Windows forwarding localhost to wsl2 docker container sometimes does not work. Use IP address instead. + let otlpEndpoint = Uri("http://127.0.0.1:4317") + // Configure OpenTelemetry export. Traces can be viewed in Jaeger or other compatible tools. use tracerProvider = OpenTelemetry.Sdk.CreateTracerProviderBuilder() .AddSource(ActivityNames.FscSourceName) .ConfigureResource(fun r -> r.AddService("F#") |> ignore) .AddOtlpExporter(fun o -> + o.Endpoint <- otlpEndpoint + o.Protocol <- OpenTelemetry.Exporter.OtlpExportProtocol.Grpc // Empirical values to ensure no traces are lost and no significant delay at the end of test run. o.TimeoutMilliseconds <- 200 o.BatchExportProcessorOptions.MaxQueueSize <- 16384 @@ -159,6 +164,17 @@ type FSharpXunitFramework(sink: IMessageSink) = ) .Build() + use meterProvider = + OpenTelemetry.Sdk.CreateMeterProviderBuilder() + .AddMeter(nameof FSharp.Compiler.CacheInstrumentation) + .ConfigureResource(fun r -> r.AddService("F#") |> ignore) + .AddOtlpExporter(fun e m -> + e.Endpoint <- otlpEndpoint + e.Protocol <- OpenTelemetry.Exporter.OtlpExportProtocol.Grpc + m.PeriodicExportingMetricReaderOptions.ExportIntervalMilliseconds <- 1000 + ) + .Build() + logConfig initialConfig log "Installing TestConsole redirection" TestConsole.install() diff --git a/vsintegration/tests/FSharp.Editor.Tests/FSharp.Editor.Tests.fsproj b/vsintegration/tests/FSharp.Editor.Tests/FSharp.Editor.Tests.fsproj index 738a3e1323c..25f46d3505f 100644 --- a/vsintegration/tests/FSharp.Editor.Tests/FSharp.Editor.Tests.fsproj +++ b/vsintegration/tests/FSharp.Editor.Tests/FSharp.Editor.Tests.fsproj @@ -17,6 +17,9 @@ + + XunitSetup.fs + diff --git a/vsintegration/tests/Salsa/VisualFSharp.Salsa.fsproj b/vsintegration/tests/Salsa/VisualFSharp.Salsa.fsproj index 7f24444a6e5..ba2efee9706 100644 --- a/vsintegration/tests/Salsa/VisualFSharp.Salsa.fsproj +++ b/vsintegration/tests/Salsa/VisualFSharp.Salsa.fsproj @@ -16,6 +16,9 @@ + + XunitSetup.fs + diff --git a/vsintegration/tests/UnitTests/VisualFSharp.UnitTests.fsproj b/vsintegration/tests/UnitTests/VisualFSharp.UnitTests.fsproj index 77c7d12e017..c7f69001d9e 100644 --- a/vsintegration/tests/UnitTests/VisualFSharp.UnitTests.fsproj +++ b/vsintegration/tests/UnitTests/VisualFSharp.UnitTests.fsproj @@ -17,6 +17,9 @@ + + XunitSetup.fs + From 6e6ca75885638665434d1f1d9532ecaeb96dc43e Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 24 Apr 2025 12:42:54 +0200 Subject: [PATCH 43/44] fix --- src/Compiler/Checking/import.fs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 0362496f31e..06e304d35cf 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -8,6 +8,7 @@ open System.Collections.Generic open System.Collections.Immutable open System.Diagnostics open System.Runtime.CompilerServices +open System.Threading 
open Internal.Utilities.Library open Internal.Utilities.Library.Extras @@ -29,7 +30,6 @@ open FSharp.Compiler.TcGlobals #if !NO_TYPEPROVIDERS open FSharp.Compiler.TypeProviders -open System.Threading #endif /// Represents an interface to some of the functionality of TcImports, for loading assemblies @@ -116,8 +116,6 @@ let getOrCreateTypeSubsumptionCache = cache <- Some (Cache.Create(options)) cache.Value -let _typeSubsumptionCaches = ConditionalWeakTable>() - //------------------------------------------------------------------------- // Import an IL types as F# types. //------------------------------------------------------------------------- From 619dbd416a6be626d66b51c5ab5ed598202c4fbd Mon Sep 17 00:00:00 2001 From: Jakub Majocha <1760221+majocha@users.noreply.github.com> Date: Thu, 24 Apr 2025 13:31:21 +0200 Subject: [PATCH 44/44] fixfix --- src/Compiler/Checking/import.fs | 44 +++++++++++++++++---------------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/src/Compiler/Checking/import.fs b/src/Compiler/Checking/import.fs index 06e304d35cf..1b3ccbd64fb 100644 --- a/src/Compiler/Checking/import.fs +++ b/src/Compiler/Checking/import.fs @@ -93,28 +93,30 @@ type TTypeCacheKey = override this.ToString () = $"{this.ty1.DebugText}-{this.ty2.DebugText}" let getOrCreateTypeSubsumptionCache = - let mutable latch = 0 + let mutable lockObj = obj() let mutable cache = None - fun (g: TcGlobals) -> - // Single execution latch. We create a singleton assuming compilationMode will not change during the lifetime of the process. - if Interlocked.CompareExchange(&latch, 1, 0) = 0 then - let options = - match g.compilationMode with - | CompilationMode.OneOff -> - // This is a one-off compilation, so we don't need to worry about eviction. - { CacheOptions.Default with - MaximumCapacity = 200_000 - EvictionMethod = EvictionMethod.NoEviction } - | _ -> - // Oncremental use, so we need to set up the cache with eviction. - { CacheOptions.Default with - EvictionMethod = EvictionMethod.Background - Strategy = CachingStrategy.LRU - PercentageToEvict = 5 - MaximumCapacity = 4 * 32768 } - cache <- Some (Cache.Create(options)) - cache.Value + fun compilationMode -> + lock lockObj <| fun () -> + match cache with + | Some c -> c + | _ -> + let options = + match compilationMode with + | CompilationMode.OneOff -> + // This is a one-off compilation, so we don't need to worry about eviction. + { CacheOptions.Default with + MaximumCapacity = 200_000 + EvictionMethod = EvictionMethod.NoEviction } + | _ -> + // Oncremental use, so we need to set up the cache with eviction. + { CacheOptions.Default with + EvictionMethod = EvictionMethod.Background + Strategy = CachingStrategy.LRU + PercentageToEvict = 5 + MaximumCapacity = 4 * 32768 } + cache <- Some (Cache.Create(options)) + cache.Value //------------------------------------------------------------------------- // Import an IL types as F# types. @@ -138,7 +140,7 @@ type ImportMap(g: TcGlobals, assemblyLoader: AssemblyLoader) = member _.ILTypeRefToTyconRefCache = typeRefToTyconRefCache - member val TypeSubsumptionCache: Cache = getOrCreateTypeSubsumptionCache g + member val TypeSubsumptionCache: Cache = getOrCreateTypeSubsumptionCache g.compilationMode let CanImportILScopeRef (env: ImportMap) m scoref =
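A note on the final shape of `getOrCreateTypeSubsumptionCache`: under the `Interlocked.CompareExchange` latch from the previous commit, a second caller could skip creation and fall through to `cache.Value` while the winning thread was still building the cache, i.e. while `cache` was still `None`; serializing creation with `lock` closes that window. Whichever caller arrives first also fixes the options, so a later call with a different `compilationMode` simply gets the already-built cache back. The sketch below shows the same create-once-under-lock shape in isolation; it is illustrative only, the names (`getOrCreateOnce`, `getCache`) are hypothetical, and it keeps the cached value in a `ref` cell so the closure can update it.

    // Memoize a one-argument factory so that only the first call's argument matters.
    // Standalone sketch of the pattern used for the type-subsumption cache above.
    let getOrCreateOnce (build: 'Arg -> 'T) =
        let gate = obj ()
        let cached = ref None // holds the singleton once the first caller has built it

        fun (arg: 'Arg) ->
            lock gate (fun () ->
                match cached.Value with
                | Some value -> value
                | None ->
                    let value = build arg
                    cached.Value <- Some value
                    value)

    // Usage: the first call decides the configuration; the second call's argument is
    // ignored and the same instance comes back.
    let getCache =
        getOrCreateOnce (fun (capacity: int) -> System.Collections.Generic.Dictionary<string, int>(capacity))

    let first = getCache 100
    let second = getCache 9999
    assert (obj.ReferenceEquals(first, second))

As in the patch, the instance is shared for the lifetime of the process, which matches the earlier comment that `compilationMode` is not expected to change once compilation has started.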