diff --git a/FSharpBuild.Directory.Build.props b/FSharpBuild.Directory.Build.props index 5be07042285..604684fb4fb 100644 --- a/FSharpBuild.Directory.Build.props +++ b/FSharpBuild.Directory.Build.props @@ -26,7 +26,6 @@ 1182;0025;$(WarningsAsErrors) $(OtherFlags) --nowarn:3384 $(OtherFlags) --times --nowarn:75 - $(OtherFlags) --test:ParallelCheckingWithSignatureFilesOn $(OtherFlags) $(AdditionalFscCmdFlags) diff --git a/src/Compiler/Checking/CheckDeclarations.fs b/src/Compiler/Checking/CheckDeclarations.fs index 705d9b6d7d6..a65745059fa 100644 --- a/src/Compiler/Checking/CheckDeclarations.fs +++ b/src/Compiler/Checking/CheckDeclarations.fs @@ -3028,6 +3028,17 @@ module EstablishTypeDefinitionCores = let kind = if hasMeasureAttr then TyparKind.Measure else TyparKind.Type let ty, _ = TcTypeOrMeasureAndRecover (Some kind) cenv NoNewTypars checkConstraints ItemOccurence.UseInType WarnOnIWSAM.No envinner tpenv rhsType + // Give a warning if `AutoOpenAttribute` is being aliased. + // If the user were to alias the `Microsoft.FSharp.Core.AutoOpenAttribute` type, it would not be detected by the project graph dependency resolution algorithm. + match stripTyEqns g ty with + | AppTy g (tcref, _) when not tcref.IsErased -> + match tcref.CompiledRepresentation with + | CompiledTypeRepr.ILAsmOpen _ -> () + | CompiledTypeRepr.ILAsmNamed _ -> + if tcref.CompiledRepresentationForNamedType.FullName = g.attrib_AutoOpenAttribute.TypeRef.FullName then + warning(Error(FSComp.SR.chkAutoOpenAttributeInTypeAbbrev(), tycon.Id.idRange)) + | _ -> () + if not firstPass then let ftyvs = freeInTypeLeftToRight g false ty let typars = tycon.Typars m @@ -4278,7 +4289,7 @@ module TcDeclarations = if not (isNil members) && tcref.IsTypeAbbrev then errorR(Error(FSComp.SR.tcTypeAbbreviationsCannotHaveAugmentations(), tyDeclRange)) - + let (SynComponentInfo (attributes, _, _, _, _, _, _, _)) = synTyconInfo if not (List.isEmpty attributes) && (declKind = ExtrinsicExtensionBinding || declKind = IntrinsicExtensionBinding) then let attributeRange = (List.head attributes).Range diff --git a/src/Compiler/Driver/CompilerConfig.fs b/src/Compiler/Driver/CompilerConfig.fs index aee2d46e095..6a534a174b1 100644 --- a/src/Compiler/Driver/CompilerConfig.fs +++ b/src/Compiler/Driver/CompilerConfig.fs @@ -393,6 +393,18 @@ type ParallelReferenceResolution = | On | Off +[] +type TypeCheckingMode = + | Sequential + | Graph + +[] +type TypeCheckingConfig = + { + Mode: TypeCheckingMode + DumpGraph: bool + } + [] type TcConfigBuilder = { @@ -507,7 +519,6 @@ type TcConfigBuilder = mutable emitTailcalls: bool mutable deterministic: bool mutable concurrentBuild: bool - mutable parallelCheckingWithSignatureFiles: bool mutable parallelIlxGen: bool mutable emitMetadataAssembly: MetadataAssemblyGeneration mutable preferredUiLang: string option @@ -591,6 +602,8 @@ type TcConfigBuilder = mutable parallelReferenceResolution: ParallelReferenceResolution mutable captureIdentifiersWhenParsing: bool + + mutable typeCheckingConfig: TypeCheckingConfig } // Directories to start probing in @@ -737,7 +750,6 @@ type TcConfigBuilder = emitTailcalls = true deterministic = false concurrentBuild = true - parallelCheckingWithSignatureFiles = FSharpExperimentalFeaturesEnabledAutomatically parallelIlxGen = FSharpExperimentalFeaturesEnabledAutomatically emitMetadataAssembly = MetadataAssemblyGeneration.None preferredUiLang = None @@ -782,6 +794,15 @@ type TcConfigBuilder = exiter = QuitProcessExiter parallelReferenceResolution = ParallelReferenceResolution.Off 
captureIdentifiersWhenParsing = false + typeCheckingConfig = + { + TypeCheckingConfig.Mode = + if FSharpExperimentalFeaturesEnabledAutomatically then + TypeCheckingMode.Graph + else + TypeCheckingMode.Sequential + DumpGraph = false + } } member tcConfigB.FxResolver = @@ -1286,7 +1307,6 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) = member _.emitTailcalls = data.emitTailcalls member _.deterministic = data.deterministic member _.concurrentBuild = data.concurrentBuild - member _.parallelCheckingWithSignatureFiles = data.parallelCheckingWithSignatureFiles member _.parallelIlxGen = data.parallelIlxGen member _.emitMetadataAssembly = data.emitMetadataAssembly member _.pathMap = data.pathMap @@ -1322,6 +1342,7 @@ type TcConfig private (data: TcConfigBuilder, validate: bool) = member _.exiter = data.exiter member _.parallelReferenceResolution = data.parallelReferenceResolution member _.captureIdentifiersWhenParsing = data.captureIdentifiersWhenParsing + member _.typeCheckingConfig = data.typeCheckingConfig static member Create(builder, validate) = use _ = UseBuildPhase BuildPhase.Parameter diff --git a/src/Compiler/Driver/CompilerConfig.fsi b/src/Compiler/Driver/CompilerConfig.fsi index 89f2cf81537..04b87e3e428 100644 --- a/src/Compiler/Driver/CompilerConfig.fsi +++ b/src/Compiler/Driver/CompilerConfig.fsi @@ -203,6 +203,24 @@ type ParallelReferenceResolution = | On | Off +/// Determines the algorithm used for type-checking. +[] +type TypeCheckingMode = + /// Default mode where all source files are processed sequentially in compilation order. + | Sequential + /// Parallel type-checking that uses automated file-to-file dependency detection to construct a file graph processed in parallel. + | Graph + +/// Some of the information dedicated to type-checking. +[] +type TypeCheckingConfig = + { + Mode: TypeCheckingMode + /// When using TypeCheckingMode.Graph, this flag determines whether the + /// resolved file graph should be serialised as a Mermaid diagram into a file next to the output dll. + DumpGraph: bool + } + [] type TcConfigBuilder = { @@ -412,8 +430,6 @@ type TcConfigBuilder = mutable concurrentBuild: bool - mutable parallelCheckingWithSignatureFiles: bool - mutable parallelIlxGen: bool mutable emitMetadataAssembly: MetadataAssemblyGeneration @@ -495,6 +511,8 @@ type TcConfigBuilder = mutable parallelReferenceResolution: ParallelReferenceResolution mutable captureIdentifiersWhenParsing: bool + + mutable typeCheckingConfig: TypeCheckingConfig } static member CreateNew: @@ -738,8 +756,6 @@ type TcConfig = member concurrentBuild: bool - member parallelCheckingWithSignatureFiles: bool - member parallelIlxGen: bool member emitMetadataAssembly: MetadataAssemblyGeneration @@ -866,6 +882,8 @@ type TcConfig = member captureIdentifiersWhenParsing: bool + member typeCheckingConfig: TypeCheckingConfig + /// Represents a computation to return a TcConfig. Normally this is just a constant immutable TcConfig, /// but for F# Interactive it may be based on an underlying mutable TcConfigBuilder. 
[] diff --git a/src/Compiler/Driver/CompilerOptions.fs b/src/Compiler/Driver/CompilerOptions.fs index 0c48513b6c5..f74c049d3d7 100644 --- a/src/Compiler/Driver/CompilerOptions.fs +++ b/src/Compiler/Driver/CompilerOptions.fs @@ -1372,8 +1372,17 @@ let testFlag tcConfigB = | "ShowLoadedAssemblies" -> tcConfigB.showLoadedAssemblies <- true | "ContinueAfterParseFailure" -> tcConfigB.continueAfterParseFailure <- true | "ParallelOff" -> tcConfigB.concurrentBuild <- false - | "ParallelCheckingWithSignatureFilesOn" -> tcConfigB.parallelCheckingWithSignatureFiles <- true | "ParallelIlxGen" -> tcConfigB.parallelIlxGen <- true + | "GraphBasedChecking" -> + tcConfigB.typeCheckingConfig <- + { tcConfigB.typeCheckingConfig with + Mode = TypeCheckingMode.Graph + } + | "DumpCheckingGraph" -> + tcConfigB.typeCheckingConfig <- + { tcConfigB.typeCheckingConfig with + DumpGraph = true + } #if DEBUG | "ShowParserStackOnParseError" -> showParserStackOnParseError <- true #endif diff --git a/src/Compiler/Driver/GraphChecking/Continuation.fs b/src/Compiler/Driver/GraphChecking/Continuation.fs new file mode 100644 index 00000000000..c7a01d59374 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/Continuation.fs @@ -0,0 +1,15 @@ +[] +module internal Continuation + +let rec sequence<'T, 'TReturn> (recursions: (('T -> 'TReturn) -> 'TReturn) list) (finalContinuation: 'T list -> 'TReturn) : 'TReturn = + match recursions with + | [] -> finalContinuation [] + | andThenInner :: andThenInners -> + fun (results: 'T list) -> + fun (result: 'T) -> result :: results |> finalContinuation + |> andThenInner + |> sequence andThenInners + +let concatenate<'T, 'TReturn> (recursions: (('T list -> 'TReturn) -> 'TReturn) list) (finalContinuation: 'T list -> 'TReturn) : 'TReturn = + let ultimateContinuation = List.concat >> finalContinuation + sequence recursions ultimateContinuation diff --git a/src/Compiler/Driver/GraphChecking/Continuation.fsi b/src/Compiler/Driver/GraphChecking/Continuation.fsi new file mode 100644 index 00000000000..18a626e7bc1 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/Continuation.fsi @@ -0,0 +1,23 @@ +[] +module internal Continuation + +/// This function sequences computations that have been expressed in continuation-passing style. +/// Concretely, when 'T is `int` as an example, can be expressed in continuation-passing style as a function, +/// taking as its input another function that is "how to proceed with a computation given the value of the integer", +/// and returning "the result of that computation". +/// That is, an integer is equivalently represented as a generic function (howToProceed : int -> 'TReturn) -> 'TReturn, +/// and the effect of the function corresponding to the integer 3 is simply to apply the input `howToProceed` to the value 3. +/// +/// The motivation for Continuation.sequence is most easily understood when it is viewed without its second argument: +/// it is a higher-order function that takes "a list of 'T expressed in continuation-passing style", and returns "a 'T list expressed in continuation-passing style". +/// The resulting "continuation-passing 'T list" operates by chaining the input 'Ts together, and finally returning the result of continuing the computation after first sequencing the inputs. +/// +/// Crucially, this technique can be used to enable unbounded recursion: +/// it constructs and invokes closures representing intermediate stages of the sequenced computation on the heap, rather than consuming space on the (more constrained) stack. 
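+///
+/// An illustrative usage (the helpers `three` and `four` are hypothetical, not part of this change), with 'T = int and 'TReturn = int:
+///   let three k = k 3
+///   let four k = k 4
+///   Continuation.sequence [ three; four ] List.sum   // evaluates to 7
+/// Here `three` and `four` are invoked in order, their results are gathered into the list [ 3; 4 ],
+/// and that list is passed to the final continuation (`List.sum`).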
+val sequence<'T, 'TReturn> : + recursions: (('T -> 'TReturn) -> 'TReturn) list -> finalContinuation: ('T list -> 'TReturn) -> 'TReturn + +/// Auxiliary function for `Continuation.sequence` that assumes the recursions return a 'T list. +/// In the final continuation the `'T list list` will first be concatenated into one list, before being passed to the (final) `continuation`. +val concatenate<'T, 'TReturn> : + recursions: (('T list -> 'TReturn) -> 'TReturn) list -> finalContinuation: ('T list -> 'TReturn) -> 'TReturn diff --git a/src/Compiler/Driver/GraphChecking/DependencyResolution.fs b/src/Compiler/Driver/GraphChecking/DependencyResolution.fs new file mode 100644 index 00000000000..78e312e5fee --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/DependencyResolution.fs @@ -0,0 +1,251 @@ +module internal FSharp.Compiler.GraphChecking.DependencyResolution + +open FSharp.Compiler.IO +open FSharp.Compiler.Syntax + +/// Find a path in the Trie. +/// This function could be cached in future if performance is an issue. +let queryTrie (trie: TrieNode) (path: LongIdentifier) : QueryTrieNodeResult = + let rec visit (currentNode: TrieNode) (path: LongIdentifier) = + match path with + | [] -> failwith "path should not be empty" + | [ lastNodeFromPath ] -> + match currentNode.Children.TryGetValue(lastNodeFromPath) with + | false, _ -> QueryTrieNodeResult.NodeDoesNotExist + | true, childNode -> + if Set.isEmpty childNode.Files then + QueryTrieNodeResult.NodeDoesNotExposeData + else + QueryTrieNodeResult.NodeExposesData(childNode.Files) + | currentPath :: restPath -> + match currentNode.Children.TryGetValue(currentPath) with + | false, _ -> QueryTrieNodeResult.NodeDoesNotExist + | true, childNode -> visit childNode restPath + + visit trie path + +let queryTrieMemoized (trie: TrieNode) : QueryTrie = + Internal.Utilities.Library.Tables.memoize (queryTrie trie) + +/// Process namespace declaration. +let processNamespaceDeclaration (queryTrie: QueryTrie) (path: LongIdentifier) (state: FileContentQueryState) : FileContentQueryState = + let queryResult = queryTrie path + + match queryResult with + | QueryTrieNodeResult.NodeDoesNotExist -> state + | QueryTrieNodeResult.NodeDoesNotExposeData -> state.AddOwnNamespace path + | QueryTrieNodeResult.NodeExposesData files -> state.AddOwnNamespace(path, files) + +/// Process an "open" statement. +/// The statement could link to files and/or should be tracked as an open namespace. +let processOpenPath (queryTrie: QueryTrie) (path: LongIdentifier) (state: FileContentQueryState) : FileContentQueryState = + let queryResult = queryTrie path + + match queryResult with + | QueryTrieNodeResult.NodeDoesNotExist -> state + | QueryTrieNodeResult.NodeDoesNotExposeData -> state.AddOpenNamespace path + | QueryTrieNodeResult.NodeExposesData files -> state.AddOpenNamespace(path, files) + +/// Process an identifier. +let processIdentifier (queryTrie: QueryTrie) (path: LongIdentifier) (state: FileContentQueryState) : FileContentQueryState = + let queryResult = queryTrie path + + match queryResult with + | QueryTrieNodeResult.NodeDoesNotExist -> state + | QueryTrieNodeResult.NodeDoesNotExposeData -> + // This can occur when you have a file that uses a known namespace (for example namespace System). + // When any other code uses that System namespace it won't find anything in the user code. 
+ state + | QueryTrieNodeResult.NodeExposesData files -> state.AddDependencies files + +/// Typically used to fold FileContentEntry items over a FileContentQueryState +let rec processStateEntry (queryTrie: QueryTrie) (state: FileContentQueryState) (entry: FileContentEntry) : FileContentQueryState = + match entry with + | FileContentEntry.TopLevelNamespace (topLevelPath, content) -> + let state = + match topLevelPath with + | [] -> state + | _ -> processNamespaceDeclaration queryTrie topLevelPath state + + List.fold (processStateEntry queryTrie) state content + + | FileContentEntry.OpenStatement path -> + // An open statement can directly reference a file or be a partial open statement. + // Both cases need to be processed. + let stateAfterFullOpenPath = processOpenPath queryTrie path state + + // Any existing open statement could be extended with the current path (if that node were to exist in the trie). + // The extended path could add a new link (in case of a module or namespace with types). + // It might also not add anything at all (in case the extended path is still a partial one). + (stateAfterFullOpenPath, state.OpenNamespaces) + ||> Set.fold (fun acc openNS -> processOpenPath queryTrie [ yield! openNS; yield! path ] acc) + + | FileContentEntry.PrefixedIdentifier path -> + match path with + | [] -> + // should not be possible though + state + | _ -> + // path could consist of multiple segments + (state, [| 1 .. path.Length |]) + ||> Array.fold (fun state takeParts -> + let path = List.take takeParts path + // process the name as if it were a fully qualified name + let stateAfterFullIdentifier = processIdentifier queryTrie path state + + // Process the name in combination with the existing open namespaces + (stateAfterFullIdentifier, state.OpenNamespaces) + ||> Set.fold (fun acc openNS -> processIdentifier queryTrie [ yield! openNS; yield! path ] acc)) + + | FileContentEntry.NestedModule (nestedContent = nestedContent) -> + // We don't want our current state to be affected by any open statements in the nested module + let nestedState = List.fold (processStateEntry queryTrie) state nestedContent + // Afterwards we are only interested in the dependencies found in the nested module + let foundDependencies = + Set.union state.FoundDependencies nestedState.FoundDependencies + + { state with + FoundDependencies = foundDependencies + } + +/// Return all files contained in the trie. +let filesInTrie (node: TrieNode) : Set<FileIndex> = + let rec collect (node: TrieNode) (continuation: FileIndex list -> FileIndex list) : FileIndex list = + let continuations: ((FileIndex list -> FileIndex list) -> FileIndex list) list = + [ + for node in node.Children.Values do + yield collect node + ] + + let finalContinuation indexes = + continuation [ yield! node.Files; yield! List.concat indexes ] + + Continuation.sequence continuations finalContinuation + + Set.ofList (collect node id) + +/// +/// For a given file's content, collect all missing ("ghost") file dependencies that the core resolution algorithm didn't return, +/// but are required to satisfy the type-checker. +/// +/// +/// Namespaces, contrary to modules, can be (and often are) defined in multiple files. +/// When a [partial] namespace is opened but unused, we want to avoid having to link to all the files that define it. +/// This is why, when: +/// - a file references a namespace, but does not explicitly reference anything within it, and +/// - the namespace does not contain any children that can be referenced implicitly (eg.
by type inference), +/// then the main resolution algorithm does not create a link to any file defining the namespace. +/// However, to satisfy the type-checker, the namespace must be resolved. +/// This function returns a list of extra dependencies that makes sure that any such namespaces can be resolved (if it exists). +/// For each unused open namespace we return one or more file links that define it. +/// +let collectGhostDependencies (fileIndex: FileIndex) (trie: TrieNode) (queryTrie: QueryTrie) (result: FileContentQueryState) = + // Go over all open namespaces, and assert all those links eventually went anywhere + Set.toArray result.OpenedNamespaces + |> Array.collect (fun path -> + match queryTrie path with + | QueryTrieNodeResult.NodeExposesData _ + | QueryTrieNodeResult.NodeDoesNotExist -> Array.empty + | QueryTrieNodeResult.NodeDoesNotExposeData -> + // At this point we are following up if an open namespace really lead nowhere. + let node = + let rec find (node: TrieNode) (path: LongIdentifier) = + match path with + | [] -> node + | head :: tail -> find node.Children[head] tail + + find trie path + + let filesDefiningNamespace = + filesInTrie node |> Set.filter (fun idx -> idx < fileIndex) + + let dependenciesDefiningNamespace = + Set.intersect result.FoundDependencies filesDefiningNamespace + + [| + if Set.isEmpty dependenciesDefiningNamespace then + // There is no existing dependency defining the namespace, + // so we need to add one. + if Set.isEmpty filesDefiningNamespace then + // No file defines inferrable symbols for this namespace, but the namespace might exist. + // Because we don't track what files define a namespace without any relevant content, + // the only way to ensure the namespace is in scope is to add a link to every preceding file. + yield! [| 0 .. (fileIndex - 1) |] + else + // At least one file defines the namespace - add a dependency to the first (top) one. + yield Seq.head filesDefiningNamespace + |]) + +let mkGraph (compilingFSharpCore: bool) (filePairs: FilePairMap) (files: FileInProject array) : Graph = + // We know that implementation files backed by signatures cannot be depended upon. + // Do not include them when building the Trie. + let trieInput = + files + |> Array.choose (fun f -> + match f.ParsedInput with + | ParsedInput.ImplFile _ when filePairs.HasSignature f.Idx -> None + | ParsedInput.ImplFile _ + | ParsedInput.SigFile _ -> Some f) + + let trie = TrieMapping.mkTrie trieInput + let queryTrie: QueryTrie = queryTrieMemoized trie + + let fileContents = files |> Array.Parallel.map FileContentMapping.mkFileContent + + let findDependencies (file: FileInProject) : FileIndex array = + let fileContent = fileContents[file.Idx] + + let knownFiles = [ 0 .. (file.Idx - 1) ] |> set + // File depends on all files above it that define accessible symbols at the root level (global namespace). + let filesFromRoot = trie.Files |> Set.filter (fun rootIdx -> rootIdx < file.Idx) + // Start by listing root-level dependencies. + let initialDepsResult = + (FileContentQueryState.Create file.Idx knownFiles filesFromRoot), fileContent + // Sequentially process all relevant entries of the file and keep updating the state and set of dependencies. + let depsResult = + initialDepsResult + // Seq is faster than List in this case. + ||> Seq.fold (processStateEntry queryTrie) + + // Add missing links for cases where an unused open namespace did not create a link. 
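+ // Example (hypothetical namespace): if this file contains `open X.Y` without using anything from it,
+ // and none of the dependencies found so far define X.Y, this pass adds an extra link - either to a file
+ // that defines X.Y, or to every preceding file when no such file is known - so the otherwise unused
+ // open statement can still be resolved by the type-checker.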
+ let ghostDependencies = collectGhostDependencies file.Idx trie queryTrie depsResult + + // Add a link from implementation files to their signature files. + let signatureDependency = + match filePairs.TryGetSignatureIndex file.Idx with + | None -> Array.empty + | Some sigIdx -> Array.singleton sigIdx + + // Files in FSharp.Core have an implicit dependency on `prim-types-prelude.fsi` - add it. + let fsharpCoreImplicitDependencies = + let filename = "prim-types-prelude.fsi" + + let implicitDepIdx = + files + |> Array.tryFindIndex (fun f -> FileSystemUtils.fileNameOfPath f.FileName = filename) + + [| + if compilingFSharpCore then + match implicitDepIdx with + | Some idx -> + if file.Idx > idx then + yield idx + | None -> + exn $"Expected to find file '{filename}' during compilation of FSharp.Core, but it was not found." + |> raise + |] + + let allDependencies = + [| + yield! depsResult.FoundDependencies + yield! ghostDependencies + yield! signatureDependency + yield! fsharpCoreImplicitDependencies + |] + |> Array.distinct + + allDependencies + + files + |> Array.Parallel.map (fun file -> file.Idx, findDependencies file) + |> readOnlyDict diff --git a/src/Compiler/Driver/GraphChecking/DependencyResolution.fsi b/src/Compiler/Driver/GraphChecking/DependencyResolution.fsi new file mode 100644 index 00000000000..8ffdf4a58c9 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/DependencyResolution.fsi @@ -0,0 +1,33 @@ +/// Logic for constructing a file dependency graph for the purposes of parallel type-checking. +module internal FSharp.Compiler.GraphChecking.DependencyResolution + +/// Query a TrieNode to find a certain path. +/// This code is only used directly in unit tests. +val queryTrie: trie: TrieNode -> path: LongIdentifier -> QueryTrieNodeResult + +/// Process an open path (found in the ParsedInput) with a given FileContentQueryState. +/// This code is only used directly in unit tests. +val processOpenPath: + queryTrie: QueryTrie -> path: LongIdentifier -> state: FileContentQueryState -> FileContentQueryState + +/// +/// Construct an approximate* dependency graph for files within a project, based on their ASTs. +/// +/// "Are we compiling FSharp.Core?" - used to add extra dependencies for FSharp.Core that are not otherwise detectable. +/// Maps the index of a signature file with the index of its implementation counterpart and vice versa. +/// The files inside a project. +/// A dictionary of FileIndex (alias for int) +/// +/// +/// *The constructed graph is a supergraph of the "necessary" file dependency graph, +/// ie. if file A is necessary to type-check file B, the resulting graph will contain edge B -> A. +/// The opposite is not true, ie. if file A is not necessary to type-check file B, the resulting graph *might* contain edge B -> A. +/// This is because the graph resolution algorithm has limited capability as it is based on ASTs alone. +/// +/// +/// The file order is used by the resolution algorithm to remove edges not allowed by the language. +/// Ie. if file B preceeds file A, the resulting graph will not contain edge B -> A. +/// Hence this function cannot, as it stands, be used to help create a "reasonable" file ordering for an unordered set of files. 
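+///
+/// For illustration (a hypothetical file set, not taken from this change): given files [| "A.fsi"; "A.fs"; "B.fs" |]
+/// where B.fs opens the module declared in A.fsi, the resulting graph would roughly be
+/// 0 -> [||], 1 -> [| 0 |], 2 -> [| 0 |]:
+/// B.fs links only to the signature A.fsi, and A.fs links to its own signature.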
+/// +/// +val mkGraph: compilingFSharpCore: bool -> filePairs: FilePairMap -> files: FileInProject array -> Graph diff --git a/src/Compiler/Driver/GraphChecking/Docs.md b/src/Compiler/Driver/GraphChecking/Docs.md new file mode 100644 index 00000000000..e38d251e8c0 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/Docs.md @@ -0,0 +1,332 @@ +## Parallel type-checking in FSharp +This document describes the idea and implementation details for parallel type-checking of independent files in the F# compiler. + +Performance of F# compilation and code analysis is one of the concerns for big codebases. +One way to speed it up was originally described in https://github.com/dotnet/fsharp/discussions/11634 by @kerams . +That is going to be the main topic of this page. +But before we dive into the details, let's first discuss how the things work at the moment. + +## Context and the current state of the compiler + +### Current state of type-checking + +One of the main phases of compilation is type-checking. Depending on the project in question, it can take as much as 50% of the total compilation time. +Currently, by default all files in a project are type-checked in sequence, one-by-one, leading to increased compilation wall-clock time. + +The same is true about code analysis (used by the IDEs), but to an even higher degree - since code analysis skips some of the expensive compilation phases, type-checking represents a bigger fraction of the total wall-clock time, hence any improvements in this area can lead to more drastic total time reduction. + +### Maintaining type-checking state + +There is a lot of information associated with type-checking individual files and groups of them. + +Currently, due to the (mostly) sequential nature of the processing, it is sufficient to maintain a single instance of such information for the whole project. +This instance is incrementally built on as more and more files have been processed. + +### Recent addition - "Parallel type checking for impl files with backing sig files" + +A recent [change](https://github.com/dotnet/fsharp/pull/13737) introduced in the compiler (not language service) added a level of parallelism in type-checking (behind an experimental feature flag). +It allows for parallel type-checking of implementation files backed by signature files. +Such files by definition cannot be depended upon by any other files w.r.t. type-checking, since all the necessary information is exposed by the corresponding `.fsi` files. + +The new feature, when enabled, allows partial parallelisation of type-checking as follows: +1. All `.fsi` files and `.fs` files without backing `.fsi` files are type-checked in sequence, as before. +2. Then all `.fs` files with backing `.fsi` files are type-checked in parallel. + +For a project that uses `.fsi` files throughout, such as the `FSharp.Compiler.Service` project, this presents a major speedup. + +Some data points: +- [Fantomas](https://github.com/fsprojects/fantomas) solution: total build time 17.49s -> 14.28s - [link](https://github.com/dotnet/fsharp/pull/13737#issuecomment-1223637818) +- F# codebase build time: 112s -> 92s - [link](https://github.com/dotnet/fsharp/pull/13737#issuecomment-1223386853) + +#### Enabling the feature +The feature is opt-in and can be enabled in the compiler via a CLI arg & MSBuild property. + +## The importance of using Server GC for parallel work + +By default .NET processes use Workstation GC, which is single-threaded. 
What this means is that it can become a bottleneck for highly parallel operations, due to increased GC pressure and the cost of GC pauses being multiplied by the number of threads waiting. +That is why, when increasing parallelisation of the compiler and the compiler service, it is important to note which GC mode is being used and to consider enabling Server GC. +This is no different for parallel type-checking - any performance tests of the feature should be done using Server GC. + +Below is an example showing the difference it can make for a parallel workflow. + +### Parallel projects analysis results for a synthetic solution +| GC Mode | Processing Mode | Time | +|-------------|-----------------|-----------------------------------------| +| Workstation | Sequential | 16005ms | +| Workstation | Parallel | 10849ms | +| Server | Sequential | 14594ms (-9% vs Workstation Sequential) | +| Server | Parallel | 2659ms (-75% vs Workstation Parallel) | + +For more details, see https://github.com/dotnet/fsharp/pull/13521 + +## Parallel type-checking of independent files + +The main idea we would like to present here for speeding up type-checking is quite simple: +- process files using a dependency graph instead of doing so in a sequential order +- based on AST information, quickly detect which files definitely do not depend on each other, trim the dependency graph and increase the parallelisation possible +- implement delta-based type-checking that allows building a 'fresh' TcState copy from a list of delta-based results. + +Below is some quasi-theoretical background on type-checking in general. + +### Background +Files in an F# project are ordered and processed from the top (first) to the bottom (last) file. +The compiler ensures that no information, including type information, flows upwards. + +Consider the following list of files in a project: +```fsharp +A.fs +B.fs +C.fs +D.fs +``` +By default, during compilation they are type-checked in the order of appearance: `[A.fs, B.fs, C.fs, D.fs]` + +Let's define `allowed dependency` as follows: +> If the contents of 'X.fs' _can_, based on its position in the project hierarchy, influence the type-checking process of 'Y.fs', then 'X.fs' -> 'Y.fs' is an _allowed dependency_ + +The _allowed dependencies_ graph for our sample project looks as follows: +``` +A.fs -> [] +B.fs -> [A.fs] +C.fs -> [B.fs; A.fs] +D.fs -> [C.fs; B.fs; A.fs] +``` + +Sequential type-checking of files in the appearance order guarantees that when processing a given file, all of its `allowed dependencies` w.r.t. type-checking have already been type-checked and their type information is available. + +### Necessary dependencies + +Let's define a `necessary dependency` too: +> File 'X.fs' _necessarily depends_ on file 'Y.fs' for type-checking purposes, if the lack of type-checking information from 'Y.fs' would influence the results of type-checking 'X.fs' + +And finally a `dependency graph` as follows: +> A _dependency graph_ is any graph that is a subset of the `allowed dependencies` graph and a superset of the `necessary dependencies` graph + +A few slightly imprecise/vague statements about all the graphs: +1. Any dependency graph is a directed acyclic graph (DAG). +2. The _necessary dependencies_ graph is a subgraph of the _allowed dependencies_ graph. +3. If there is no path between 'B.fs' and 'C.fs' in the _necessary dependencies_ graph, they can in principle be type-checked in parallel (as long as there is a way to maintain more than one instance of type-checking information). +4.
Type-checking _must_ process files in an order that is compatible with a topological ordering of the _necessary dependencies_ graph. +5. If using a dependency graph as an ordering mechanism for (parallel) type-checking, the closer it is to the _necessary dependencies_ graph, the more parallelism is possible. +6. Type-checking files in appearance order is equivalent to using the `allowed dependencies` graph for ordering. +7. Removing an edge from the _dependency_ graph used _can_ increase (but not decrease) the level of parallelism possible and improve the wall-clock time of parallel type-checking. + +Let's look at point `7.` in more detail. + +### The impact of reducing the dependency graph on type-checking parallelisation and wall-clock time + +Let us make a few definitions and simplifications: +1. Time it takes to type-check file f = `T(f)` +2. Time it takes to type-check files f1...fn in parallel = `T(f1+...+fn)` +3. Time it takes to type-check a file f and all its dependencies = `D(f)` +4. Time it takes to type-check the graph G = `D(G)` +5. Type-checking is performed on a machine with an infinite number of parallel processors. +6. There are no slowdowns due to parallel processing, ie. `T(f1+...+fn) = max(T(f1),...,T(fn))` + +With the above it can be observed that: +``` +D(G) = max(D(f)), over all files 'f' + +and + +D(f) = T(f) + max(D(n)), over all necessary dependencies 'n' of 'f' +``` +In other words, the wall-clock time for type-checking using a given dependency graph is equal to the "longest" path in the graph. + +For the _allowed dependencies_ graph the following holds: +``` +D(f) = T(f) + sum(T(g)), for all files 'g' above file 'f' +``` +In other words, the longest path's length = the sum of times to type-check all individual files. + +Therefore the change that parallel type-checking brings is to replace the _allowed dependencies_ graph currently used with a reduced graph that: +- is much more similar to the _necessary dependencies_ graph, and +- therefore yields a smaller value of `D(G)`. + +## A way to reduce the dependency graph used + +For all practical purposes, the only way to calculate the _necessary dependencies_ graph fully accurately is to perform the type-checking process itself, which would defeat the purpose of this exercise. + +However, there exist cheaper solutions that reduce the initial graph significantly at a low computational cost, providing a good trade-off. + +As noted in https://github.com/dotnet/fsharp/discussions/11634, scanning the ASTs can provide a lot of information that helps narrow down the set of types, modules/namespaces and files that a given file _might_ depend on. + +This is the approach used in this solution. + +The dependency detection algorithm can be summarised as follows: +1. Process each file's AST in parallel and extract the following information: + 1. Top-level modules and namespaces. Consider `AutoOpen` modules. + 2. Opens, partial module/namespace references. Consider module abbreviations, partial opens etc. + 3. Prefixed identifiers (for example, `System.Console` in `System.Console.Write("")`). + 4. Nested modules. +2. Build a single [Trie](https://en.wikipedia.org/wiki/Trie) composed of all the found namespaces and (nested) modules. + Inside each node, we keep track of which file indices contributed to its existence. + Note that a `Trie` has a special `Root` node. This node can be populated by top-level `AutoOpen` modules or `global` namespaces. + Note that if a file is backed by a signature, only the signature contributes nodes to the Trie.
+3. For each file, in parallel: + 1. Process all the file content entries extracted in step 1. For each file content entry, query the global Trie to see if the reference points to any file. + Given a list of files found in the Trie nodes, add links to those that precede the current file. + - Files found in the `Root` node are always considered a match, as they represent top-level, always available symbols. + - When a file is backed by a signature, automatically add a link to its signature file. + +### Edge-case 1. - `[<AutoOpen>]` + +Modules with `[<AutoOpen>]` are in a way 'transparent', meaning that all the types/nested modules inside them are surfaced as if they were one level above. + +The dependency algorithm takes this into account. +If a top-level module (eg. `module Utilities`) contains an `[<AutoOpen>]` attribute, its contents are automatically available. +To take that into account, any file containing a top-level `[<AutoOpen>]` module is added to the `Root` node. + +The main problem with that is that `System.AutoOpenAttribute` could be aliased and hidden behind a different name. +Therefore it's not easy to see whether the attribute is being used based purely on the AST. + +There are ways to evaluate this, which involve scanning all type abbreviations in the project and in any referenced dlls. +However, currently the algorithm uses a shortcut: it checks whether the attribute type name is on a hardcoded list of "suspicious" names. This is not fully reliable, as an arbitrary type alias, eg. `type X = System.AutoOpenAttribute`, will not be recognised correctly. + +To overcome this limitation, we decided to discourage users from creating such aliases. +We now emit a warning when an alias for `AutoOpenAttribute` is found. + +Note that we do not process nested `[<AutoOpen>]` modules - this is because as soon as the top-level module is considered 'potentially used', the whole file is marked as a dependency. +An example explaining this: +```fsharp +namespace A + +// If the algorithm determines module A.B is 'potentially used' in another file, there is no need to inspect its contents. +module B = + + module C = + + // In particular there is no need to check this AutoOpen attribute + [<AutoOpen>] + module D = + () +``` + +### Edge-case 2. - module abbreviations +Initially there was some concern about the impact of module abbreviations on the dependency tracking algorithm. +However, module abbreviations do not actually require any special handling in the current algorithm. + +Consider the following example: +``` +// F1.fs +module A +module B = let x = 1 + +// F2.fs +module C +open A +module D = B +``` +Here, the line `module D = B` generates the following link: `F2.fs -> F1.fs`. +Any files that might make use of the abbreviation require a dependency on `F2.fs`, which in turn creates an indirect dependency on `F1.fs`. +Therefore no special handling is required for this scenario. + +### Optimising the graph for files with shared namespaces +One common namespace setup in an F# project involves sharing a namespace across multiple files. +The problem this creates is that normally every `open` statement against that namespace would create a link to all the files defining it. + +To help reduce the dependency graph, we detect scenarios in which the namespace itself does not contain any type definitions. +In such cases, the Trie node referring to that namespace does not link to any files directly (but its subnodes might).
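+The description above assumes a Trie node shape roughly like the sketch below. The field names mirror the `Children` and `Files` members used by `queryTrie` earlier in this change, but this is a simplified model rather than the exact compiler type:
+
+```fsharp
+open System.Collections.Generic
+
+type FileIndex = int
+
+/// Simplified Trie node: each namespace/module segment maps to a child node,
+/// and Files lists the files that expose data at exactly this node.
+type TrieNode =
+    { Children: Dictionary<string, TrieNode>
+      Files: Set<FileIndex> }
+```
+
+In the example below, only the leaf module nodes carry file links; the intermediate namespace nodes have empty `Files` sets.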
+ +Consider the following: + +`A.fs` +```fsharp +module Foo.Bar.A + +let a = 0 +``` + +`B.fs` +```fsharp +module Foo.Bar.B + +let b = 1 +``` + +Such a setup creates the following Trie contents: + +```mermaid +graph TB +R("Root: []") +Foo("namespace Foo: []") +Bar("namespace Bar: []") +A("module A: [ A.fs ]") +B("module B: [ B.fs ]") +R --- Foo +Foo --- Bar +Bar --- A +Bar --- B +``` +Note that the `Foo` and `Bar` namespaces do not link to any files. + +However, this can lead to type-checking errors. +Consider the following: + +`X.fs` +```fsharp +namespace X +``` + +`Y.fs` +```fsharp +namespace Y + +open X // This open statement is unnecessary, however it is valid F# code. +``` + +which leads to the following Trie: + +```mermaid +graph TB +R("Root: []") +X("namespace X: []") +Y("namespace Y: []") +R --- X +R --- Y +``` + +To satisfy the type-checker when unused `open` statements are used, we need to make sure that at least one defining `namespace X` is a dependency of `Y.fs`. +We call such dependencies added outside of the main dependency resolution algorithm `ghost dependencies`. + +### Performance +There are two main factors w.r.t. performance of the graph-based type-checking: +1. The level of parallelisation allowed by the resolved dependency graph. +2. The overhead of creating the dependency graph and graph-based processing of the graph. +At minimum, to make this feature useful, any overhead (2.) cost should in the vast majority of use cases be significantly lower than the speedup generated by 1. + +Initial timings showed that the graph-based type-checking was significantly faster than sequential type-checking and faster than the two-phase type-checking feature. +Projects that were tested included: +- `FSharp.Compiler.Service` +- `Fantomas.Core` +- `FSharp.Compiler.ComponentTests` + +Below are initial results for an early version of the algorithm: + +``` +BenchmarkDotNet=v0.13.2, OS=Windows 11 (10.0.22621.1105) +12th Gen Intel Core i7-12700K, 1 CPU, 20 logical and 12 physical cores +.NET SDK=7.0.102 + [Host] : .NET 7.0.2 (7.0.222.60605), X64 RyuJIT AVX2 DEBUG + DefaultJob : .NET 7.0.2 (7.0.222.60605), X64 RyuJIT AVX2 + + +| Method | GraphTypeChecking | Mean | Error | StdDev | Gen0 | Gen1 | Gen2 | Allocated | +|---------------------- |------------------ |--------:|--------:|--------:|------------:|-----------:|----------:|----------:| +| FSharpPlus | False | 32.22 s | 0.615 s | 0.708 s | 202000.0000 | 10000.0000 | 4000.0000 | 51.38 GB | +| FSharpCompilerService | False | 18.59 s | 0.192 s | 0.180 s | 10000.0000 | 4000.0000 | 2000.0000 | 21.03 GB | +| FSharpPlus | True | 30.86 s | 0.352 s | 0.275 s | 196000.0000 | 10000.0000 | 3000.0000 | 51.4 GB | +| FSharpCompilerService | True | 10.88 s | 0.154 s | 0.144 s | 10000.0000 | 4000.0000 | 2000.0000 | 21.32 GB | +``` + +## The problem of maintaining multiple instances of type-checking information + +The parallel type-checking idea generates a problem that needs to be solved. +Instead of one instance of the type-checking information, we now have to maintain multiple instances - one for each node in the graph. +We solve it in the following way: +1. Each file's type-checking results in a 'delta' function `'State -> 'State` which adds information to the state. +2. When type-checking a new file, its input state is built from scratch by evaluating delta functions of all its dependencies. 
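+A minimal sketch of this delta-based approach (the `TcState` shape and helper names are illustrative, not the actual compiler types):
+
+```fsharp
+// Illustrative state plus the per-file "delta" that type-checking a file produces.
+type TcState = { CheckedFiles: int list }
+type Delta = TcState -> TcState
+
+/// Rebuild the input state for a file by applying the deltas of its dependencies, in file order.
+let buildInputState (initialState: TcState) (dependencyDeltas: Delta list) : TcState =
+    (initialState, dependencyDeltas) ||> List.fold (fun state delta -> delta state)
+
+/// A file's own checking result is again a delta, e.g. "record that this file index was checked".
+let checkFile (fileIdx: int) : Delta =
+    fun state -> { state with CheckedFiles = fileIdx :: state.CheckedFiles }
+```
+
+Because each file folds only the deltas of its own dependencies, files that do not depend on each other can be checked concurrently, each against its own freshly built input state.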
+ +### Ordering of diagnostics/errors + +Any changes in scheduling of work that can produce diagnostics can change the order in which diagnostics appear to the end user. To retain existing ordering of diagnostics, we use a mechanism where each work item first uses a dedicated logger, and at the end individual loggers are sequentially replayed into the single logger, in the desired order. This mechanism is used in a few places in the compiler already. diff --git a/src/Compiler/Driver/GraphChecking/FileContentMapping.fs b/src/Compiler/Driver/GraphChecking/FileContentMapping.fs new file mode 100644 index 00000000000..12dd8b9c004 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/FileContentMapping.fs @@ -0,0 +1,649 @@ +module internal rec FSharp.Compiler.GraphChecking.FileContentMapping + +open FSharp.Compiler.Syntax +open FSharp.Compiler.SyntaxTreeOps + +type Continuations = ((FileContentEntry list -> FileContentEntry list) -> FileContentEntry list) list + +/// Collect a list of 'U from option 'T via a mapping function. +let collectFromOption (mapping: 'T -> 'U list) (t: 'T option) : 'U list = List.collect mapping (Option.toList t) + +let longIdentToPath (skipLast: bool) (longId: LongIdent) : LongIdentifier = + if skipLast then + List.take (longId.Length - 1) longId + else + longId + |> List.map (fun ident -> ident.idText) + +let synLongIdentToPath (skipLast: bool) (synLongIdent: SynLongIdent) = + longIdentToPath skipLast synLongIdent.LongIdent + +let visitSynLongIdent (lid: SynLongIdent) : FileContentEntry list = visitLongIdent lid.LongIdent + +let visitLongIdent (lid: LongIdent) = + match lid with + | [] + | [ _ ] -> [] + | lid -> [ FileContentEntry.PrefixedIdentifier(longIdentToPath true lid) ] + +let visitLongIdentForModuleAbbrev (lid: LongIdent) = + match lid with + | [] -> [] + | lid -> [ FileContentEntry.PrefixedIdentifier(longIdentToPath false lid) ] + +let visitSynAttribute (a: SynAttribute) : FileContentEntry list = + [ yield! visitSynLongIdent a.TypeName; yield! visitSynExpr a.ArgExpr ] + +let visitSynAttributeList (attributes: SynAttributeList) : FileContentEntry list = + List.collect visitSynAttribute attributes.Attributes + +let visitSynAttributes (attributes: SynAttributes) : FileContentEntry list = + List.collect visitSynAttributeList attributes + +let visitSynModuleDecl (decl: SynModuleDecl) : FileContentEntry list = + [ + match decl with + | SynModuleDecl.Open(target = SynOpenDeclTarget.ModuleOrNamespace (longId, _)) -> + yield FileContentEntry.OpenStatement(synLongIdentToPath false longId) + | SynModuleDecl.Open(target = SynOpenDeclTarget.Type (typeName, _)) -> yield! visitSynType typeName + | SynModuleDecl.Attributes (attributes, _) -> yield! List.collect visitSynAttributeList attributes + | SynModuleDecl.Expr (expr, _) -> yield! visitSynExpr expr + | SynModuleDecl.NestedModule (moduleInfo = SynComponentInfo (longId = [ ident ]; attributes = attributes); decls = decls) -> + yield! visitSynAttributes attributes + yield FileContentEntry.NestedModule(ident.idText, List.collect visitSynModuleDecl decls) + | SynModuleDecl.NestedModule _ -> failwith "A nested module cannot have multiple identifiers" + | SynModuleDecl.Let (bindings = bindings) -> yield! List.collect visitBinding bindings + | SynModuleDecl.Types (typeDefns = typeDefns) -> yield! List.collect visitSynTypeDefn typeDefns + | SynModuleDecl.HashDirective _ -> () + | SynModuleDecl.ModuleAbbrev (longId = longId) -> yield! 
visitLongIdentForModuleAbbrev longId + | SynModuleDecl.NamespaceFragment _ -> () + | SynModuleDecl.Exception(exnDefn = SynExceptionDefn (exnRepr = SynExceptionDefnRepr (attributes = attributes + caseName = caseName + longId = longId) + members = members)) -> + yield! visitSynAttributes attributes + yield! visitSynUnionCase caseName + yield! collectFromOption visitLongIdent longId + yield! List.collect visitSynMemberDefn members + ] + +let visitSynModuleSigDecl (md: SynModuleSigDecl) = + [ + match md with + | SynModuleSigDecl.Open(target = SynOpenDeclTarget.ModuleOrNamespace (longId, _)) -> + yield FileContentEntry.OpenStatement(synLongIdentToPath false longId) + | SynModuleSigDecl.Open(target = SynOpenDeclTarget.Type (typeName, _)) -> yield! visitSynType typeName + | SynModuleSigDecl.NestedModule (moduleInfo = SynComponentInfo (longId = [ ident ]; attributes = attributes); moduleDecls = decls) -> + yield! visitSynAttributes attributes + yield FileContentEntry.NestedModule(ident.idText, List.collect visitSynModuleSigDecl decls) + | SynModuleSigDecl.NestedModule _ -> failwith "A nested module cannot have multiple identifiers" + | SynModuleSigDecl.ModuleAbbrev (longId = longId) -> yield! visitLongIdentForModuleAbbrev longId + | SynModuleSigDecl.Val (valSig, _) -> yield! visitSynValSig valSig + | SynModuleSigDecl.Types (types = types) -> yield! List.collect visitSynTypeDefnSig types + | SynModuleSigDecl.Exception(exnSig = SynExceptionSig (exnRepr = SynExceptionDefnRepr (attributes = attributes + caseName = caseName + longId = longId) + members = members)) -> + yield! visitSynAttributes attributes + yield! visitSynUnionCase caseName + yield! collectFromOption visitLongIdent longId + yield! List.collect visitSynMemberSig members + | SynModuleSigDecl.HashDirective _ + | SynModuleSigDecl.NamespaceFragment _ -> () + ] + +let visitSynUnionCase (SynUnionCase (attributes = attributes; caseType = caseType)) = + [ + yield! visitSynAttributes attributes + match caseType with + | SynUnionCaseKind.Fields cases -> yield! List.collect visitSynField cases + | SynUnionCaseKind.FullType (fullType = fullType) -> yield! visitSynType fullType + ] + +let visitSynEnumCase (SynEnumCase (attributes = attributes)) = visitSynAttributes attributes + +let visitSynTypeDefn + (SynTypeDefn (typeInfo = SynComponentInfo (attributes = attributes; typeParams = typeParams; constraints = constraints) + typeRepr = typeRepr + members = members)) + : FileContentEntry list = + [ + yield! visitSynAttributes attributes + yield! collectFromOption visitSynTyparDecls typeParams + yield! List.collect visitSynTypeConstraint constraints + match typeRepr with + | SynTypeDefnRepr.Simple (simpleRepr, _) -> + match simpleRepr with + | SynTypeDefnSimpleRepr.Union (unionCases = unionCases) -> yield! List.collect visitSynUnionCase unionCases + | SynTypeDefnSimpleRepr.Enum (cases = cases) -> yield! List.collect visitSynEnumCase cases + | SynTypeDefnSimpleRepr.Record (recordFields = recordFields) -> yield! List.collect visitSynField recordFields + // This is only used in the typed tree + // The parser doesn't construct this + | SynTypeDefnSimpleRepr.General _ + | SynTypeDefnSimpleRepr.LibraryOnlyILAssembly _ -> () + | SynTypeDefnSimpleRepr.TypeAbbrev (rhsType = rhsType) -> yield! 
visitSynType rhsType + | SynTypeDefnSimpleRepr.None _ + // This is only used in the typed tree + // The parser doesn't construct this + | SynTypeDefnSimpleRepr.Exception _ -> () + | SynTypeDefnRepr.ObjectModel (kind, members, _) -> + match kind with + | SynTypeDefnKind.Delegate (signature, _) -> + yield! visitSynType signature + yield! List.collect visitSynMemberDefn members + | _ -> yield! List.collect visitSynMemberDefn members + | SynTypeDefnRepr.Exception _ -> + // This is only used in the typed tree + // The parser doesn't construct this + () + yield! List.collect visitSynMemberDefn members + ] + +let visitSynTypeDefnSig + (SynTypeDefnSig (typeInfo = SynComponentInfo (attributes = attributes; typeParams = typeParams; constraints = constraints) + typeRepr = typeRepr + members = members)) + = + [ + yield! visitSynAttributes attributes + yield! collectFromOption visitSynTyparDecls typeParams + yield! List.collect visitSynTypeConstraint constraints + match typeRepr with + | SynTypeDefnSigRepr.Simple (simpleRepr, _) -> + match simpleRepr with + | SynTypeDefnSimpleRepr.Union (unionCases = unionCases) -> yield! List.collect visitSynUnionCase unionCases + | SynTypeDefnSimpleRepr.Enum (cases = cases) -> yield! List.collect visitSynEnumCase cases + | SynTypeDefnSimpleRepr.Record (recordFields = recordFields) -> yield! List.collect visitSynField recordFields + // This is only used in the typed tree + // The parser doesn't construct this + | SynTypeDefnSimpleRepr.General _ + | SynTypeDefnSimpleRepr.LibraryOnlyILAssembly _ -> () + | SynTypeDefnSimpleRepr.TypeAbbrev (rhsType = rhsType) -> yield! visitSynType rhsType + | SynTypeDefnSimpleRepr.None _ + // This is only used in the typed tree + // The parser doesn't construct this + | SynTypeDefnSimpleRepr.Exception _ -> () + | SynTypeDefnSigRepr.ObjectModel (kind, members, _) -> + match kind with + | SynTypeDefnKind.Delegate (signature, _) -> + yield! visitSynType signature + yield! List.collect visitSynMemberSig members + | _ -> yield! List.collect visitSynMemberSig members + | SynTypeDefnSigRepr.Exception _ -> + // This is only used in the typed tree + // The parser doesn't construct this + () + yield! List.collect visitSynMemberSig members + ] + +let visitSynValSig (SynValSig (attributes = attributes; synType = synType; synExpr = synExpr)) = + [ + yield! visitSynAttributes attributes + yield! visitSynType synType + yield! collectFromOption visitSynExpr synExpr + ] + +let visitSynField (SynField (attributes = attributes; fieldType = fieldType)) = + [ yield! visitSynAttributes attributes; yield! visitSynType fieldType ] + +let visitSynMemberDefn (md: SynMemberDefn) : FileContentEntry list = + [ + match md with + | SynMemberDefn.Member (memberDefn = binding) -> yield! visitBinding binding + | SynMemberDefn.Open _ -> () + | SynMemberDefn.GetSetMember (memberDefnForGet, memberDefnForSet, _, _) -> + yield! collectFromOption visitBinding memberDefnForGet + yield! collectFromOption visitBinding memberDefnForSet + | SynMemberDefn.ImplicitCtor (ctorArgs = ctorArgs) -> yield! visitSynSimplePats ctorArgs + | SynMemberDefn.ImplicitInherit (inheritType, inheritArgs, _, _) -> + yield! visitSynType inheritType + yield! visitSynExpr inheritArgs + | SynMemberDefn.LetBindings (bindings = bindings) -> yield! List.collect visitBinding bindings + | SynMemberDefn.AbstractSlot (slotSig = slotSig) -> yield! visitSynValSig slotSig + | SynMemberDefn.Interface (interfaceType, _, members, _) -> + yield! visitSynType interfaceType + yield! 
collectFromOption (List.collect visitSynMemberDefn) members + | SynMemberDefn.Inherit (baseType, _, _) -> yield! visitSynType baseType + | SynMemberDefn.ValField (fieldInfo, _) -> yield! visitSynField fieldInfo + | SynMemberDefn.NestedType _ -> () + | SynMemberDefn.AutoProperty (attributes = attributes; typeOpt = typeOpt; synExpr = synExpr) -> + yield! visitSynAttributes attributes + yield! collectFromOption visitSynType typeOpt + yield! visitSynExpr synExpr + ] + +let visitSynInterfaceImpl (SynInterfaceImpl (interfaceTy = t; bindings = bindings; members = members)) = + [ + yield! visitSynType t + yield! List.collect visitBinding bindings + yield! List.collect visitSynMemberDefn members + ] + +let visitSynType (t: SynType) : FileContentEntry list = + let rec visit (t: SynType) (continuation: FileContentEntry list -> FileContentEntry list) = + match t with + | SynType.LongIdent lid -> continuation (visitSynLongIdent lid) + | SynType.App (typeName = typeName; typeArgs = typeArgs) -> + let continuations = List.map visit (typeName :: typeArgs) + Continuation.concatenate continuations continuation + | SynType.LongIdentApp (typeName = typeName; longDotId = longDotId; typeArgs = typeArgs) -> + let continuations = List.map visit (typeName :: typeArgs) + + let finalContinuation nodes = + visitSynLongIdent longDotId @ List.concat nodes |> continuation + + Continuation.sequence continuations finalContinuation + | SynType.Tuple (path = path) -> + let continuations = List.map visit (getTypeFromTuplePath path) + Continuation.concatenate continuations continuation + | SynType.AnonRecd (fields = fields) -> + let continuations = List.map (snd >> visit) fields + Continuation.concatenate continuations continuation + | SynType.Array (elementType = elementType) -> visit elementType continuation + | SynType.Fun (argType, returnType, _, _) -> + let continuations = List.map visit [ argType; returnType ] + Continuation.concatenate continuations continuation + | SynType.Var _ -> continuation [] + | SynType.Anon _ -> continuation [] + | SynType.WithGlobalConstraints (typeName, constraints, _) -> + visit typeName (fun nodes -> nodes @ List.collect visitSynTypeConstraint constraints |> continuation) + | SynType.HashConstraint (innerType, _) -> visit innerType continuation + | SynType.MeasurePower (baseMeasure = baseMeasure) -> visit baseMeasure continuation + | SynType.StaticConstant _ -> continuation [] + | SynType.StaticConstantExpr (expr, _) -> continuation (visitSynExpr expr) + | SynType.StaticConstantNamed (ident, value, _) -> + let continuations = List.map visit [ ident; value ] + Continuation.concatenate continuations continuation + | SynType.Paren (innerType, _) -> visit innerType continuation + | SynType.SignatureParameter (attributes = attributes; usedType = usedType) -> + visit usedType (fun nodes -> [ yield! visitSynAttributes attributes; yield! nodes ] |> continuation) + | SynType.Or (lhsType, rhsType, _, _) -> + let continuations = List.map visit [ lhsType; rhsType ] + Continuation.concatenate continuations continuation + + visit t id + +let visitSynValTyparDecls (SynValTyparDecls (typars = typars)) = + collectFromOption visitSynTyparDecls typars + +let visitSynTyparDecls (td: SynTyparDecls) : FileContentEntry list = + match td with + | SynTyparDecls.PostfixList (decls, constraints, _) -> + [ + yield! List.collect visitSynTyparDecl decls + yield! 
List.collect visitSynTypeConstraint constraints + ] + | SynTyparDecls.PrefixList (decls = decls) -> List.collect visitSynTyparDecl decls + | SynTyparDecls.SinglePrefix (decl = decl) -> visitSynTyparDecl decl + +let visitSynTyparDecl (SynTyparDecl (attributes = attributes)) = visitSynAttributes attributes + +let visitSynTypeConstraint (tc: SynTypeConstraint) : FileContentEntry list = + [ + match tc with + | SynTypeConstraint.WhereSelfConstrained _ + | SynTypeConstraint.WhereTyparIsValueType _ + | SynTypeConstraint.WhereTyparIsReferenceType _ + | SynTypeConstraint.WhereTyparIsUnmanaged _ + | SynTypeConstraint.WhereTyparSupportsNull _ + | SynTypeConstraint.WhereTyparIsComparable _ + | SynTypeConstraint.WhereTyparIsEquatable _ -> () + | SynTypeConstraint.WhereTyparDefaultsToType (typeName = typeName) -> yield! visitSynType typeName + | SynTypeConstraint.WhereTyparSubtypeOfType (typeName = typeName) -> yield! visitSynType typeName + | SynTypeConstraint.WhereTyparSupportsMember (typars, memberSig, _) -> + yield! visitSynType typars + yield! visitSynMemberSig memberSig + | SynTypeConstraint.WhereTyparIsEnum (typeArgs = typeArgs) -> yield! List.collect visitSynType typeArgs + | SynTypeConstraint.WhereTyparIsDelegate (typeArgs = typeArgs) -> yield! List.collect visitSynType typeArgs + ] + +let visitSynExpr (e: SynExpr) : FileContentEntry list = + let rec visit (e: SynExpr) (continuation: FileContentEntry list -> FileContentEntry list) : FileContentEntry list = + match e with + | SynExpr.Const _ -> continuation [] + | SynExpr.Paren (expr = expr) -> visit expr continuation + | SynExpr.Quote (operator = operator; quotedExpr = quotedExpr) -> + visit operator (fun operatorNodes -> visit quotedExpr (fun quotedNodes -> operatorNodes @ quotedNodes |> continuation)) + | SynExpr.Typed (expr, targetType, _) -> visit expr (fun nodes -> nodes @ visitSynType targetType |> continuation) + | SynExpr.Tuple (exprs = exprs) -> + let continuations: ((FileContentEntry list -> FileContentEntry list) -> FileContentEntry list) list = + List.map visit exprs + + Continuation.concatenate continuations continuation + | SynExpr.AnonRecd (copyInfo = copyInfo; recordFields = recordFields) -> + let continuations = + match copyInfo with + | None -> List.map (fun (_, _, e) -> visit e) recordFields + | Some (cp, _) -> visit cp :: List.map (fun (_, _, e) -> visit e) recordFields + + Continuation.concatenate continuations continuation + | SynExpr.ArrayOrList (exprs = exprs) -> + let continuations = List.map visit exprs + Continuation.concatenate continuations continuation + | SynExpr.Record (baseInfo = baseInfo; copyInfo = copyInfo; recordFields = recordFields) -> + let fieldNodes = + [ + for SynExprRecordField (fieldName = (si, _); expr = expr) in recordFields do + yield! visitSynLongIdent si + yield! collectFromOption visitSynExpr expr + ] + + match baseInfo, copyInfo with + | Some (t, e, _, _, _), None -> + visit e (fun nodes -> [ yield! visitSynType t; yield! nodes; yield! fieldNodes ] |> continuation) + | None, Some (e, _) -> visit e (fun nodes -> nodes @ fieldNodes |> continuation) + | _ -> continuation fieldNodes + | SynExpr.New (targetType = targetType; expr = expr) -> visit expr (fun nodes -> visitSynType targetType @ nodes |> continuation) + | SynExpr.ObjExpr (objType, argOptions, _, bindings, members, extraImpls, _, _) -> + [ + yield! visitSynType objType + yield! collectFromOption (fst >> visitSynExpr) argOptions + yield! List.collect visitBinding bindings + yield! List.collect visitSynMemberDefn members + yield! 
List.collect visitSynInterfaceImpl extraImpls + ] + |> continuation + | SynExpr.While (whileExpr = whileExpr; doExpr = doExpr) -> + visit whileExpr (fun whileNodes -> visit doExpr (fun doNodes -> whileNodes @ doNodes |> continuation)) + | SynExpr.For (identBody = identBody; toBody = toBody; doBody = doBody) -> + let continuations = List.map visit [ identBody; toBody; doBody ] + Continuation.concatenate continuations continuation + | SynExpr.ForEach (pat = pat; enumExpr = enumExpr; bodyExpr = bodyExpr) -> + visit enumExpr (fun enumNodes -> + visit bodyExpr (fun bodyNodes -> [ yield! visitPat pat; yield! enumNodes; yield! bodyNodes ] |> continuation)) + | SynExpr.ArrayOrListComputed (expr = expr) -> visit expr continuation + | SynExpr.IndexRange (expr1 = expr1; expr2 = expr2) -> + match expr1, expr2 with + | None, None -> continuation [] + | Some e, None + | None, Some e -> visit e continuation + | Some e1, Some e2 -> visit e1 (fun e1Nodes -> visit e2 (fun e2Nodes -> e1Nodes @ e2Nodes |> continuation)) + | SynExpr.IndexFromEnd (expr, _) -> visit expr continuation + | SynExpr.ComputationExpr (expr = expr) -> visit expr continuation + | SynExpr.Lambda (args = args; body = body) -> visit body (fun bodyNodes -> visitSynSimplePats args @ bodyNodes |> continuation) + | SynExpr.MatchLambda (matchClauses = clauses) -> List.collect visitSynMatchClause clauses |> continuation + | SynExpr.Match (expr = expr; clauses = clauses) -> + visit expr (fun exprNodes -> + [ yield! exprNodes; yield! List.collect visitSynMatchClause clauses ] + |> continuation) + | SynExpr.Do (expr, _) -> visit expr continuation + | SynExpr.Assert (expr, _) -> visit expr continuation + | SynExpr.App (funcExpr = funcExpr; argExpr = argExpr) -> + visit funcExpr (fun funcNodes -> visit argExpr (fun argNodes -> funcNodes @ argNodes |> continuation)) + | SynExpr.TypeApp (expr = expr; typeArgs = typeArgs) -> + visit expr (fun exprNodes -> exprNodes @ List.collect visitSynType typeArgs |> continuation) + | SynExpr.LetOrUse (bindings = bindings; body = body) -> + visit body (fun nodes -> List.collect visitBinding bindings @ nodes |> continuation) + | SynExpr.TryWith (tryExpr = tryExpr; withCases = withCases) -> + visit tryExpr (fun nodes -> nodes @ List.collect visitSynMatchClause withCases |> continuation) + | SynExpr.TryFinally (tryExpr = tryExpr; finallyExpr = finallyExpr) -> + visit tryExpr (fun tNodes -> visit finallyExpr (fun fNodes -> tNodes @ fNodes |> continuation)) + | SynExpr.Lazy (expr, _) -> visit expr continuation + | SynExpr.Sequential (expr1 = expr1; expr2 = expr2) -> + visit expr1 (fun nodes1 -> visit expr2 (fun nodes2 -> nodes1 @ nodes2 |> continuation)) + | SynExpr.IfThenElse (ifExpr = ifExpr; thenExpr = thenExpr; elseExpr = elseExpr) -> + let continuations = List.map visit (ifExpr :: thenExpr :: Option.toList elseExpr) + Continuation.concatenate continuations continuation + | SynExpr.Typar _ -> continuation [] + | SynExpr.Ident _ -> continuation [] + | SynExpr.LongIdent (longDotId = longDotId) -> continuation (visitSynLongIdent longDotId) + | SynExpr.LongIdentSet (longDotId, expr, _) -> visit expr (fun nodes -> visitSynLongIdent longDotId @ nodes |> continuation) + | SynExpr.DotGet (expr = expr; longDotId = longDotId) -> + visit expr (fun nodes -> visitSynLongIdent longDotId @ nodes |> continuation) + | SynExpr.DotSet (targetExpr, longDotId, rhsExpr, _) -> + visit targetExpr (fun tNodes -> + visit rhsExpr (fun rNodes -> + [ yield! tNodes; yield! visitSynLongIdent longDotId; yield! 
rNodes ] + |> continuation)) + | SynExpr.Set (targetExpr, rhsExpr, _) -> + let continuations = List.map visit [ targetExpr; rhsExpr ] + Continuation.concatenate continuations continuation + | SynExpr.DotIndexedGet (objectExpr, indexArgs, _, _) -> + let continuations = List.map visit [ objectExpr; indexArgs ] + Continuation.concatenate continuations continuation + | SynExpr.DotIndexedSet (objectExpr, indexArgs, valueExpr, _, _, _) -> + let continuations = List.map visit [ objectExpr; indexArgs; valueExpr ] + Continuation.concatenate continuations continuation + | SynExpr.NamedIndexedPropertySet (longDotId, expr1, expr2, _) -> + visit expr1 (fun nodes1 -> + visit expr2 (fun nodes2 -> + [ yield! visitSynLongIdent longDotId; yield! nodes1; yield! nodes2 ] + |> continuation)) + | SynExpr.DotNamedIndexedPropertySet (targetExpr, longDotId, argExpr, rhsExpr, _) -> + let continuations = List.map visit [ targetExpr; argExpr; rhsExpr ] + + let finalContinuation nodes = + visitSynLongIdent longDotId @ List.concat nodes |> continuation + + Continuation.sequence continuations finalContinuation + | SynExpr.TypeTest (expr, targetType, _) -> visit expr (fun nodes -> nodes @ visitSynType targetType |> continuation) + | SynExpr.Upcast (expr, targetType, _) -> visit expr (fun nodes -> nodes @ visitSynType targetType |> continuation) + | SynExpr.Downcast (expr, targetType, _) -> visit expr (fun nodes -> nodes @ visitSynType targetType |> continuation) + | SynExpr.InferredUpcast (expr, _) -> visit expr continuation + | SynExpr.InferredDowncast (expr, _) -> visit expr continuation + | SynExpr.Null _ -> continuation [] + | SynExpr.AddressOf (expr = expr) -> visit expr continuation + | SynExpr.TraitCall (supportTys, traitSig, argExpr, _) -> + visit argExpr (fun nodes -> + [ + yield! visitSynType supportTys + yield! visitSynMemberSig traitSig + yield! nodes + ] + |> continuation) + | SynExpr.JoinIn (lhsExpr, _, rhsExpr, _) -> + let continuations = List.map visit [ lhsExpr; rhsExpr ] + Continuation.concatenate continuations continuation + | SynExpr.ImplicitZero _ -> continuation [] + | SynExpr.SequentialOrImplicitYield (_, expr1, expr2, _, _) -> + let continuations = List.map visit [ expr1; expr2 ] + Continuation.concatenate continuations continuation + | SynExpr.YieldOrReturn (expr = expr) -> visit expr continuation + | SynExpr.YieldOrReturnFrom (expr = expr) -> visit expr continuation + | SynExpr.LetOrUseBang (pat = pat; rhs = rhs; andBangs = andBangs; body = body) -> + let continuations = + let andBangExprs = List.map (fun (SynExprAndBang (body = body)) -> body) andBangs + List.map visit (body :: rhs :: andBangExprs) + + let finalContinuation nodes = + [ + yield! List.concat nodes + yield! visitPat pat + for SynExprAndBang (pat = pat) in andBangs do + yield! visitPat pat + ] + |> continuation + + Continuation.sequence continuations finalContinuation + | SynExpr.MatchBang (expr = expr; clauses = clauses) -> + visit expr (fun exprNodes -> + [ yield! exprNodes; yield! 
List.collect visitSynMatchClause clauses ] + |> continuation) + | SynExpr.DoBang (expr, _) -> visit expr continuation + | SynExpr.LibraryOnlyILAssembly (typeArgs = typeArgs; args = args; retTy = retTy) -> + let typeNodes = List.collect visitSynType (typeArgs @ retTy) + let continuations = List.map visit args + + let finalContinuation nodes = + List.concat nodes @ typeNodes |> continuation + + Continuation.sequence continuations finalContinuation + | SynExpr.LibraryOnlyStaticOptimization (constraints, expr, optimizedExpr, _) -> + let constraintTypes = + constraints + |> List.choose (function + | SynStaticOptimizationConstraint.WhenTyparTyconEqualsTycon (rhsType = t) -> Some t + | SynStaticOptimizationConstraint.WhenTyparIsStruct _ -> None) + + visit expr (fun eNodes -> + visit optimizedExpr (fun oNodes -> + [ + yield! List.collect visitSynType constraintTypes + yield! eNodes + yield! oNodes + ] + |> continuation)) + | SynExpr.LibraryOnlyUnionCaseFieldGet (expr, longId, _, _) -> + visit expr (fun eNodes -> visitLongIdent longId @ eNodes |> continuation) + | SynExpr.LibraryOnlyUnionCaseFieldSet (expr, longId, _, rhsExpr, _) -> + visit expr (fun eNodes -> + visit rhsExpr (fun rhsNodes -> [ yield! visitLongIdent longId; yield! eNodes; yield! rhsNodes ] |> continuation)) + | SynExpr.ArbitraryAfterError _ -> continuation [] + | SynExpr.FromParseError _ -> continuation [] + | SynExpr.DiscardAfterMissingQualificationAfterDot _ -> continuation [] + | SynExpr.Fixed (expr, _) -> visit expr continuation + | SynExpr.InterpolatedString (contents = contents) -> + let continuations = + List.map + visit + (List.choose + (function + | SynInterpolatedStringPart.FillExpr (fillExpr = e) -> Some e + | SynInterpolatedStringPart.String _ -> None) + contents) + + Continuation.concatenate continuations continuation + | SynExpr.DebugPoint _ -> continuation [] + | SynExpr.Dynamic (funcExpr, _, argExpr, _) -> + let continuations = List.map visit [ funcExpr; argExpr ] + Continuation.concatenate continuations continuation + + visit e id + +let visitPat (p: SynPat) : FileContentEntry list = + let rec visit (p: SynPat) (continuation: FileContentEntry list -> FileContentEntry list) : FileContentEntry list = + match p with + | SynPat.Paren (pat = pat) -> visit pat continuation + | SynPat.Typed (pat = pat; targetType = t) -> visit pat (fun nodes -> nodes @ visitSynType t) + | SynPat.Const _ -> continuation [] + | SynPat.Wild _ -> continuation [] + | SynPat.Named _ -> continuation [] + | SynPat.Attrib (pat, attributes, _) -> visit pat (fun nodes -> visitSynAttributes attributes @ nodes |> continuation) + | SynPat.Or (lhsPat, rhsPat, _, _) -> + let continuations = List.map visit [ lhsPat; rhsPat ] + Continuation.concatenate continuations continuation + | SynPat.ListCons (lhsPat, rhsPat, _, _) -> + let continuations = List.map visit [ lhsPat; rhsPat ] + Continuation.concatenate continuations continuation + | SynPat.Ands (pats, _) -> + let continuations = List.map visit pats + Continuation.concatenate continuations continuation + | SynPat.As (lhsPat, rhsPat, _) -> + let continuations = List.map visit [ lhsPat; rhsPat ] + Continuation.concatenate continuations continuation + | SynPat.LongIdent (longDotId = longDotId; typarDecls = typarDecls; argPats = argPats) -> + continuation + [ + yield! visitSynLongIdent longDotId + yield! collectFromOption visitSynValTyparDecls typarDecls + yield! 
visitSynArgPats argPats + ] + | SynPat.Tuple (_, elementPats, _) -> + let continuations = List.map visit elementPats + Continuation.concatenate continuations continuation + | SynPat.ArrayOrList (_, elementPats, _) -> + let continuations = List.map visit elementPats + Continuation.concatenate continuations continuation + | SynPat.Record (fieldPats, _) -> + let pats = List.map (fun (_, _, p) -> p) fieldPats + + let lids = + [ + for (l, _), _, _ in fieldPats do + yield! visitLongIdent l + ] + + let continuations = List.map visit pats + + let finalContinuation nodes = + [ yield! List.concat nodes; yield! lids ] |> continuation + + Continuation.sequence continuations finalContinuation + | SynPat.Null _ -> continuation [] + | SynPat.OptionalVal _ -> continuation [] + | SynPat.IsInst (t, _) -> continuation (visitSynType t) + | SynPat.QuoteExpr (expr, _) -> continuation (visitSynExpr expr) + | SynPat.DeprecatedCharRange _ -> continuation [] + | SynPat.InstanceMember _ -> continuation [] + | SynPat.FromParseError _ -> continuation [] + + visit p id + +let visitSynArgPats (argPat: SynArgPats) = + match argPat with + | SynArgPats.Pats args -> List.collect visitPat args + | SynArgPats.NamePatPairs (pats = pats) -> + [ + for _, _, p in pats do + yield! visitPat p + ] + +let visitSynSimplePat (pat: SynSimplePat) = + match pat with + | SynSimplePat.Id _ -> [] + | SynSimplePat.Attrib (pat, attributes, _) -> [ yield! visitSynSimplePat pat; yield! visitSynAttributes attributes ] + | SynSimplePat.Typed (pat, t, _) -> [ yield! visitSynSimplePat pat; yield! visitSynType t ] + +let visitSynSimplePats (pats: SynSimplePats) = + match pats with + | SynSimplePats.SimplePats (pats = pats) -> List.collect visitSynSimplePat pats + | SynSimplePats.Typed (pats, t, _) -> [ yield! visitSynSimplePats pats; yield! visitSynType t ] + +let visitSynMatchClause (SynMatchClause (pat = pat; whenExpr = whenExpr; resultExpr = resultExpr)) = + [ + yield! visitPat pat + yield! collectFromOption visitSynExpr whenExpr + yield! visitSynExpr resultExpr + ] + +let visitBinding (SynBinding (attributes = attributes; headPat = headPat; returnInfo = returnInfo; expr = expr)) : FileContentEntry list = + [ + yield! visitSynAttributes attributes + match headPat with + | SynPat.LongIdent(argPats = SynArgPats.Pats pats) -> yield! List.collect visitPat pats + | _ -> yield! visitPat headPat + yield! collectFromOption visitSynBindingReturnInfo returnInfo + yield! visitSynExpr expr + ] + +let visitSynBindingReturnInfo (SynBindingReturnInfo (typeName = typeName; attributes = attributes)) = + [ yield! visitSynAttributes attributes; yield! visitSynType typeName ] + +let visitSynMemberSig (ms: SynMemberSig) : FileContentEntry list = + match ms with + | SynMemberSig.Member (memberSig = memberSig) -> visitSynValSig memberSig + | SynMemberSig.Interface (interfaceType, _) -> visitSynType interfaceType + | SynMemberSig.Inherit (inheritedType, _) -> visitSynType inheritedType + | SynMemberSig.ValField (field, _) -> visitSynField field + | SynMemberSig.NestedType _ -> [] + +let mkFileContent (f: FileInProject) : FileContentEntry list = + [ + match f.ParsedInput with + | ParsedInput.SigFile (ParsedSigFileInput (contents = contents)) -> + for SynModuleOrNamespaceSig (longId = longId; kind = kind; decls = decls; attribs = attribs) in contents do + yield! List.collect visitSynAttributeList attribs + + match kind with + | SynModuleOrNamespaceKind.GlobalNamespace + | SynModuleOrNamespaceKind.AnonModule -> yield! 
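(*
   An illustrative sketch of the output of mkFileContent. For a file such as (contents made
   up for illustration):

       module Company.App.Logic

       open Company.Core

       let calc (money: Company.Core.Money) = Utils.add money money

   the extracted entries are expected to look roughly like:

       [ FileContentEntry.TopLevelNamespace(
             [ "Company"; "App" ],
             [ FileContentEntry.OpenStatement [ "Company"; "Core" ]
               // From the Company.Core.Money annotation; the last part is dropped.
               FileContentEntry.PrefixedIdentifier [ "Company"; "Core" ]
               // From Utils.add; again without the last part.
               FileContentEntry.PrefixedIdentifier [ "Utils" ] ]) ]

   The exact nesting and ordering of entries depends on the visitors above; only the shape
   matters for the dependency resolution.
*)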
List.collect visitSynModuleSigDecl decls + | SynModuleOrNamespaceKind.DeclaredNamespace -> + let path = longIdentToPath false longId + yield FileContentEntry.TopLevelNamespace(path, List.collect visitSynModuleSigDecl decls) + | SynModuleOrNamespaceKind.NamedModule -> + let path = longIdentToPath true longId + yield FileContentEntry.TopLevelNamespace(path, List.collect visitSynModuleSigDecl decls) + | ParsedInput.ImplFile (ParsedImplFileInput (contents = contents)) -> + for SynModuleOrNamespace (longId = longId; attribs = attribs; kind = kind; decls = decls) in contents do + yield! List.collect visitSynAttributeList attribs + + match kind with + | SynModuleOrNamespaceKind.GlobalNamespace + | SynModuleOrNamespaceKind.AnonModule -> yield! List.collect visitSynModuleDecl decls + | SynModuleOrNamespaceKind.DeclaredNamespace -> + let path = longIdentToPath false longId + yield FileContentEntry.TopLevelNamespace(path, List.collect visitSynModuleDecl decls) + | SynModuleOrNamespaceKind.NamedModule -> + let path = longIdentToPath true longId + yield FileContentEntry.TopLevelNamespace(path, List.collect visitSynModuleDecl decls) + ] diff --git a/src/Compiler/Driver/GraphChecking/FileContentMapping.fsi b/src/Compiler/Driver/GraphChecking/FileContentMapping.fsi new file mode 100644 index 00000000000..d5ef03867b5 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/FileContentMapping.fsi @@ -0,0 +1,4 @@ +module internal rec FSharp.Compiler.GraphChecking.FileContentMapping + +/// Extract the FileContentEntries from the ParsedInput of a file. +val mkFileContent: f: FileInProject -> FileContentEntry list diff --git a/src/Compiler/Driver/GraphChecking/Graph.fs b/src/Compiler/Driver/GraphChecking/Graph.fs new file mode 100644 index 00000000000..0e776181a63 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/Graph.fs @@ -0,0 +1,91 @@ +namespace FSharp.Compiler.GraphChecking + +open System.Collections.Generic +open System.Text +open FSharp.Compiler.IO + +/// Directed Acyclic Graph (DAG) of arbitrary nodes +type internal Graph<'Node> = IReadOnlyDictionary<'Node, 'Node array> + +module internal Graph = + let make (nodeDeps: ('Node * 'Node array) seq) = nodeDeps |> readOnlyDict + + let map (f: 'T -> 'U) (graph: Graph<'T>) : Graph<'U> = + graph + |> Seq.map (fun (KeyValue (node, deps)) -> f node, deps |> Array.map f) + |> make + + let addIfMissing<'Node when 'Node: equality> (nodes: 'Node seq) (graph: Graph<'Node>) : Graph<'Node> = + let missingNodes = nodes |> Seq.except graph.Keys |> Seq.toArray + + let entriesToAdd = + missingNodes |> Seq.map (fun n -> KeyValuePair(n, [||])) |> Seq.toArray + + graph + |> Seq.toArray + |> Array.append entriesToAdd + |> Array.map (fun (KeyValue (k, v)) -> k, v) + |> readOnlyDict + + let transitive<'Node when 'Node: equality> (graph: Graph<'Node>) : Graph<'Node> = + /// Find transitive dependencies of a single node. + let transitiveDeps (node: 'Node) = + let visited = HashSet<'Node>() + + let rec dfs (node: 'Node) = + graph[node] + // Add direct dependencies. + // Use HashSet.Add return value semantics to filter out those that were added previously. 
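(*
   A small usage sketch of the Graph module defined here (node type int, dependency
   direction as in the rest of this file: a key maps to the nodes it depends on):

       let g: Graph<int> = Graph.make [ 3, [| 2 |]; 2, [| 1 |]; 1, [||] ]

       Graph.transitive g
       // expected: 3 -> [| 2; 1 |], 2 -> [| 1 |], 1 -> [||]   (element order may vary)

       Graph.reverse g
       // expected: 2 -> [| 3 |], 1 -> [| 2 |], 3 -> [||]      (3 kept via addIfMissing)
*)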
+ |> Array.filter visited.Add + |> Array.iter dfs + + dfs node + visited |> Seq.toArray + + graph.Keys + |> Seq.toArray + |> Array.Parallel.map (fun node -> node, transitiveDeps node) + |> readOnlyDict + + /// Create a reverse of the graph + let reverse (originalGraph: Graph<'Node>) : Graph<'Node> = + originalGraph + // Collect all edges + |> Seq.collect (fun (KeyValue (idx, deps)) -> deps |> Array.map (fun dep -> idx, dep)) + // Group dependants of the same dependencies together + |> Seq.groupBy snd + // Construct reversed graph + |> Seq.map (fun (dep, edges) -> dep, edges |> Seq.map fst |> Seq.toArray) + |> readOnlyDict + |> addIfMissing originalGraph.Keys + + let printCustom (graph: Graph<'Node>) (nodePrinter: 'Node -> string) : unit = + printfn "Graph:" + let join (xs: string[]) = System.String.Join(", ", xs) + + graph + |> Seq.iter (fun (KeyValue (file, deps)) -> printfn $"{file} -> {deps |> Array.map nodePrinter |> join}") + + let print (graph: Graph<'Node>) : unit = + printCustom graph (fun node -> node.ToString()) + + let serialiseToMermaid path (graph: Graph) = + let sb = StringBuilder() + let appendLine (line: string) = sb.AppendLine(line) |> ignore + + appendLine "```mermaid" + appendLine "flowchart RL" + + for KeyValue ((idx, fileName), _) in graph do + appendLine $" %i{idx}[\"%s{fileName}\"]" + + for KeyValue ((idx, _), deps) in graph do + for depIdx, _depFileName in deps do + appendLine $" %i{idx} --> %i{depIdx}" + + appendLine "```" + + use out = + FileSystem.OpenFileForWriteShim(path, fileMode = System.IO.FileMode.Create) + + out.WriteAllText(sb.ToString()) diff --git a/src/Compiler/Driver/GraphChecking/Graph.fsi b/src/Compiler/Driver/GraphChecking/Graph.fsi new file mode 100644 index 00000000000..95542470d8a --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/Graph.fsi @@ -0,0 +1,21 @@ +namespace FSharp.Compiler.GraphChecking + +open System.Collections.Generic + +/// A Directed Acyclic Graph (DAG) of arbitrary nodes. +type internal Graph<'Node> = IReadOnlyDictionary<'Node, 'Node array> + +/// Functions for operating on the Graph type. +module internal Graph = + /// Build the graph. + val make: nodeDeps: seq<'Node * 'Node array> -> Graph<'Node> when 'Node: equality + val map<'T, 'U when 'U: equality> : f: ('T -> 'U) -> graph: Graph<'T> -> Graph<'U> + /// Create a transitive closure of the graph in O(n^2) time (but parallelize it). + /// The resulting graph contains edge A -> C iff the input graph contains a (directed) non-zero length path from A to C. + val transitive<'Node when 'Node: equality> : graph: Graph<'Node> -> Graph<'Node> + /// Create a reverse of the graph. + val reverse<'Node when 'Node: equality> : originalGraph: Graph<'Node> -> Graph<'Node> + /// Print the contents of the graph to the standard output. + val print: graph: Graph<'Node> -> unit + /// Create a simple Mermaid graph and save it under the path specified. + val serialiseToMermaid: path: string -> graph: Graph -> unit diff --git a/src/Compiler/Driver/GraphChecking/GraphProcessing.fs b/src/Compiler/Driver/GraphChecking/GraphProcessing.fs new file mode 100644 index 00000000000..332ed2a00ec --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/GraphProcessing.fs @@ -0,0 +1,163 @@ +module internal FSharp.Compiler.GraphChecking.GraphProcessing + +open System.Threading + +/// Information about the node in a graph, describing its relation with other nodes. 
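(*
   For reference, serialiseToMermaid above expects a graph keyed by (index, fileName) pairs
   and writes a Mermaid flowchart. For a two-file project where B.fs (index 1) depends on
   A.fsi (index 0), the generated file would look roughly like this (file names illustrative):

       ```mermaid
       flowchart RL
           0["A.fsi"]
           1["B.fs"]
           1 --> 0
       ```
*)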
+type NodeInfo<'Item> = + { + Item: 'Item + Deps: 'Item[] + TransitiveDeps: 'Item[] + Dependants: 'Item[] + } + +type IncrementableInt(value: int) = + let mutable value = value + member this.Value = value + // Increment the value in a thread-safe manner and return the new value. + member this.Increment() = Interlocked.Increment(&value) + +type GraphNode<'Item, 'Result> = + { + Info: NodeInfo<'Item> + /// Used to determine when all dependencies of this node have been resolved. + ProcessedDepsCount: IncrementableInt + mutable Result: 'Result option + } + +/// An already processed node in the graph, with its result available +type ProcessedNode<'Item, 'Result> = + { + Info: NodeInfo<'Item> + Result: 'Result + } + +let processGraph<'Item, 'Result when 'Item: equality and 'Item: comparison> + (graph: Graph<'Item>) + (work: ('Item -> ProcessedNode<'Item, 'Result>) -> NodeInfo<'Item> -> 'Result) + (parentCt: CancellationToken) + : ('Item * 'Result)[] = + let transitiveDeps = graph |> Graph.transitive + let dependants = graph |> Graph.reverse + // Cancellation source used to signal either an exception in one of the items or end of processing. + use localCts = new CancellationTokenSource() + use cts = CancellationTokenSource.CreateLinkedTokenSource(parentCt, localCts.Token) + + let makeNode (item: 'Item) : GraphNode<'Item, 'Result> = + let info = + let exists = graph.ContainsKey item + + if + not exists + || not (transitiveDeps.ContainsKey item) + || not (dependants.ContainsKey item) + then + printfn $"Unexpected inconsistent state of the graph for item '{item}'" + + { + Item = item + Deps = graph[item] + TransitiveDeps = transitiveDeps[item] + Dependants = dependants[item] + } + + { + Info = info + Result = None + ProcessedDepsCount = IncrementableInt(0) + } + + let nodes = graph.Keys |> Seq.map (fun item -> item, makeNode item) |> readOnlyDict + + let lookupMany items = + items |> Array.map (fun item -> nodes[item]) + + let leaves = + nodes.Values |> Seq.filter (fun n -> n.Info.Deps.Length = 0) |> Seq.toArray + + let getItemPublicNode item = + let node = nodes[item] + + { + ProcessedNode.Info = node.Info + ProcessedNode.Result = + node.Result + |> Option.defaultWith (fun () -> failwith $"Results for item '{node.Info.Item}' are not yet available") + } + + let processedCount = IncrementableInt(0) + + /// Create a setter and getter for an exception raised in one of the work items. + /// Only the first exception encountered is stored - this can cause non-deterministic errors if more than one item fails. + let raiseExn, getExn = + let mutable exn: ('Item * System.Exception) option = None + // Only set the exception if it hasn't been set already + let setExn newExn = + lock exn (fun () -> + match exn with + | Some _ -> () + | None -> exn <- newExn + + localCts.Cancel()) + + let getExn () = exn + setExn, getExn + + let incrementProcessedNodesCount () = + if processedCount.Increment() = nodes.Count then + localCts.Cancel() + + let rec queueNode node = + Async.Start( + async { + let! 
res = async { processNode node } |> Async.Catch + + match res with + | Choice1Of2 () -> () + | Choice2Of2 ex -> raiseExn (Some(node.Info.Item, ex)) + }, + cts.Token + ) + + and processNode (node: GraphNode<'Item, 'Result>) : unit = + + let info = node.Info + + let singleRes = work getItemPublicNode info + node.Result <- Some singleRes + + let unblockedDependants = + node.Info.Dependants + |> lookupMany + // For every dependant, increment its number of processed dependencies, + // and filter dependants which now have all dependencies processed (but didn't before). + |> Array.filter (fun dependant -> + let pdc = dependant.ProcessedDepsCount.Increment() + // Note: We cannot read 'dependant.ProcessedDepsCount' again to avoid returning the same item multiple times. + pdc = dependant.Info.Deps.Length) + + unblockedDependants |> Array.iter queueNode + incrementProcessedNodesCount () + + leaves |> Array.iter queueNode + + // Wait for end of processing, an exception, or an external cancellation request. + cts.Token.WaitHandle.WaitOne() |> ignore + // If we stopped early due to external cancellation, throw. + parentCt.ThrowIfCancellationRequested() + + // If we stopped early due to an exception, reraise it. + match getExn () with + | None -> () + | Some (item, ex) -> raise (System.Exception($"Encountered exception when processing item '{item}'", ex)) + + // All calculations succeeded - extract the results and sort in input order. + nodes.Values + |> Seq.map (fun node -> + let result = + node.Result + |> Option.defaultWith (fun () -> failwith $"Unexpected lack of result for item '{node.Info.Item}'") + + node.Info.Item, result) + |> Seq.sortBy fst + |> Seq.toArray diff --git a/src/Compiler/Driver/GraphChecking/GraphProcessing.fsi b/src/Compiler/Driver/GraphChecking/GraphProcessing.fsi new file mode 100644 index 00000000000..cb9a95a59f8 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/GraphProcessing.fsi @@ -0,0 +1,35 @@ +/// Parallel processing of graph of work items with dependencies +module internal FSharp.Compiler.GraphChecking.GraphProcessing + +open System.Threading + +/// Information about the node in a graph, describing its relation with other nodes. +type NodeInfo<'Item> = + { Item: 'Item + Deps: 'Item[] + TransitiveDeps: 'Item[] + Dependants: 'Item[] } + +/// An already processed node in the graph, with its result available +type ProcessedNode<'Item, 'Result> = + { Info: NodeInfo<'Item> + Result: 'Result } + +/// +/// A generic method to generate results for a graph of work items in parallel. +/// Processes leaves first, and after each node has been processed, schedules any now unblocked dependants. +/// Returns a list of results, one per item. +/// Uses the Thread Pool to schedule work. +/// +/// Graph of work items +/// A function to generate results for a single item +/// Cancellation token +/// +/// An alternative scheduling approach is to schedule N parallel tasks that process items from a BlockingCollection. +/// My basic tests suggested it's faster, although confirming that would require more detailed testing. 
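(*
   A minimal usage sketch of processGraph, with a hypothetical work function; items are
   plain ints and each result is a string (in the compiler the items are files and the
   results are type-checking outputs):

       open System.Threading

       let graph: Graph<int> = Graph.make [ 2, [| 1 |]; 1, [||] ]

       let work (getProcessed: int -> ProcessedNode<int, string>) (node: NodeInfo<int>) : string =
           // Results of all transitive dependencies are guaranteed to be available here.
           let deps =
               node.TransitiveDeps
               |> Array.map (fun d -> (getProcessed d).Result)
               |> String.concat "; "

           $"node {node.Item} after [{deps}]"

       let results = processGraph graph work CancellationToken.None
       // expected: [| 1, "node 1 after []"; 2, "node 2 after [node 1 after []]" |]
*)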
+/// +val processGraph<'Item, 'Result when 'Item: equality and 'Item: comparison> : + graph: Graph<'Item> -> + work: (('Item -> ProcessedNode<'Item, 'Result>) -> NodeInfo<'Item> -> 'Result) -> + parentCt: CancellationToken -> + ('Item * 'Result)[] diff --git a/src/Compiler/Driver/GraphChecking/TrieMapping.fs b/src/Compiler/Driver/GraphChecking/TrieMapping.fs new file mode 100644 index 00000000000..2435cfc167f --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/TrieMapping.fs @@ -0,0 +1,294 @@ +module internal FSharp.Compiler.GraphChecking.TrieMapping + +open System.Collections.Generic +open FSharp.Compiler.Syntax + +[] +module private HashSet = + /// Create a new HashSet<'T> with a single element. + let singleton value = HashSet(Seq.singleton value) + /// Create new new HashSet<'T> with zero elements. + let empty () = HashSet(Seq.empty) + +let autoOpenShapes = + set + [| + "FSharp.Core.AutoOpenAttribute" + "Core.AutoOpenAttribute" + "AutoOpenAttribute" + "FSharp.Core.AutoOpen" + "Core.AutoOpen" + "AutoOpen" + |] + +/// This isn't bullet proof, we do prompt a warning when the user is aliasing the AutoOpenAttribute. +let isAutoOpenAttribute (attribute: SynAttribute) = + match attribute.ArgExpr with + | SynExpr.Const(constant = SynConst.Unit) + | SynExpr.Const(constant = SynConst.String _) + | SynExpr.Paren(expr = SynExpr.Const(constant = SynConst.String _)) -> + let attributeName = + attribute.TypeName.LongIdent + |> List.map (fun ident -> ident.idText) + |> String.concat "." + + autoOpenShapes.Contains attributeName + | _ -> false + +let isAnyAttributeAutoOpen (attributes: SynAttributes) = + attributes + |> List.exists (fun (atl: SynAttributeList) -> List.exists isAutoOpenAttribute atl.Attributes) + +/// Checks to see if the top level ModuleOrNamespace exposes content that could be inferred by any of the subsequent files. +/// This can happen when a `namespace global` is used, or when a module (with a single ident name) has the `[]` attribute. 
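(*
   For illustration, two files that the check below treats as exposing content to the root
   of the trie (file contents made up):

       // Utilities.fs: an auto-open module with a single-identifier name
       [<AutoOpen>]
       module Utilities

       // Prelude.fs: a global namespace
       namespace global

   Anything they define is reachable without an open statement, so every subsequent file in
   the compilation order is treated as depending on them.
*)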
+let doesFileExposeContentToTheRoot (ast: ParsedInput) : bool = + match ast with + | ParsedInput.SigFile (ParsedSigFileInput (contents = contents)) -> + contents + |> List.exists (fun (SynModuleOrNamespaceSig (attribs = attribs; longId = longId; kind = kind)) -> + (isAnyAttributeAutoOpen attribs && longId.Length < 2) + || kind = SynModuleOrNamespaceKind.GlobalNamespace) + + | ParsedInput.ImplFile (ParsedImplFileInput (contents = contents)) -> + contents + |> List.exists (fun (SynModuleOrNamespace (attribs = attribs; longId = longId; kind = kind)) -> + (isAnyAttributeAutoOpen attribs && longId.Length < 2) + || kind = SynModuleOrNamespaceKind.GlobalNamespace) + +let mergeTrieNodes (defaultChildSize: int) (tries: TrieNode array) = + let rec mergeTrieNodesAux (root: TrieNode) (KeyValue (k, v)) = + if root.Children.ContainsKey k then + let node = root.Children[k] + + match node.Current, v.Current with + | TrieNodeInfo.Namespace (filesThatExposeTypes = currentFiles), TrieNodeInfo.Namespace (filesThatExposeTypes = otherFiles) -> + for otherFile in otherFiles do + currentFiles.Add(otherFile) |> ignore + | _ -> () + + for kv in v.Children do + mergeTrieNodesAux node kv + + else + root.Children.Add(k, v) + + match Array.tryExactlyOne tries with + | Some ({ Current = TrieNodeInfo.Root _ } as singleTrie) -> singleTrie + | _ -> + let rootFiles = HashSet.empty () + + let root = + { + Current = TrieNodeInfo.Root rootFiles + Children = Dictionary<_, _>(defaultChildSize) + } + + for trie in tries do + for rootIndex in trie.Files do + rootFiles.Add rootIndex |> ignore + + match trie.Current with + | TrieNodeInfo.Root _ -> () + | current -> System.Diagnostics.Debug.Assert(false, $"The top level node info of a trie should be Root, got {current}") + + for kv in trie.Children do + mergeTrieNodesAux root kv + + root + +let mkDictFromKeyValuePairs (items: KeyValuePair<'TKey, 'TValue> list) = + let dict = Dictionary(Seq.length items) + + for KeyValue (k, v) in items do + dict.Add(k, v) + + dict + +let mkSingletonDict key value = + let dict = Dictionary(1) + dict.Add(key, value) + dict + +/// Process a top level SynModuleOrNamespace(Sig) +let processSynModuleOrNamespace<'Decl> + (mkTrieForDeclaration: FileIndex -> 'Decl -> KeyValuePair option) + (idx: FileIndex) + (name: LongIdent) + (attributes: SynAttributes) + (kind: SynModuleOrNamespaceKind) + (hasTypesOrAutoOpenNestedModules: bool) + (decls: 'Decl list) + : TrieNode = + let isNamespace = + match kind with + | SynModuleOrNamespaceKind.AnonModule + | SynModuleOrNamespaceKind.NamedModule -> false + | SynModuleOrNamespaceKind.DeclaredNamespace + | SynModuleOrNamespaceKind.GlobalNamespace -> true + + let children = + // Process the name of the ModuleOrNamespace. + // For each part in the name a TrieNode shall be created. + // Only the last node can be a module, depending on the SynModuleOrNamespaceKind. + let rec visit continuation (xs: LongIdent) = + match xs with + | [] -> failwith "should not be empty" + | [ finalPart ] -> + let name = finalPart.idText + + // A module always exposes the file index, as it could expose values and functions. + // A namespace only exposes the file when it has types or nested modules with an [] attribute. + // The reasoning is that a type could be inferred and a nested auto open module will lift its content one level up. 
+ let current = + if isNamespace then + TrieNodeInfo.Namespace( + name, + (if hasTypesOrAutoOpenNestedModules then + HashSet.singleton idx + else + HashSet.empty ()) + ) + else + TrieNodeInfo.Module(name, idx) + + let children = + List.choose (mkTrieForDeclaration idx) decls |> mkDictFromKeyValuePairs + + mkSingletonDict + name + { + Current = current + Children = children + } + |> continuation + | head :: tail -> + let name = head.idText + + visit + (fun node -> + let files = + match tail with + | [ _ ] -> + // In case you have: + // [] + // module A.B + // + // We should consider the namespace A to expose the current file. + // Due to the [] we treat A the same way we would module B. + let topLevelModuleOrNamespaceHasAutoOpen = isAnyAttributeAutoOpen attributes + + if topLevelModuleOrNamespaceHasAutoOpen && not isNamespace then + HashSet.singleton idx + else + HashSet.empty () + | _ -> HashSet.empty () + + let current = TrieNodeInfo.Namespace(name, files) + + mkSingletonDict name { Current = current; Children = node } |> continuation) + tail + + if List.isEmpty name then + // This can happen for a namespace global. + // We collect the child nodes from the decls + decls |> List.choose (mkTrieForDeclaration idx) |> mkDictFromKeyValuePairs + else + visit id name + + { + Current = Root(HashSet.empty ()) + Children = children + } + +let rec mkTrieNodeFor (file: FileInProject) : TrieNode = + let idx = file.Idx + + if doesFileExposeContentToTheRoot file.ParsedInput then + // If a file exposes content which does not need an open statement to access, we consider the file to be part of the root. + { + Current = Root(HashSet.singleton idx) + Children = Dictionary(0) + } + else + match file.ParsedInput with + | ParsedInput.SigFile (ParsedSigFileInput (contents = contents)) -> + contents + |> List.map + (fun (SynModuleOrNamespaceSig (longId = longId + kind = kind + attribs = attribs + decls = decls + accessibility = _accessibility)) -> + let hasTypesOrAutoOpenNestedModules = + decls + |> List.exists (function + | SynModuleSigDecl.Types _ -> true + | SynModuleSigDecl.NestedModule(moduleInfo = SynComponentInfo (attributes = attributes)) -> + isAnyAttributeAutoOpen attributes + | _ -> false) + + processSynModuleOrNamespace mkTrieForSynModuleSigDecl idx longId attribs kind hasTypesOrAutoOpenNestedModules decls) + |> List.toArray + |> mergeTrieNodes contents.Length + | ParsedInput.ImplFile (ParsedImplFileInput (contents = contents)) -> + contents + |> List.map + (fun (SynModuleOrNamespace (longId = longId; attribs = attribs; kind = kind; decls = decls; accessibility = _accessibility)) -> + let hasTypesOrAutoOpenNestedModules = + List.exists + (function + | SynModuleDecl.Types _ -> true + | SynModuleDecl.NestedModule(moduleInfo = SynComponentInfo (attributes = attributes)) -> + isAnyAttributeAutoOpen attributes + | _ -> false) + decls + + processSynModuleOrNamespace mkTrieForSynModuleDecl idx longId attribs kind hasTypesOrAutoOpenNestedModules decls) + |> List.toArray + |> mergeTrieNodes contents.Length + +and mkTrieForSynModuleDecl (fileIndex: FileIndex) (decl: SynModuleDecl) : KeyValuePair option = + match decl with + | SynModuleDecl.NestedModule (moduleInfo = SynComponentInfo(longId = [ nestedModuleIdent ]); decls = decls) -> + let name = nestedModuleIdent.idText + + let children = + decls + |> List.choose (mkTrieForSynModuleDecl fileIndex) + |> mkDictFromKeyValuePairs + + Some( + KeyValuePair( + name, + { + Current = TrieNodeInfo.Module(name, fileIndex) + Children = children + } + ) + ) + | _ -> None + 
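(*
   An illustrative sketch of the trie built for a single file (index 0) that declares
   (names made up):

       module Company.App.Logic

   mkTrieNodeFor is expected to produce roughly:

       Root (files = { })
         "Company" -> Namespace ("Company", filesThatExposeTypes = { })
           "App"   -> Namespace ("App", filesThatExposeTypes = { })
             "Logic" -> Module ("Logic", file = 0)

   Only the final Module node exposes file 0. The namespace nodes would only list the file
   in the special cases handled above, for example when the module carries [<AutoOpen>].
*)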
+and mkTrieForSynModuleSigDecl (fileIndex: FileIndex) (decl: SynModuleSigDecl) : KeyValuePair option = + match decl with + | SynModuleSigDecl.NestedModule (moduleInfo = SynComponentInfo(longId = [ nestedModuleIdent ]); moduleDecls = decls) -> + let name = nestedModuleIdent.idText + + let children = + decls + |> List.choose (mkTrieForSynModuleSigDecl fileIndex) + |> mkDictFromKeyValuePairs + + Some( + KeyValuePair( + name, + { + Current = TrieNodeInfo.Module(name, fileIndex) + Children = children + } + ) + ) + + | _ -> None + +let mkTrie (files: FileInProject array) : TrieNode = + mergeTrieNodes 0 (files |> Array.Parallel.map mkTrieNodeFor) diff --git a/src/Compiler/Driver/GraphChecking/TrieMapping.fsi b/src/Compiler/Driver/GraphChecking/TrieMapping.fsi new file mode 100644 index 00000000000..c9bca8e72a6 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/TrieMapping.fsi @@ -0,0 +1,5 @@ +module internal FSharp.Compiler.GraphChecking.TrieMapping + +/// Process all the files (in parallel) in a project to construct a Root TrieNode. +/// When the project has signature files, the implementation counterparts will not be processed. +val mkTrie: files: FileInProject array -> TrieNode diff --git a/src/Compiler/Driver/GraphChecking/TypeCheckingGraphProcessing.fs b/src/Compiler/Driver/GraphChecking/TypeCheckingGraphProcessing.fs new file mode 100644 index 00000000000..7f184a58880 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/TypeCheckingGraphProcessing.fs @@ -0,0 +1,104 @@ +module internal FSharp.Compiler.GraphChecking.TypeCheckingGraphProcessing + +open GraphProcessing +open System.Collections.Generic +open System.Threading + +// TODO Do we need to suppress some error logging if we +// TODO apply the same partial results multiple times? +// TODO Maybe we can enable logging only for the final fold +/// +/// Combine type-checking results of dependencies needed to type-check a 'higher' node in the graph +/// +/// Initial state +/// Direct dependencies of a node +/// Transitive dependencies of a node +/// A way to fold a single result into existing state +/// +/// Similar to 'processFileGraph', this function is generic yet specific to the type-checking process. +/// +let combineResults + (emptyState: 'State) + (deps: ProcessedNode<'Item, 'State * Finisher<'State, 'FinalFileResult>>[]) + (transitiveDeps: ProcessedNode<'Item, 'State * Finisher<'State, 'FinalFileResult>>[]) + (folder: 'State -> Finisher<'State, 'FinalFileResult> -> 'State) + : 'State = + match deps with + | [||] -> emptyState + | _ -> + // Instead of starting with empty state, + // reuse state produced by the dependency with the biggest number of transitive dependencies. + // This is to reduce the number of folds required to achieve the final state. + let biggestDependency = + let sizeMetric (node: ProcessedNode<_, _>) = node.Info.TransitiveDeps.Length + deps |> Array.maxBy sizeMetric + + let firstState = biggestDependency.Result |> fst + + // Find items not already included in the state. + // Note: Ordering is not preserved due to reusing results of the biggest child + // rather than starting with empty state + let itemsPresent = + set + [| + yield! 
biggestDependency.Info.TransitiveDeps + yield biggestDependency.Info.Item + |] + + let resultsToAdd = + transitiveDeps + |> Array.filter (fun dep -> itemsPresent.Contains dep.Info.Item = false) + |> Array.distinctBy (fun dep -> dep.Info.Item) + |> Array.map (fun dep -> dep.Result |> snd) + + // Fold results not already included and produce the final state + let state = Array.fold folder firstState resultsToAdd + state + +// TODO This function and its parameters are quite specific to type-checking despite using generic types. +// Perhaps we should make it either more specific and remove type parameters, or more generic. +/// +/// Process a graph of items. +/// A version of 'GraphProcessing.processGraph' with a signature slightly specific to type-checking. +/// +let processTypeCheckingGraph<'Item, 'ChosenItem, 'State, 'FinalFileResult when 'Item: equality and 'Item: comparison> + (graph: Graph<'Item>) + (work: 'Item -> 'State -> Finisher<'State, 'FinalFileResult>) + (folder: 'State -> Finisher<'State, 'FinalFileResult> -> 'FinalFileResult * 'State) + // Decides whether a result for an item should be included in the final state, and how to map the item if it should. + (finalStateChooser: 'Item -> 'ChosenItem option) + (emptyState: 'State) + (ct: CancellationToken) + : ('ChosenItem * 'FinalFileResult) list * 'State = + + let workWrapper + (getProcessedNode: 'Item -> ProcessedNode<'Item, 'State * Finisher<'State, 'FinalFileResult>>) + (node: NodeInfo<'Item>) + : 'State * Finisher<'State, 'FinalFileResult> = + let folder x y = folder x y |> snd + let deps = node.Deps |> Array.except [| node.Item |] |> Array.map getProcessedNode + + let transitiveDeps = + node.TransitiveDeps + |> Array.except [| node.Item |] + |> Array.map getProcessedNode + + let inputState = combineResults emptyState deps transitiveDeps folder + let singleRes = work node.Item inputState + let state = folder inputState singleRes + state, singleRes + + let results = processGraph graph workWrapper ct + + let finalFileResults, state: ('ChosenItem * 'FinalFileResult) list * 'State = + (([], emptyState), + results + |> Array.choose (fun (item, res) -> + match finalStateChooser item with + | Some item -> Some(item, res) + | None -> None)) + ||> Array.fold (fun (fileResults, state) (item, (_, itemRes)) -> + let fileResult, state = folder state itemRes + (item, fileResult) :: fileResults, state) + + finalFileResults, state diff --git a/src/Compiler/Driver/GraphChecking/TypeCheckingGraphProcessing.fsi b/src/Compiler/Driver/GraphChecking/TypeCheckingGraphProcessing.fsi new file mode 100644 index 00000000000..5db01dc3307 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/TypeCheckingGraphProcessing.fsi @@ -0,0 +1,17 @@ +/// Parallel processing of a type-checking file graph. +module internal FSharp.Compiler.GraphChecking.TypeCheckingGraphProcessing + +open System.Threading + +/// +/// Process a graph of items. +/// A version of 'GraphProcessing.processGraph' with a signature slightly specific to type-checking. 
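(*
   The state-reuse optimisation in combineResults above, illustrated on a small graph
   (string nodes, made-up names):

       let graph: Graph<string> =
           Graph.make [ "D", [| "B"; "C" |]; "B", [| "A" |]; "C", [||]; "A", [||] ]

   To build the input state for D, the results of A, B and C are not folded into the empty
   state one by one. Instead the fold starts from the state already produced for B (the
   direct dependency with the most transitive dependencies, covering {A, B}) and only the
   remaining transitive result, C, is folded on top. Fewer folds, same combined information,
   at the cost of the ordering caveat noted above.
*)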
+/// +val processTypeCheckingGraph<'Item, 'ChosenItem, 'State, 'FinalFileResult when 'Item: equality and 'Item: comparison> : + graph: Graph<'Item> -> + work: ('Item -> 'State -> Finisher<'State, 'FinalFileResult>) -> + folder: ('State -> Finisher<'State, 'FinalFileResult> -> 'FinalFileResult * 'State) -> + finalStateChooser: ('Item -> 'ChosenItem option) -> + emptyState: 'State -> + ct: CancellationToken -> + ('ChosenItem * 'FinalFileResult) list * 'State diff --git a/src/Compiler/Driver/GraphChecking/Types.fs b/src/Compiler/Driver/GraphChecking/Types.fs new file mode 100644 index 00000000000..67a0c564931 --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/Types.fs @@ -0,0 +1,170 @@ +namespace FSharp.Compiler.GraphChecking + +open System.Collections.Generic +open FSharp.Compiler.Syntax + +/// The index of a file inside a project. +type internal FileIndex = int + +/// File name captured by ParsedInput.FileName. +type internal FileName = string + +/// Represents the string value of a single identifier in the syntax tree. +/// For example, `"Hello"` in `module Hello`. +type internal Identifier = string + +/// Represents one or more identifiers in the syntax tree. +/// For example, `[ "X"; "Y"; "Z" ]` in `open X.Y.Z` +type internal LongIdentifier = string list + +/// Combines the file name, index and parsed syntax tree of a file in a project. +type internal FileInProject = + { + Idx: FileIndex + FileName: FileName + ParsedInput: ParsedInput + } + +/// There is a subtle difference between a module and namespace. +/// A namespace does not necessarily expose a set of dependent files. +/// Only when the namespace exposes types that could later be inferred. +/// Children of a namespace don't automatically depend on each other for that reason +type internal TrieNodeInfo = + | Root of files: HashSet + | Module of name: Identifier * file: FileIndex + | Namespace of name: Identifier * filesThatExposeTypes: HashSet + + member x.Files: Set = + match x with + | Root files -> set files + | Module (file = file) -> Set.singleton file + | Namespace (filesThatExposeTypes = files) -> set files + +type internal TrieNode = + { + Current: TrieNodeInfo + Children: Dictionary + } + + member x.Files = x.Current.Files + +/// A significant construct found in the syntax tree of a file. +/// This construct needs to be processed in order to deduce potential links to other files in the project. +type internal FileContentEntry = + /// Any toplevel namespace a file might have. + /// In case a file has `module X.Y.Z`, then `X.Y` is considered to be the toplevel namespace + | TopLevelNamespace of path: LongIdentifier * content: FileContentEntry list + /// The `open X.Y.Z` syntax. + | OpenStatement of path: LongIdentifier + /// Any identifier that has more than one piece (LongIdent or SynLongIdent) in it. + /// The last part of the identifier should not be included. + | PrefixedIdentifier of path: LongIdentifier + /// Being explicit about nested modules allows for easier reasoning what namespaces (paths) are open. + /// We can scope an `OpenStatement` to the everything that is happening inside the nested module. 
+ | NestedModule of name: string * nestedContent: FileContentEntry list + +type internal FileContent = + { + FileName: FileName + Idx: FileIndex + Content: FileContentEntry array + } + +type internal FileContentQueryState = + { + OwnNamespace: LongIdentifier option + OpenedNamespaces: Set + FoundDependencies: Set + CurrentFile: FileIndex + KnownFiles: Set + } + + static member Create (fileIndex: FileIndex) (knownFiles: Set) (filesAtRoot: Set) = + { + OwnNamespace = None + OpenedNamespaces = Set.empty + FoundDependencies = filesAtRoot + CurrentFile = fileIndex + KnownFiles = knownFiles + } + + member x.AddOwnNamespace(ns: LongIdentifier, ?files: Set) = + match files with + | None -> { x with OwnNamespace = Some ns } + | Some files -> + let foundDependencies = + Set.filter x.KnownFiles.Contains files |> Set.union x.FoundDependencies + + { x with + OwnNamespace = Some ns + FoundDependencies = foundDependencies + } + + member x.AddDependencies(files: Set) : FileContentQueryState = + let files = Set.filter x.KnownFiles.Contains files |> Set.union x.FoundDependencies + { x with FoundDependencies = files } + + member x.AddOpenNamespace(path: LongIdentifier, ?files: Set) = + match files with + | None -> + { x with + OpenedNamespaces = Set.add path x.OpenedNamespaces + } + | Some files -> + let foundDependencies = + Set.filter x.KnownFiles.Contains files |> Set.union x.FoundDependencies + + { x with + FoundDependencies = foundDependencies + OpenedNamespaces = Set.add path x.OpenedNamespaces + } + + member x.OpenNamespaces = + match x.OwnNamespace with + | None -> x.OpenedNamespaces + | Some ownNs -> Set.add ownNs x.OpenedNamespaces + +[] +type internal QueryTrieNodeResult = + /// No node was found for the path in the trie + | NodeDoesNotExist + /// A node was found but it yielded no file links + | NodeDoesNotExposeData + /// A node was found with one or more file links + | NodeExposesData of Set + +type internal QueryTrie = LongIdentifier -> QueryTrieNodeResult + +/// Helper class to help map signature files to implementation files and vice versa. +type internal FilePairMap(files: FileInProject array) = + let buildBiDirectionalMaps pairs = + Map.ofArray pairs, Map.ofArray (pairs |> Array.map (fun (a, b) -> (b, a))) + + let implToSig, sigToImpl = + files + |> Array.choose (fun f -> + match f.ParsedInput with + | ParsedInput.SigFile _ -> + files + |> Array.skip (f.Idx + 1) + |> Array.tryFind (fun (implFile: FileInProject) -> $"{implFile.FileName}i" = f.FileName) + |> Option.map (fun (implFile: FileInProject) -> (implFile.Idx, f.Idx)) + | ParsedInput.ImplFile _ -> None) + |> buildBiDirectionalMaps + + member x.GetSignatureIndex(implementationIndex: FileIndex) = Map.find implementationIndex implToSig + member x.GetImplementationIndex(signatureIndex: FileIndex) = Map.find signatureIndex sigToImpl + + member x.HasSignature(implementationIndex: FileIndex) = + Map.containsKey implementationIndex implToSig + + member x.TryGetSignatureIndex(implementationIndex: FileIndex) = + if x.HasSignature implementationIndex then + Some(x.GetSignatureIndex implementationIndex) + else + None + + member x.IsSignature(index: FileIndex) = Map.containsKey index sigToImpl + +/// Callback that returns a previously calculated 'Result and updates 'State accordingly. 
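(*
   A small sketch of how the Finisher delegate declared just below is intended to be used:
   the expensive, state-independent work happens up front, while applying its outcome to
   the shared state is deferred to the cheap delegate invocation (example types and names
   are made up; here 'State = int and 'Result = string):

       let work (item: string) : Finisher<int, string> =
           // ... expensive work that does not need the shared state ...
           Finisher(fun (count: int) -> $"{item} was processed as item #{count}", count + 1)

       let result, newState = (work "A.fs").Invoke(0)
       // result = "A.fs was processed as item #0", newState = 1
*)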
+type internal Finisher<'State, 'Result> = delegate of 'State -> 'Result * 'State diff --git a/src/Compiler/Driver/GraphChecking/Types.fsi b/src/Compiler/Driver/GraphChecking/Types.fsi new file mode 100644 index 00000000000..852b642bd5d --- /dev/null +++ b/src/Compiler/Driver/GraphChecking/Types.fsi @@ -0,0 +1,112 @@ +namespace FSharp.Compiler.GraphChecking + +open System.Collections.Generic +open FSharp.Compiler.Syntax + +/// The index of a file inside a project. +type internal FileIndex = int + +/// File name captured by ParsedInput.FileName. +type internal FileName = string + +/// Represents the string value of a single identifier in the syntax tree. +/// For example, `"Hello"` in `module Hello`. +type internal Identifier = string + +/// Represents one or more identifiers in the syntax tree. +/// For example, `[ "X"; "Y"; "Z" ]` in `open X.Y.Z` +type internal LongIdentifier = Identifier list + +/// Combines the file name, index and parsed syntax tree of a file in a project. +type internal FileInProject = + { Idx: FileIndex + FileName: FileName + ParsedInput: ParsedInput } + +/// There is a subtle difference between a module and namespace. +/// A namespace does not necessarily expose a set of dependent files. +/// Only when the namespace exposes types that could later be inferred. +/// Children of a namespace don't automatically depend on each other for that reason +type internal TrieNodeInfo = + | Root of files: HashSet + | Module of name: Identifier * file: FileIndex + | Namespace of name: Identifier * filesThatExposeTypes: HashSet + + member Files: Set + +/// A node in the Trie structure. +type internal TrieNode = + { + /// Information about this node. + Current: TrieNodeInfo + /// Child nodes + Children: Dictionary + } + + /// Zero or more files that define the LongIdentifier represented by this node. + member Files: Set + +/// A significant construct found in the syntax tree of a file. +/// This construct needs to be processed in order to deduce potential links to other files in the project. +type internal FileContentEntry = + /// Any toplevel namespace a file might have. + /// In case a file has `module X.Y.Z`, then `X.Y` is considered to be the toplevel namespace + | TopLevelNamespace of path: LongIdentifier * content: FileContentEntry list + /// The `open X.Y.Z` syntax. + | OpenStatement of path: LongIdentifier + /// Any identifier that has more than one piece (LongIdent or SynLongIdent) in it. + /// The last part of the identifier should not be included. + | PrefixedIdentifier of path: LongIdentifier + /// Being explicit about nested modules allows for easier reasoning what namespaces (paths) are open. + /// For example we can limit the scope of an `OpenStatement` to symbols defined inside the nested module. 
+ | NestedModule of name: string * nestedContent: FileContentEntry list + +/// File identifiers and its content extract for dependency resolution +type internal FileContent = + { FileName: FileName + Idx: FileIndex + Content: FileContentEntry array } + +type internal FileContentQueryState = + { OwnNamespace: LongIdentifier option + OpenedNamespaces: Set + FoundDependencies: Set + CurrentFile: FileIndex + KnownFiles: Set } + + static member Create: + fileIndex: FileIndex -> knownFiles: Set -> filesAtRoot: Set -> FileContentQueryState + member AddOwnNamespace: ns: LongIdentifier * ?files: Set -> FileContentQueryState + member AddDependencies: files: Set -> FileContentQueryState + member AddOpenNamespace: path: LongIdentifier * ?files: Set -> FileContentQueryState + member OpenNamespaces: Set + +/// Result of querying a Trie Node. +[] +type internal QueryTrieNodeResult = + /// No node was found for the path in the trie. + | NodeDoesNotExist + /// A node was found but no file exposes data for the LongIdentifier in question. + /// + /// This could happen if there is a single file with a top-level module `module A.B`, + /// and we search for `A`. + /// Although the `A` path exists in the Trie, it does not contain any relevant definitions (beyond itself). + /// + | NodeDoesNotExposeData + /// A node was found with one or more files that contain relevant definitions required for type-checking. + | NodeExposesData of Set + +/// A function for querying a Trie (the Trie is defined within the function's context) +type internal QueryTrie = LongIdentifier -> QueryTrieNodeResult + +/// Helper class for mapping signature files to implementation files and vice versa. +type internal FilePairMap = + new: files: FileInProject array -> FilePairMap + member GetSignatureIndex: implementationIndex: FileIndex -> FileIndex + member GetImplementationIndex: signatureIndex: FileIndex -> FileIndex + member HasSignature: implementationIndex: FileIndex -> bool + member TryGetSignatureIndex: implementationIndex: FileIndex -> FileIndex option + member IsSignature: index: FileIndex -> bool + +/// Callback that returns a previously calculated 'Result and updates 'State accordingly. 
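(*
   An illustrative sketch of FilePairMap (declared above) for a project compiled in the
   order A.fsi (index 0), A.fs (index 1), B.fs (index 2), with made-up file names:

       // let files : FileInProject array = ... the three files above ...
       let pairs = FilePairMap(files)

       pairs.IsSignature 0          // true
       pairs.HasSignature 1         // true
       pairs.GetSignatureIndex 1    // 0
       pairs.TryGetSignatureIndex 2 // None, B.fs is not backed by a signature

   The pairing relies on the convention that the signature file name is the implementation
   file name plus an extra "i" (A.fs / A.fsi) and that the signature precedes its
   implementation in the compilation order.
*)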
+type internal Finisher<'State, 'Result> = delegate of 'State -> 'Result * 'State diff --git a/src/Compiler/Driver/ParseAndCheckInputs.fs b/src/Compiler/Driver/ParseAndCheckInputs.fs index 45fb1ab34f7..309b858e4d2 100644 --- a/src/Compiler/Driver/ParseAndCheckInputs.fs +++ b/src/Compiler/Driver/ParseAndCheckInputs.fs @@ -6,6 +6,7 @@ module internal FSharp.Compiler.ParseAndCheckInputs open System open System.Diagnostics open System.IO +open System.Threading open System.Collections.Generic open Internal.Utilities.Collections @@ -1151,7 +1152,6 @@ let AddCheckResultsToTcState (tcGlobals, amap, hadSig, prefixPathOpt, tcSink, tcImplEnv, qualNameOfFile, implFileSigType) (tcState: TcState) = - let rootImpls = Zset.add qualNameOfFile tcState.tcsRootImpls // Only add it to the environment if it didn't have a signature @@ -1194,6 +1194,8 @@ let AddCheckResultsToTcState ccuSigForFile, tcState +type PartialResult = TcEnv * TopAttribs * CheckedImplFile option * ModuleOrNamespaceType + /// Typecheck a single file (or interactive entry into F# Interactive) let CheckOneInputAux ( @@ -1218,9 +1220,14 @@ let CheckOneInputAux let m = inp.Range let amap = tcImports.GetImportMap() + let conditionalDefines = + if tcConfig.noConditionalErasure then + None + else + Some tcConfig.conditionalDefines + match inp with | ParsedInput.SigFile file -> - let qualNameOfFile = file.QualifiedName // Check if we've seen this top module signature before. @@ -1231,12 +1238,6 @@ let CheckOneInputAux if Zset.contains qualNameOfFile tcState.tcsRootImpls then errorR (Error(FSComp.SR.buildImplementationAlreadyGivenDetail (qualNameOfFile.Text), m)) - let conditionalDefines = - if tcConfig.noConditionalErasure then - None - else - Some tcConfig.conditionalDefines - // Typecheck the signature file let! tcEnv, sigFileType, createsGeneratedProvidedTypes = CheckOneSigFile @@ -1284,12 +1285,6 @@ let CheckOneInputAux if Zset.contains qualNameOfFile tcState.tcsRootImpls then errorR (Error(FSComp.SR.buildImplementationAlreadyGiven (qualNameOfFile.Text), m)) - let conditionalDefines = - if tcConfig.noConditionalErasure then - None - else - Some tcConfig.conditionalDefines - let hadSig = rootSigOpt.IsSome match rootSigOpt with @@ -1350,17 +1345,16 @@ let CheckOneInputAux /// Typecheck a single file (or interactive entry into F# Interactive). If skipImplIfSigExists is set to true /// then implementations with signature files give empty results. let CheckOneInput - ( - checkForErrors, - tcConfig: TcConfig, - tcImports: TcImports, - tcGlobals, - prefixPathOpt, - tcSink, - tcState: TcState, - input: ParsedInput, - skipImplIfSigExists: bool - ) = + ((checkForErrors, + tcConfig: TcConfig, + tcImports: TcImports, + tcGlobals, + prefixPathOpt, + tcSink, + tcState: TcState, + input: ParsedInput, + skipImplIfSigExists: bool): (unit -> bool) * TcConfig * TcImports * TcGlobals * LongIdent option * TcResultsSink * TcState * ParsedInput * bool) + : Cancellable = cancellable { let! 
partialResult, tcState = CheckOneInputAux(checkForErrors, tcConfig, tcImports, tcGlobals, prefixPathOpt, tcSink, tcState, input, skipImplIfSigExists) @@ -1427,126 +1421,369 @@ let CheckMultipleInputsSequential (ctok, checkForErrors, tcConfig, tcImports, tc (tcState, inputs) ||> List.mapFold (CheckOneInputEntry(ctok, checkForErrors, tcConfig, tcImports, tcGlobals, prefixPathOpt, false)) -/// Use parallel checking of implementation files that have signature files -let CheckMultipleInputsInParallel - ( - ctok, - checkForErrors, - tcConfig: TcConfig, - tcImports, - tcGlobals, - prefixPathOpt, - tcState, - eagerFormat, - inputs - ) = +open FSharp.Compiler.GraphChecking - let diagnosticsLogger = DiagnosticsThreadStatics.DiagnosticsLogger +type State = TcState * bool +type FinalFileResult = TcEnv * TopAttribs * CheckedImplFile option * ModuleOrNamespaceType - // We create one CapturingDiagnosticLogger for each file we are processing and - // ensure the diagnostics are presented in deterministic order. - // - // eagerFormat is used to format diagnostics as they are emitted, just as they would be in the command-line - // compiler. This is necessary because some formatting of diagnostics is dependent on the - // type inference state at precisely the time the diagnostic is emitted. - UseMultipleDiagnosticLoggers (inputs, diagnosticsLogger, Some eagerFormat) (fun inputsWithLoggers -> +/// Auxiliary type for re-using signature information in TcEnvFromImpls. +/// +/// TcState has two typing environments: TcEnvFromSignatures && TcEnvFromImpls +/// When type checking a file, depending on the type (implementation or signature), it will use one of these typing environments (TcEnv). +/// Checking a file will populate the respective TcEnv. +/// +/// When a file has a dependencies, the information of the signature file in case a pair (implementation file backed by a signature) will suffice to type-check that file. +/// Example: if `B.fs` has a dependency on `A`, the information of `A.fsi` is enough for `B.fs` to type-check, on condition that information is available in the TcEnvFromImpls. +/// We introduce a special ArtificialImplFile node in the graph to satisfy this. `B.fs -> [ A.fsi ]` becomes `B.fs -> [ ArtificialImplFile A ]. +/// The `ArtificialImplFile A` node will duplicate the signature information which A.fsi provided earlier. +/// Processing a `ArtificialImplFile` node will add the information from the TcEnvFromSignatures to the TcEnvFromImpls. +/// This means `A` will be known in both TcEnvs and therefor `B.fs` can be type-checked. +/// By doing this, we can speed up the graph processing as type checking a signature file is less expensive than its implementation counterpart. +/// +/// When we need to actually type-check an implementation file backed by a signature, we cannot have the duplicate information of the signature file present in TcEnvFromImpls. +/// Example `A.fs -> [ A.fsi ]`. An implementation file always depends on its signature. +/// Type-checking `A.fs` will add the actual information to TcEnvFromImpls and we do not depend on the `ArtificialImplFile A` for `A.fs`. +/// +/// In order to deal correctly with the `ArtificialImplFile` logic, we need to transform the resolved graph to contain the additional pair nodes. +/// After we have type-checked the graph, we exclude the ArtificialImplFile nodes as they are not actual physical files in our project. +[] +type NodeToTypeCheck = + /// A real physical file in the current project. 
+ /// This can be either an implementation or a signature file. + | PhysicalFile of fileIndex: FileIndex + /// An artificial node that will add the earlier processed signature information to the TcEnvFromImpls. + /// Dependents of this type of node will perceive that a file is known in both TcEnvFromSignatures and TcEnvFromImpls, + /// even though the actual implementation file was not type-checked. + | ArtificialImplFile of signatureFileIndex: FileIndex + +let folder (state: State) (finisher: Finisher) : FinalFileResult * State = finisher.Invoke(state) - // Equip loggers to locally filter w.r.t. scope pragmas in each input - let inputsWithLoggers = - inputsWithLoggers - |> List.map (fun (input, oldLogger) -> - let logger = DiagnosticsLoggerForInput(tcConfig, input, oldLogger) - input, logger) +/// Typecheck a single file (or interactive entry into F# Interactive). +/// Returns a callback function that takes a `TcState` and will add the checked result to it. +let CheckOneInputWithCallback + ((checkForErrors, + tcConfig: TcConfig, + tcImports: TcImports, + tcGlobals, + prefixPathOpt, + tcSink, + tcState: TcState, + inp: ParsedInput, + _skipImplIfSigExists: bool): (unit -> bool) * TcConfig * TcImports * TcGlobals * LongIdent option * TcResultsSink * TcState * ParsedInput * bool) + : Cancellable> = + cancellable { + try + CheckSimulateException tcConfig - // In the first linear part of parallel checking, we use a 'checkForErrors' that checks either for errors - // somewhere in the files processed prior to each one, or in the processing of this particular file. - let priorErrors = checkForErrors () + let m = inp.Range + let amap = tcImports.GetImportMap() - // Do the first linear phase, checking all signatures and any implementation files that don't have a signature. - // Implementation files that do have a signature will result in a Choice2Of2 indicating to next do some of the - // checking in parallel. - let partialResults, (tcState, _) = - ((tcState, priorErrors), inputsWithLoggers) - ||> List.mapFold (fun (tcState, priorErrors) (input, logger) -> - use _ = UseDiagnosticsLogger logger + let conditionalDefines = + if tcConfig.noConditionalErasure then + None + else + Some tcConfig.conditionalDefines + + match inp with + | ParsedInput.SigFile file -> + let qualNameOfFile = file.QualifiedName + + // Check if we've seen this top module signature before. + if Zmap.mem qualNameOfFile tcState.tcsRootSigs then + errorR (Error(FSComp.SR.buildSignatureAlreadySpecified (qualNameOfFile.Text), m.StartRange)) + + // Check if the implementation came first in compilation order + if Zset.contains qualNameOfFile tcState.tcsRootImpls then + errorR (Error(FSComp.SR.buildImplementationAlreadyGivenDetail (qualNameOfFile.Text), m)) + + // Typecheck the signature file + let!
tcEnv, sigFileType, createsGeneratedProvidedTypes = + CheckOneSigFile + (tcGlobals, + amap, + tcState.tcsCcu, + checkForErrors, + conditionalDefines, + tcSink, + tcConfig.internalTestSpanStackReferring, + tcConfig.diagnosticsOptions) + tcState.tcsTcSigEnv + file + + // Open the prefixPath for fsi.exe + let tcEnv, _openDecls1 = + match prefixPathOpt with + | None -> tcEnv, [] + | Some prefixPath -> + let m = qualNameOfFile.Range + TcOpenModuleOrNamespaceDecl tcSink tcGlobals amap m tcEnv (prefixPath, m) + + return + Finisher(fun tcState -> + let rootSigs = Zmap.add qualNameOfFile sigFileType tcState.tcsRootSigs + + let tcSigEnv = + AddLocalRootModuleOrNamespace TcResultsSink.NoSink tcGlobals amap m tcState.tcsTcSigEnv sigFileType + + // Add the signature to the signature env (unless it had an explicit signature) + let ccuSigForFile = CombineCcuContentFragments [ sigFileType; tcState.tcsCcuSig ] + + let partialResult = tcEnv, EmptyTopAttrs, None, ccuSigForFile - let checkForErrors2 () = priorErrors || (logger.ErrorCount > 0) + let tcState = + { tcState with + tcsTcSigEnv = tcSigEnv + tcsRootSigs = rootSigs + tcsCreatesGeneratedProvidedTypes = tcState.tcsCreatesGeneratedProvidedTypes || createsGeneratedProvidedTypes + } - let partialResult, tcState = - CheckOneInputAux( - checkForErrors2, - tcConfig, - tcImports, + partialResult, tcState) + + | ParsedInput.ImplFile file -> + let qualNameOfFile = file.QualifiedName + + // Check if we've got an interface for this fragment + let rootSigOpt = tcState.tcsRootSigs.TryFind qualNameOfFile + + // Typecheck the implementation file + let! topAttrs, implFile, tcEnvAtEnd, createsGeneratedProvidedTypes = + CheckOneImplFile( tcGlobals, - prefixPathOpt, - TcResultsSink.NoSink, - tcState, - input, - true + amap, + tcState.tcsCcu, + tcState.tcsImplicitOpenDeclarations, + checkForErrors, + conditionalDefines, + tcSink, + tcConfig.internalTestSpanStackReferring, + tcState.tcsTcImplEnv, + rootSigOpt, + file, + tcConfig.diagnosticsOptions ) - |> Cancellable.runWithoutCancellation - let priorErrors = checkForErrors2 () - partialResult, (tcState, priorErrors)) + return + Finisher(fun tcState -> + // Check if we've already seen an implementation for this fragment + if Zset.contains qualNameOfFile tcState.tcsRootImpls then + errorR (Error(FSComp.SR.buildImplementationAlreadyGiven (qualNameOfFile.Text), m)) + + let ccuSigForFile, fsTcState = + AddCheckResultsToTcState + (tcGlobals, amap, false, prefixPathOpt, tcSink, tcState.tcsTcImplEnv, qualNameOfFile, implFile.Signature) + tcState + + let partialResult = tcEnvAtEnd, topAttrs, Some implFile, ccuSigForFile + + let tcState = + { fsTcState with + tcsCreatesGeneratedProvidedTypes = + fsTcState.tcsCreatesGeneratedProvidedTypes || createsGeneratedProvidedTypes + } + + partialResult, tcState) + + with e -> + errorRecovery e range0 + return Finisher(fun tcState -> (tcState.TcEnvFromSignatures, EmptyTopAttrs, None, tcState.tcsCcuSig), tcState) + } + +let AddSignatureResultToTcImplEnv (tcImports: TcImports, tcGlobals, prefixPathOpt, tcSink, tcState, input: ParsedInput) = + let qualNameOfFile = input.QualifiedName + let rootSigOpt = tcState.tcsRootSigs.TryFind qualNameOfFile + + match rootSigOpt with + | None -> failwithf $"No signature data was found for %s{input.FileName}" + | Some rootSig -> + fun (tcState: TcState) -> + let amap = tcImports.GetImportMap() + + // Add the results of type checking the signature file to the TcEnv of implementation files. 
+ let ccuSigForFile, tcState = + AddCheckResultsToTcState + (tcGlobals, amap, true, prefixPathOpt, tcSink, tcState.tcsTcImplEnv, qualNameOfFile, rootSig) + tcState + + // This partial result will be discarded in the end of the graph resolution. + let partialResult: PartialResult = + tcState.tcsTcSigEnv, EmptyTopAttrs, None, ccuSigForFile - // Do the parallel phase, checking all implementation files that did have a signature, in parallel. - let results, createsGeneratedProvidedTypesFlags = + partialResult, tcState - List.zip partialResults inputsWithLoggers +/// Constructs a file dependency graph and type-checks the files in parallel where possible. +let CheckMultipleInputsUsingGraphMode + ((ctok, checkForErrors, tcConfig: TcConfig, tcImports: TcImports, tcGlobals, prefixPathOpt, tcState, eagerFormat, inputs): 'a * (unit -> bool) * TcConfig * TcImports * TcGlobals * LongIdent option * TcState * (PhasedDiagnostic -> PhasedDiagnostic) * ParsedInput list) + : FinalFileResult list * TcState = + use cts = new CancellationTokenSource() + + let sourceFiles: FileInProject array = + inputs + |> List.toArray + |> Array.mapi (fun idx (input: ParsedInput) -> + { + Idx = idx + FileName = input.FileName + ParsedInput = input + }) + + let filePairs = FilePairMap(sourceFiles) + + let graph = + DependencyResolution.mkGraph tcConfig.compilingFSharpCore filePairs sourceFiles + + let nodeGraph = + let mkArtificialImplFile n = NodeToTypeCheck.ArtificialImplFile n + let mkPhysicalFile n = NodeToTypeCheck.PhysicalFile n + + /// Map any signature dependencies to the ArtificialImplFile counterparts, + /// unless the signature dependency is the backing file of the current (implementation) file. + let mapDependencies idx deps = + Array.map + (fun dep -> + if filePairs.IsSignature dep then + let implIdx = filePairs.GetImplementationIndex dep + + if implIdx = idx then + // This is the matching signature for the implementation. + // Retain the direct dependency onto the signature file. + mkPhysicalFile dep + else + mkArtificialImplFile dep + else + mkPhysicalFile dep) + deps + + // Transform the graph to include ArtificialImplFile nodes when necessary. + graph + |> Seq.collect (fun (KeyValue (fileIdx, deps)) -> + if filePairs.IsSignature fileIdx then + // Add an additional ArtificialImplFile node for the signature file. + [| + // Mark the current file as physical and map the dependencies. + mkPhysicalFile fileIdx, mapDependencies fileIdx deps + // Introduce a new node that depends on the signature. + mkArtificialImplFile fileIdx, [| mkPhysicalFile fileIdx |] + |] + else + [| mkPhysicalFile fileIdx, mapDependencies fileIdx deps |]) + |> Graph.make + + // Persist the graph to a Mermaid diagram if specified. + if tcConfig.typeCheckingConfig.DumpGraph then + tcConfig.outputFile + |> Option.iter (fun outputFile -> + let outputFile = FileSystem.GetFullPathShim(outputFile) + let graphFile = FileSystem.ChangeExtensionShim(outputFile, ".graph.md") + + graph + |> Graph.map (fun idx -> + let friendlyFileName = + sourceFiles[idx] + .FileName.Replace(tcConfig.implicitIncludeDir, "") + .TrimStart([| '\\'; '/' |]) + + (idx, friendlyFileName)) + |> Graph.serialiseToMermaid graphFile) + + let _ = ctok // TODO Use it + let diagnosticsLogger = DiagnosticsThreadStatics.DiagnosticsLogger + + // In the first linear part of parallel checking, we use a 'checkForErrors' that checks either for errors + // somewhere in the files processed prior to each one, or in the processing of this particular file. 
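// Editorial note (illustrative only, not part of the change): for a small project consisting of A.fsi, A.fs and B.fs,
// where B.fs uses something exposed by A, the transformation above would turn the resolved graph
//     0 (A.fsi) -> [||]      1 (A.fs) -> [| 0 |]      2 (B.fs) -> [| 0 |]
// into the node graph
//     PhysicalFile 0       -> [||]
//     ArtificialImplFile 0 -> [| PhysicalFile 0 |]        // extra node introduced for the signature file
//     PhysicalFile 1       -> [| PhysicalFile 0 |]        // A.fs keeps the direct link to its own signature
//     PhysicalFile 2       -> [| ArtificialImplFile 0 |]  // B.fs only needs the signature information of A
// Processing ArtificialImplFile 0 copies the checked signature of A into TcEnvFromImpls, so B.fs can be
// checked without waiting for A.fs. (The file indexes are assumptions made for this example.)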
+ let priorErrors = checkForErrors () + + let processArtificialImplFile (input: ParsedInput) ((currentTcState, _currentPriorErrors): State) : Finisher = + Finisher(fun (state: State) -> + let tcState, currentPriorErrors = state + + let f = + // Retrieve the type-checked signature information and add it to the TcEnvFromImpls. + AddSignatureResultToTcImplEnv(tcImports, tcGlobals, prefixPathOpt, TcResultsSink.NoSink, currentTcState, input) + + // The `partialResult` will be excluded at the end of `GraphProcessing.processGraph`. + // The important thing is that `nextTcState` will populated the necessary information to TcEnvFromImpls. + let partialResult, nextTcState = f tcState + partialResult, (nextTcState, currentPriorErrors)) + + let processFile + ((input, logger): ParsedInput * DiagnosticsLogger) + ((currentTcState, _currentPriorErrors): State) + : Finisher = + use _ = UseDiagnosticsLogger logger + let checkForErrors2 () = priorErrors || (logger.ErrorCount > 0) + let tcSink = TcResultsSink.NoSink + + let finisher = + CheckOneInputWithCallback(checkForErrors2, tcConfig, tcImports, tcGlobals, prefixPathOpt, tcSink, currentTcState, input, false) + |> Cancellable.runWithoutCancellation + + Finisher(fun (state: State) -> + let tcState, priorErrors = state + let (partialResult: PartialResult, tcState) = finisher.Invoke(tcState) + let hasErrors = logger.ErrorCount > 0 + let priorOrCurrentErrors = priorErrors || hasErrors + let state: State = tcState, priorOrCurrentErrors + partialResult, state) + + UseMultipleDiagnosticLoggers (inputs, diagnosticsLogger, Some eagerFormat) (fun inputsWithLoggers -> + // Equip loggers to locally filter w.r.t. scope pragmas in each input + let inputsWithLoggers = + inputsWithLoggers |> List.toArray - |> ArrayParallel.map (fun (partialResult, (_, logger)) -> - use _ = UseDiagnosticsLogger logger - use _ = UseBuildPhase BuildPhase.TypeCheck + |> Array.map (fun (input, oldLogger) -> + let logger = DiagnosticsLoggerForInput(tcConfig, input, oldLogger) + input, logger) - RequireCompilationThread ctok + let processFile (node: NodeToTypeCheck) (state: State) : Finisher = + match node with + | NodeToTypeCheck.ArtificialImplFile idx -> + let parsedInput, _ = inputsWithLoggers[idx] + processArtificialImplFile parsedInput state + | NodeToTypeCheck.PhysicalFile idx -> + let parsedInput, logger = inputsWithLoggers[idx] + processFile (parsedInput, logger) state - match partialResult with - | Choice1Of2 result -> result, false - | Choice2Of2 (amap, conditionalDefines, rootSig, priorErrors, file, tcStateForImplFile, ccuSigForFile) -> + let state: State = tcState, priorErrors - // In the first linear part of parallel checking, we use a 'checkForErrors' that checks either for errors - // somewhere in the files processed prior to this one, including from the first phase, or in the processing - // of this particular file. 
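// Editorial sketch (hypothetical code, not part of the change): the Finisher delegate used above splits the work
// for each node into an expensive part that may run concurrently and a cheap state update that is applied serially:
//
//     let check (input: int) : Finisher<int list, string> =
//         let expensive = string input                   // stand-in for the costly, parallelisable work
//         Finisher(fun acc -> expensive, input :: acc)   // cheap fold of the result into the shared state
//
//     let finishers = [ 1; 2; 3 ] |> List.map check      // in the real code this phase runs per graph node, in parallel
//
//     let results, finalState =
//         ([], finishers)
//         ||> List.mapFold (fun acc (f: Finisher<int list, string>) -> f.Invoke acc)
//
// processFile follows the same shape: CheckOneInputWithCallback performs the type checking and the returned
// Finisher merges the PartialResult into the shared State via `folder`.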
- let checkForErrors2 () = priorErrors || (logger.ErrorCount > 0) + let finalStateItemChooser node = + match node with + | NodeToTypeCheck.ArtificialImplFile _ -> None + | NodeToTypeCheck.PhysicalFile file -> Some file - let topAttrs, implFile, tcEnvAtEnd, createsGeneratedProvidedTypes = - CheckOneImplFile( - tcGlobals, - amap, - tcStateForImplFile.tcsCcu, - tcStateForImplFile.tcsImplicitOpenDeclarations, - checkForErrors2, - conditionalDefines, - TcResultsSink.NoSink, - tcConfig.internalTestSpanStackReferring, - tcStateForImplFile.tcsTcImplEnv, - Some rootSig, - file, - tcConfig.diagnosticsOptions - ) - |> Cancellable.runWithoutCancellation + let partialResults, (tcState, _) = + TypeCheckingGraphProcessing.processTypeCheckingGraph + nodeGraph + processFile + folder + finalStateItemChooser + state + cts.Token - let result = (tcEnvAtEnd, topAttrs, Some implFile, ccuSigForFile) - result, createsGeneratedProvidedTypes) - |> Array.toList - |> List.unzip - - let tcState = - { tcState with - tcsCreatesGeneratedProvidedTypes = - tcState.tcsCreatesGeneratedProvidedTypes - || (createsGeneratedProvidedTypesFlags |> List.exists id) - } + let partialResults = + partialResults + // Bring back the original, index-based file order. + |> List.sortBy fst + |> List.map snd - results, tcState) + partialResults, tcState) let CheckClosedInputSet (ctok, checkForErrors, tcConfig: TcConfig, tcImports, tcGlobals, prefixPathOpt, tcState, eagerFormat, inputs) = // tcEnvAtEndOfLastFile is the environment required by fsi.exe when incrementally adding definitions let results, tcState = - if tcConfig.parallelCheckingWithSignatureFiles then - CheckMultipleInputsInParallel(ctok, checkForErrors, tcConfig, tcImports, tcGlobals, prefixPathOpt, tcState, eagerFormat, inputs) - else - CheckMultipleInputsSequential(ctok, checkForErrors, tcConfig, tcImports, tcGlobals, prefixPathOpt, tcState, inputs) + match tcConfig.typeCheckingConfig.Mode with + | TypeCheckingMode.Graph when (not tcConfig.isInteractive && not tcConfig.deterministic) -> + CheckMultipleInputsUsingGraphMode( + ctok, + checkForErrors, + tcConfig, + tcImports, + tcGlobals, + prefixPathOpt, + tcState, + eagerFormat, + inputs + ) + | _ -> CheckMultipleInputsSequential(ctok, checkForErrors, tcConfig, tcImports, tcGlobals, prefixPathOpt, tcState, inputs) let (tcEnvAtEndOfLastFile, topAttrs, implFiles, _), tcState = CheckMultipleInputsFinish(results, tcState) diff --git a/src/Compiler/Driver/ParseAndCheckInputs.fsi b/src/Compiler/Driver/ParseAndCheckInputs.fsi index 13ed6801ad2..c9a7f69a338 100644 --- a/src/Compiler/Driver/ParseAndCheckInputs.fsi +++ b/src/Compiler/Driver/ParseAndCheckInputs.fsi @@ -113,7 +113,6 @@ val GetInitialTcEnv: assemblyName: string * range * TcConfig * TcImports * TcGlo /// Represents the incremental type checking state for a set of inputs [] type TcState = - /// The CcuThunk for the current assembly being checked member Ccu: CcuThunk diff --git a/src/Compiler/FSComp.txt b/src/Compiler/FSComp.txt index 063a6b973c8..90666571fb6 100644 --- a/src/Compiler/FSComp.txt +++ b/src/Compiler/FSComp.txt @@ -1677,3 +1677,4 @@ featureEscapeBracesInFormattableString,"Escapes curly braces before calling Form 3558,chkExplicitFieldsDeclarationsOnStaticClasses,"If a type uses both [] and [] attributes, it means it is static. Explicit field declarations are not allowed." 3559,typrelNeverRefinedAwayFromTop,"A type has been implicitly inferred as 'obj', which may be unintended. Consider adding explicit type annotations. 
You can disable this warning by using '#nowarn \"3559\"' or '--nowarn:3559'." 3560,tcCopyAndUpdateRecordChangesAllFields,"This copy-and-update record expression changes all fields of record type '%s'. Consider using the record construction syntax instead." +3561,chkAutoOpenAttributeInTypeAbbrev,"FSharp.Core.AutoOpenAttribute should not be aliased." diff --git a/src/Compiler/FSharp.Compiler.Service.fsproj b/src/Compiler/FSharp.Compiler.Service.fsproj index 692ce647354..2558f69a431 100644 --- a/src/Compiler/FSharp.Compiler.Service.fsproj +++ b/src/Compiler/FSharp.Compiler.Service.fsproj @@ -381,6 +381,23 @@ + + + + + + + + + + + + + + + + + diff --git a/src/Compiler/Service/IncrementalBuild.fs b/src/Compiler/Service/IncrementalBuild.fs index a15fc768778..87d3bdd29e9 100644 --- a/src/Compiler/Service/IncrementalBuild.fs +++ b/src/Compiler/Service/IncrementalBuild.fs @@ -1459,7 +1459,6 @@ type IncrementalBuilder(initialState: IncrementalBuilderInitialState, state: Inc keepAllBackgroundSymbolUses, enableBackgroundItemKeyStoreAndSemanticClassification, enablePartialTypeChecking: bool, - enableParallelCheckingWithSignatureFiles: bool, dependencyProvider, parallelReferenceResolution, captureIdentifiersWhenParsing, @@ -1545,7 +1544,6 @@ type IncrementalBuilder(initialState: IncrementalBuilderInitialState, state: Inc } |> Some - tcConfigB.parallelCheckingWithSignatureFiles <- enableParallelCheckingWithSignatureFiles tcConfigB.parallelReferenceResolution <- parallelReferenceResolution tcConfigB.captureIdentifiersWhenParsing <- captureIdentifiersWhenParsing diff --git a/src/Compiler/Service/IncrementalBuild.fsi b/src/Compiler/Service/IncrementalBuild.fsi index c527ca8a70b..ee9b06380eb 100644 --- a/src/Compiler/Service/IncrementalBuild.fsi +++ b/src/Compiler/Service/IncrementalBuild.fsi @@ -265,7 +265,6 @@ type internal IncrementalBuilder = keepAllBackgroundSymbolUses: bool * enableBackgroundItemKeyStoreAndSemanticClassification: bool * enablePartialTypeChecking: bool * - enableParallelCheckingWithSignatureFiles: bool * dependencyProvider: DependencyProvider option * parallelReferenceResolution: ParallelReferenceResolution * captureIdentifiersWhenParsing: bool * diff --git a/src/Compiler/Service/service.fs b/src/Compiler/Service/service.fs index ad84d6e4b69..65537dd67b8 100644 --- a/src/Compiler/Service/service.fs +++ b/src/Compiler/Service/service.fs @@ -192,7 +192,6 @@ type BackgroundCompiler keepAllBackgroundSymbolUses, enableBackgroundItemKeyStoreAndSemanticClassification, enablePartialTypeChecking, - enableParallelCheckingWithSignatureFiles, parallelReferenceResolution, captureIdentifiersWhenParsing, getSource: (string -> ISourceText option) option, @@ -326,7 +325,6 @@ type BackgroundCompiler keepAllBackgroundSymbolUses, enableBackgroundItemKeyStoreAndSemanticClassification, enablePartialTypeChecking, - enableParallelCheckingWithSignatureFiles, dependencyProvider, parallelReferenceResolution, captureIdentifiersWhenParsing, @@ -1262,7 +1260,6 @@ type FSharpChecker keepAllBackgroundSymbolUses, enableBackgroundItemKeyStoreAndSemanticClassification, enablePartialTypeChecking, - enableParallelCheckingWithSignatureFiles, parallelReferenceResolution, captureIdentifiersWhenParsing, getSource, @@ -1280,7 +1277,6 @@ type FSharpChecker keepAllBackgroundSymbolUses, enableBackgroundItemKeyStoreAndSemanticClassification, enablePartialTypeChecking, - enableParallelCheckingWithSignatureFiles, parallelReferenceResolution, captureIdentifiersWhenParsing, getSource, @@ -1326,7 +1322,6 @@ type FSharpChecker 
?keepAllBackgroundSymbolUses, ?enableBackgroundItemKeyStoreAndSemanticClassification, ?enablePartialTypeChecking, - ?enableParallelCheckingWithSignatureFiles, ?parallelReferenceResolution: bool, ?captureIdentifiersWhenParsing: bool, ?documentSource: DocumentSource @@ -1350,7 +1345,6 @@ type FSharpChecker defaultArg enableBackgroundItemKeyStoreAndSemanticClassification false let enablePartialTypeChecking = defaultArg enablePartialTypeChecking false - let enableParallelCheckingWithSignatureFiles = defaultArg enableParallelCheckingWithSignatureFiles false let captureIdentifiersWhenParsing = defaultArg captureIdentifiersWhenParsing false let useChangeNotifications = @@ -1373,7 +1367,6 @@ type FSharpChecker keepAllBackgroundSymbolUses, enableBackgroundItemKeyStoreAndSemanticClassification, enablePartialTypeChecking, - enableParallelCheckingWithSignatureFiles, parallelReferenceResolution, captureIdentifiersWhenParsing, (match documentSource with diff --git a/src/Compiler/Service/service.fsi b/src/Compiler/Service/service.fsi index 40b6b978690..52a355c9162 100644 --- a/src/Compiler/Service/service.fsi +++ b/src/Compiler/Service/service.fsi @@ -38,7 +38,6 @@ type public FSharpChecker = /// Indicate whether all symbol uses should be kept in background checking /// Indicates whether a table of symbol keys should be kept for background compilation /// Indicates whether to perform partial type checking. Cannot be set to true if keepAssmeblyContents is true. If set to true, can cause duplicate type-checks when richer information on a file is needed, but can skip background type-checking entirely on implementation files with signature files. - /// Type check implementation files that are backed by a signature file in parallel. /// Indicates whether to resolve references in parallel. /// When set to true we create a set of all identifiers for each parsed file which can be used to speed up finding references. /// Default: FileSystem. You can use Custom source to provide a function that will return the source for a given file path instead of reading it from the file system. Note that with this option the FSharpChecker will also not monitor the file system for file changes. It will expect to be notified of changes via the NotifyFileChanged method. 
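For consumers of the public API, the change above simply removes the optional flag from FSharpChecker.Create; whether graph-based type checking is used is now governed by the compiler configuration (tcConfig.typeCheckingConfig.Mode) rather than a checker-level switch. A minimal caller-side sketch (editorial, assuming no other arguments are involved):

    // before this change (the named optional argument no longer exists afterwards):
    // let checker = FSharpChecker.Create(enableParallelCheckingWithSignatureFiles = true)

    // after this change:
    let checker = FSharpChecker.Create()

    // for fsc-style compilation, graph-based checking can be exercised via the test flag
    // "--test:GraphBasedChecking" (see CompilationTests.fs further below).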
@@ -52,7 +51,6 @@ type public FSharpChecker = ?keepAllBackgroundSymbolUses: bool * ?enableBackgroundItemKeyStoreAndSemanticClassification: bool * ?enablePartialTypeChecking: bool * - ?enableParallelCheckingWithSignatureFiles: bool * ?parallelReferenceResolution: bool * ?captureIdentifiersWhenParsing: bool * [] ?documentSource: DocumentSource -> diff --git a/src/Compiler/TypedTree/TypeProviders.fs b/src/Compiler/TypedTree/TypeProviders.fs index 02f6fafd05d..08d2eda28d7 100644 --- a/src/Compiler/TypedTree/TypeProviders.fs +++ b/src/Compiler/TypedTree/TypeProviders.fs @@ -136,7 +136,7 @@ let CreateTypeProvider ( SystemRuntimeAssemblyVersion = systemRuntimeAssemblyVersion) #else TypeProviderConfig(systemRuntimeContainsType, - getReferencedAssemblies, + ReferencedAssemblies=getReferencedAssemblies(), ResolutionFolder=resolutionEnvironment.ResolutionFolder, RuntimeAssembly=runtimeAssemblyPath, TemporaryFolder=resolutionEnvironment.TemporaryFolder, diff --git a/src/Compiler/Utilities/FileSystem.fs b/src/Compiler/Utilities/FileSystem.fs index 1614400bb48..5148efd8ac7 100644 --- a/src/Compiler/Utilities/FileSystem.fs +++ b/src/Compiler/Utilities/FileSystem.fs @@ -508,6 +508,8 @@ type IFileSystem = abstract IsStableFileHeuristic: fileName: string -> bool + abstract ChangeExtensionShim: path: string * extension: string -> string + // note: do not add members if you can put generic implementation under StreamExtensions below. [] @@ -698,6 +700,10 @@ type DefaultFileSystem() as this = || directory.Contains("packages\\") || directory.Contains("lib/mono/") + abstract ChangeExtensionShim: path: string * extension: string -> string + + default _.ChangeExtensionShim(path: string, extension: string) : string = Path.ChangeExtension(path, extension) + interface IFileSystem with member _.AssemblyLoader = this.AssemblyLoader @@ -734,6 +740,9 @@ type DefaultFileSystem() as this = member _.EnumerateDirectoriesShim(path: string) = this.EnumerateDirectoriesShim path member _.IsStableFileHeuristic(fileName: string) = this.IsStableFileHeuristic fileName + member _.ChangeExtensionShim(path: string, extension: string) = + this.ChangeExtensionShim(path, extension) + [] module public StreamExtensions = let utf8noBOM = UTF8Encoding(false, true) :> Encoding diff --git a/src/Compiler/Utilities/FileSystem.fsi b/src/Compiler/Utilities/FileSystem.fsi index 9b23e58a3f6..847a86ac958 100644 --- a/src/Compiler/Utilities/FileSystem.fsi +++ b/src/Compiler/Utilities/FileSystem.fsi @@ -215,6 +215,9 @@ type public IFileSystem = /// Used to determine if a file will not be subject to deletion during the lifetime of a typical client process. 
abstract IsStableFileHeuristic: fileName: string -> bool + /// A shim over Path.ChangeExtension + abstract ChangeExtensionShim: path: string * extension: string -> string + /// Represents a default (memory-mapped) implementation of the file system type DefaultFileSystem = /// Create a default implementation of the file system @@ -284,6 +287,9 @@ type DefaultFileSystem = abstract IsStableFileHeuristic: fileName: string -> bool override IsStableFileHeuristic: fileName: string -> bool + abstract ChangeExtensionShim: path: string * extension: string -> string + override ChangeExtensionShim: path: string * extension: string -> string + interface IFileSystem [] diff --git a/src/Compiler/xlf/FSComp.txt.cs.xlf b/src/Compiler/xlf/FSComp.txt.cs.xlf index b6ad7c62f3a..6fa872a8aac 100644 --- a/src/Compiler/xlf/FSComp.txt.cs.xlf +++ b/src/Compiler/xlf/FSComp.txt.cs.xlf @@ -32,6 +32,11 @@ Pokud typ používá atribut [<Sealed>] i [<AbstractClass>], znamená to, že je statický. Další konstruktor není povolený. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. Pokud typ používá atribut [<Sealed>] i [<AbstractClass>], znamená to, že je statický. Konstruktor s argumenty není povolený. diff --git a/src/Compiler/xlf/FSComp.txt.de.xlf b/src/Compiler/xlf/FSComp.txt.de.xlf index 52f3d52cf26..42df3dc8072 100644 --- a/src/Compiler/xlf/FSComp.txt.de.xlf +++ b/src/Compiler/xlf/FSComp.txt.de.xlf @@ -32,6 +32,11 @@ Wenn ein Typ sowohl das Attribute [<Sealed>] wie auch [<AbstractClass>] verwendet, bedeutet dies, dass er statisch ist. Ein zusätzlicher Konstruktor ist nicht zulässig. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. Wenn ein Typ sowohl das Attribute [<Sealed>] wie auch [<AbstractClass>] verwendet, bedeutet dies, dass er statisch ist. Der Konstruktor mit Argumenten ist nicht zulässig. diff --git a/src/Compiler/xlf/FSComp.txt.es.xlf b/src/Compiler/xlf/FSComp.txt.es.xlf index 298deb31137..1115e15e19c 100644 --- a/src/Compiler/xlf/FSComp.txt.es.xlf +++ b/src/Compiler/xlf/FSComp.txt.es.xlf @@ -32,6 +32,11 @@ Si un tipo usa los atributos [<Sealed>] y [<AbstractClass>], significa que es estático. No se permite un constructor adicional. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. Si un tipo usa los atributos [<Sealed>] y [<AbstractClass>], significa que es estático. No se permite un constructor con argumentos. diff --git a/src/Compiler/xlf/FSComp.txt.fr.xlf b/src/Compiler/xlf/FSComp.txt.fr.xlf index d41d7f05761..4d2a2603dc8 100644 --- a/src/Compiler/xlf/FSComp.txt.fr.xlf +++ b/src/Compiler/xlf/FSComp.txt.fr.xlf @@ -32,6 +32,11 @@ Si un type utilise les attributs [<Sealed>] et [<AbstractClass>], cela signifie qu’il est statique. Un constructeur supplémentaire n’est pas autorisé. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. 
Si un type utilise les attributs [<Sealed>] et [<AbstractClass>], cela signifie qu’il est statique. Le constructeur avec des arguments n’est pas autorisé. diff --git a/src/Compiler/xlf/FSComp.txt.it.xlf b/src/Compiler/xlf/FSComp.txt.it.xlf index 083f98e4dfa..88496aa294a 100644 --- a/src/Compiler/xlf/FSComp.txt.it.xlf +++ b/src/Compiler/xlf/FSComp.txt.it.xlf @@ -32,6 +32,11 @@ Se un tipo usa entrambi gli attributi [<Sealed>] e [<AbstractClass>], significa che è statico. Non sono ammessi costruttori aggiuntivi. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. Se un tipo usa entrambi gli attributi [<Sealed>] e [<AbstractClass>], significa che è statico. Costruttore con argomenti non consentito. diff --git a/src/Compiler/xlf/FSComp.txt.ja.xlf b/src/Compiler/xlf/FSComp.txt.ja.xlf index 85735c3b4e2..b187de25455 100644 --- a/src/Compiler/xlf/FSComp.txt.ja.xlf +++ b/src/Compiler/xlf/FSComp.txt.ja.xlf @@ -32,6 +32,11 @@ 型が [<Sealed>] と [<AbstractClass>] の両方の属性を使用する場合、それは静的であることを意味します。追加のコンストラクターは許可されていません。 + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. 型が [<Sealed>] と [<AbstractClass>] の両方の属性を使用する場合、それは静的であることを意味します。引数を持つコンストラクターは許可されていません。 diff --git a/src/Compiler/xlf/FSComp.txt.ko.xlf b/src/Compiler/xlf/FSComp.txt.ko.xlf index 2b32dd65949..8217894a157 100644 --- a/src/Compiler/xlf/FSComp.txt.ko.xlf +++ b/src/Compiler/xlf/FSComp.txt.ko.xlf @@ -32,6 +32,11 @@ 형식이 [<Sealed>] 및 [<AbstractClass>] 특성을 모두 사용하는 경우 정적임을 의미합니다. 추가 생성자는 허용되지 않습니다. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. 형식이 [<Sealed>] 및 [<AbstractClass>] 특성을 모두 사용하는 경우 정적임을 의미합니다. 인수가 있는 생성자는 허용되지 않습니다. diff --git a/src/Compiler/xlf/FSComp.txt.pl.xlf b/src/Compiler/xlf/FSComp.txt.pl.xlf index ee6b6afe9ac..a622d07a64f 100644 --- a/src/Compiler/xlf/FSComp.txt.pl.xlf +++ b/src/Compiler/xlf/FSComp.txt.pl.xlf @@ -32,6 +32,11 @@ Jeśli typ używa obu [<Sealed>] i [< AbstractClass>] atrybutów, oznacza to, że jest statyczny. Konstruktor jest również niedozwolony. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. Jeśli typ używa obu [<Sealed>] i [< AbstractClass>] atrybutów, oznacza to, że jest statyczny. Konstruktor z argumentami jest niedozwolony. diff --git a/src/Compiler/xlf/FSComp.txt.pt-BR.xlf b/src/Compiler/xlf/FSComp.txt.pt-BR.xlf index 8ad44f41ea6..2bfb237ffd1 100644 --- a/src/Compiler/xlf/FSComp.txt.pt-BR.xlf +++ b/src/Compiler/xlf/FSComp.txt.pt-BR.xlf @@ -32,6 +32,11 @@ Se um tipo usa os atributos [<Sealed>] e [<AbstractClass>], significa que é estático. Construtor adicional não é permitido. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. 
Se um tipo usa os atributos [<Sealed>] e [<AbstractClass>], significa que é estático. Construtor com argumentos não é permitido. diff --git a/src/Compiler/xlf/FSComp.txt.ru.xlf b/src/Compiler/xlf/FSComp.txt.ru.xlf index 0cbd637dd4d..ab08af6adaf 100644 --- a/src/Compiler/xlf/FSComp.txt.ru.xlf +++ b/src/Compiler/xlf/FSComp.txt.ru.xlf @@ -32,6 +32,11 @@ Если тип использует атрибуты [<Sealed>] и [<AbstractClass>], это означает, что он статический. Дополнительный конструктор не разрешен. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. Если тип использует атрибуты [<Sealed>] и [<AbstractClass>], это означает, что он статический. Конструктор с аргументами не разрешен. diff --git a/src/Compiler/xlf/FSComp.txt.tr.xlf b/src/Compiler/xlf/FSComp.txt.tr.xlf index c9713648da3..d04abc1d159 100644 --- a/src/Compiler/xlf/FSComp.txt.tr.xlf +++ b/src/Compiler/xlf/FSComp.txt.tr.xlf @@ -32,6 +32,11 @@ Bir tür, hem [<Sealed>] hem de [< AbstractClass>] özniteliklerini kullanıyorsa bu statik olduğu anlamına gelir. Ek oluşturucuya izin verilmez. + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. Bir tür, hem [<Sealed>] hem de [< AbstractClass>] özniteliklerini kullanıyorsa bu statik olduğu anlamına gelir.Bağımsız değişkenlere sahip oluşturucuya izin verilmez. diff --git a/src/Compiler/xlf/FSComp.txt.zh-Hans.xlf b/src/Compiler/xlf/FSComp.txt.zh-Hans.xlf index 4072aaad792..9c5b3a8ef99 100644 --- a/src/Compiler/xlf/FSComp.txt.zh-Hans.xlf +++ b/src/Compiler/xlf/FSComp.txt.zh-Hans.xlf @@ -32,6 +32,11 @@ 如果类型同时使用 [<Sealed>] 和 [<AbstractClass>] 属性,则表示它是静态的。不允许使用其他构造函数。 + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. 如果类型同时使用 [<Sealed>] 和 [<AbstractClass>] 属性,则表示它是静态的。不允许使用带参数的构造函数。 diff --git a/src/Compiler/xlf/FSComp.txt.zh-Hant.xlf b/src/Compiler/xlf/FSComp.txt.zh-Hant.xlf index bd871bbba49..0b6971e9350 100644 --- a/src/Compiler/xlf/FSComp.txt.zh-Hant.xlf +++ b/src/Compiler/xlf/FSComp.txt.zh-Hant.xlf @@ -32,6 +32,11 @@ 如果類型同時使用 [<Sealed>] 和 [<AbstractClass>] 屬性,表示其為靜態。不允許其他建構函式。 + + FSharp.Core.AutoOpenAttribute should not be aliased. + FSharp.Core.AutoOpenAttribute should not be aliased. + + If a type uses both [<Sealed>] and [<AbstractClass>] attributes, it means it is static. Constructor with arguments is not allowed. 如果類型同時使用 [<Sealed>] 和 [<AbstractClass>] 屬性,表示其為靜態。不允許具有引數的建構函式。 diff --git a/src/FSharp.Core/prim-types-prelude.fsi b/src/FSharp.Core/prim-types-prelude.fsi index 1704a18e75f..f9f8715c1ff 100644 --- a/src/FSharp.Core/prim-types-prelude.fsi +++ b/src/FSharp.Core/prim-types-prelude.fsi @@ -1,5 +1,9 @@ // Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information. +// NOTE: The exact name of this file is depended upon in graph-based type-checking. +// When renaming the file, the name used there needs to be updated. +// For context see https://github.com/dotnet/fsharp/pull/14494. 
+ #nowarn "35" // This construct is deprecated: the treatment of this operator is now handled directly by the F# compiler and its meaning may not be redefined. #nowarn "61" // The containing type can use null as a representation value for its nullary union case. This member will be compiled as a static member. #nowarn "62" // This construct is for ML compatibility. The syntax module ... : sig .. end is deprecated unless OCaml compatibility is enabled. Consider using module ... = begin .. end'. diff --git a/tests/FSharp.Compiler.ComponentTests/Conformance/BasicTypeAndModuleDefinitions/TypeAbbreviations/WarnForAutoOpenAttributeAlias.fs b/tests/FSharp.Compiler.ComponentTests/Conformance/BasicTypeAndModuleDefinitions/TypeAbbreviations/WarnForAutoOpenAttributeAlias.fs new file mode 100644 index 00000000000..7f9e5edf053 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/Conformance/BasicTypeAndModuleDefinitions/TypeAbbreviations/WarnForAutoOpenAttributeAlias.fs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information. + +namespace FSharp.Compiler.ComponentTests.Conformance.BasicTypeAndModuleDefinitions + +open Xunit +open FSharp.Test.Compiler + +module WarnForAutoOpenAttributeAlias = + [] + let ``Warn user when aliasing FSharp.Core.AutoOpenAttribute`` () = + Fsx """ +type ByItsOwnNatureUnBottledAttribute = Microsoft.FSharp.Core.AutoOpenAttribute + +[] +module Foo = + let bar = 0 +""" + |> withLangVersionPreview + |> compile + |> shouldFail + |> withDiagnostics [ + (Warning 3561, Line 2, Col 6, Line 2, Col 38, "FSharp.Core.AutoOpenAttribute should not be aliased."); + ] diff --git a/tests/FSharp.Compiler.ComponentTests/FSharp.Compiler.ComponentTests.fsproj b/tests/FSharp.Compiler.ComponentTests/FSharp.Compiler.ComponentTests.fsproj index 237aad841d5..4b314625120 100644 --- a/tests/FSharp.Compiler.ComponentTests/FSharp.Compiler.ComponentTests.fsproj +++ b/tests/FSharp.Compiler.ComponentTests/FSharp.Compiler.ComponentTests.fsproj @@ -38,6 +38,7 @@ + @@ -190,7 +191,16 @@ - + + + + + + + + + + @@ -215,6 +225,7 @@ + diff --git a/tests/FSharp.Compiler.ComponentTests/Miscellaneous/GraphTests.fs b/tests/FSharp.Compiler.ComponentTests/Miscellaneous/GraphTests.fs new file mode 100644 index 00000000000..9fb8aa9360a --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/Miscellaneous/GraphTests.fs @@ -0,0 +1,41 @@ +module FSharp.Compiler.ComponentTests.Miscellaneous.GraphTests + +open Xunit +open FSharp.Compiler.GraphChecking + +[] +let ``Create graph from sequence`` () = + let graph = + Graph.make ( + seq { + yield (0, Array.empty) + yield (1, [| 0 |]) + yield (2, [| 0; 1 |]) + } + ) + + Assert.Equal(3, graph.Count) + +[] +let ``Map graph`` () = + let graph = Graph.make [| 0, Array.empty; 1, [| 0 |]; 2, [| 0; 1 |] |] + let mapped = Graph.map string graph + Assert.True(mapped.ContainsKey("0")) + let value = mapped["1"] + Assert.Equal([| "0" |], value) + +[] +let ``Calculate transitive graph`` () = + let graph = Graph.make [ "a", Array.empty; "b", [| "a" |]; "c", [| "b" |] ] + let transitiveGraph = Graph.transitive graph + let values = transitiveGraph["c"] |> Set.ofArray + Assert.Equal>(set [ "a"; "b" ], values) + +[] +let ``Reverse graph`` () = + let graph = Graph.make [ "a", Array.empty; "b", [| "a" |] ] + let reserved = Graph.reverse graph + let valueA = reserved["a"] + Assert.Equal([| "b" |], valueA) + let valueB = reserved["b"] + Assert.Equal(Array.empty, valueB) diff --git 
a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/CompilationFromCmdlineArgsTests.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/CompilationFromCmdlineArgsTests.fs new file mode 100644 index 00000000000..2936ec34fb1 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/CompilationFromCmdlineArgsTests.fs @@ -0,0 +1,56 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.CompilationFromCmdlineArgsTests + +open System +open System.IO +open FSharp.Compiler.CodeAnalysis +open NUnit.Framework +open CompilationTests + +// Point to a generated args.txt file. +// Use scrape.fsx to generate an args.txt from a binary log file. +// The path needs to be absolute. +let localProjects: string list = + [ + @"C:\Projects\fantomas\src\Fantomas.Core\Fantomas.Core.args.txt" + @"C:\Projects\FsAutoComplete\src\FsAutoComplete\FsAutoComplete.args.txt" + @"C:\Projects\fsharp\src\Compiler\FSharp.Compiler.Service.args.txt" + @"C:\Projects\fsharp\tests\FSharp.Compiler.ComponentTests\FSharp.Compiler.ComponentTests.args.txt" + ] + +let checker = FSharpChecker.Create() + +let testCompilerFromArgs (method: Method) (projectArgumentsFilePath: string) : unit = + let oldWorkDir = Environment.CurrentDirectory + + try + Environment.CurrentDirectory <- FileInfo(projectArgumentsFilePath).Directory.FullName + + let args = + let argsFromFile = File.ReadAllLines(projectArgumentsFilePath) + + [| + yield "fsc.exe" + yield! argsFromFile + if not (Array.contains "--times" argsFromFile) then + yield "--times" + yield! methodOptions method + |] + + let diagnostics, exitCode = checker.Compile(args) |> Async.RunSynchronously + + for diag in diagnostics do + printfn "%A" diag + + Assert.That(exitCode, Is.Zero) + finally + Environment.CurrentDirectory <- oldWorkDir + +[] +[] +let ``Test sequential type-checking`` (projectArgumentsFilePath: string) = + testCompilerFromArgs Method.Sequential projectArgumentsFilePath + +[] +[] +let ``Test graph-based type-checking`` (projectArgumentsFilePath: string) = + testCompilerFromArgs Method.Graph projectArgumentsFilePath diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/CompilationTests.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/CompilationTests.fs new file mode 100644 index 00000000000..c1df009ffff --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/CompilationTests.fs @@ -0,0 +1,55 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.CompilationTests + +open FSharp.Test +open FSharp.Test.Compiler +open NUnit.Framework +open Scenarios + +[] +type Method = + | Sequential + | Graph + +let methodOptions (method: Method) = + match method with + | Method.Sequential -> [] + | Method.Graph -> [ "--test:GraphBasedChecking"; "--test:DumpCheckingGraph"; "--deterministic-" ] + +let withMethod (method: Method) (cu: CompilationUnit) : CompilationUnit = + match cu with + | CompilationUnit.FS cs -> + FS + { cs with + Options = cs.Options @ (methodOptions method) + } + | cu -> cu + +let compileAValidScenario (scenario: Scenario) (method: Method) = + let cUnit = + let files = + scenario.Files + |> Array.map (fun (f: FileInScenario) -> SourceCodeFileKind.Create(f.FileWithAST.File, f.Content)) + |> Array.toList + + match files with + | [] -> failwith "empty files" + | first :: rest -> + let f = fsFromString first |> FS + f |> withAdditionalSourceFiles rest + + cUnit + |> withOutputType CompileOutput.Library + |> withMethod method + |> compile + |> shouldSucceed + |> ignore + +let scenarios = codebases + +[] +let 
``Compile a valid scenario using graph-based type-checking`` (scenario: Scenario) = + compileAValidScenario scenario Method.Graph + +[] +let ``Compile a valid scenario using sequential type-checking`` (scenario: Scenario) = + compileAValidScenario scenario Method.Sequential diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/DependencyResolutionTests.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/DependencyResolutionTests.fs new file mode 100644 index 00000000000..4d6e806bd29 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/DependencyResolutionTests.fs @@ -0,0 +1,19 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.DependencyResolutionTests + +open FSharp.Compiler.ComponentTests.TypeChecks.Graph.TestUtils +open NUnit.Framework +open FSharp.Compiler.GraphChecking +open Scenarios + +let scenarios = codebases + +[] +let ``Supported scenario`` (scenario: Scenario) = + let files = scenario.Files |> Array.map (fun f -> TestFileWithAST.Map f.FileWithAST) + let filePairs = FilePairMap(files) + let graph = DependencyResolution.mkGraph false filePairs files + + for file in scenario.Files do + let expectedDeps = file.ExpectedDependencies + let actualDeps = set graph.[file.FileWithAST.Idx] + Assert.AreEqual(expectedDeps, actualDeps, $"Dependencies don't match for {System.IO.Path.GetFileName file.FileWithAST.File}") diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/FileContentMappingTests.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/FileContentMappingTests.fs new file mode 100644 index 00000000000..78a364cbae7 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/FileContentMappingTests.fs @@ -0,0 +1,123 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.FileContentMappingTests + +open NUnit.Framework +open FSharp.Compiler.GraphChecking +open TestUtils + +let private getContent isSignature sourceCode = + let fileName = if isSignature then "Test.fsi" else "Test.fs" + let ast = parseSourceCode ("Test.fs", sourceCode) + FileContentMapping.mkFileContent { Idx = 0; FileName = fileName; ParsedInput = ast } + +let private (|TopLevelNamespace|_|) value e = + match e with + | FileContentEntry.TopLevelNamespace(path, content) -> + let combined = String.concat "." path + if combined = value then Some content else None + | _ -> None + +let private (|OpenStatement|_|) value e = + match e with + | FileContentEntry.OpenStatement path -> + let combined = String.concat "." path + if combined = value then Some() else None + | _ -> None + +let private (|PrefixedIdentifier|_|) value e = + match e with + | FileContentEntry.PrefixedIdentifier path -> + let combined = String.concat "." 
path + if combined = value then Some() else None + | _ -> None + +let private (|NestedModule|_|) value e = + match e with + | FileContentEntry.NestedModule(name, nestedContent) -> if name = value then Some(nestedContent) else None + | _ -> None + +[] +let ``Top level module only exposes namespace`` () = + let content = + getContent + false + """ +module X.Y.Z +""" + + match content with + | [ TopLevelNamespace "X.Y" [] ] -> Assert.Pass() + | content -> Assert.Fail($"Unexpected content: {content}") + +[] +let ``Top level namespace`` () = + let content = + getContent + false + """ +namespace X.Y +""" + + match content with + | [ TopLevelNamespace "X.Y" [] ] -> Assert.Pass() + | content -> Assert.Fail($"Unexpected content: {content}") + +[] +let ``Open statement in top level module`` () = + let content = + getContent + true + """ +module X.Y.Z + +open A.B.C +""" + + match content with + | [ TopLevelNamespace "X.Y" [ OpenStatement "A.B.C" ] ] -> Assert.Pass() + | content -> Assert.Fail($"Unexpected content: {content}") + +[] +let ``PrefixedIdentifier in type annotation`` () = + let content = + getContent + false + """ +module X.Y.Z + +let fn (a: A.B.CType) = () +""" + + match content with + | [ TopLevelNamespace "X.Y" [ PrefixedIdentifier "A.B" ] ] -> Assert.Pass() + | content -> Assert.Fail($"Unexpected content: {content}") + +[] +let ``Nested module`` () = + let content = + getContent + true + """ +module X.Y + +module Z = + type A = int +""" + + match content with + | [ TopLevelNamespace "X" [ NestedModule "Z" [] ] ] -> Assert.Pass() + | content -> Assert.Fail($"Unexpected content: {content}") + +[] +let ``Single ident module abbreviation`` () = + let content = + getContent + true + """ +module A + +module B = C +""" + + match content with + | [ TopLevelNamespace "" [ PrefixedIdentifier "C" ] ] -> Assert.Pass() + | content -> Assert.Fail($"Unexpected content: {content}") diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/QueryTrieTests.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/QueryTrieTests.fs new file mode 100644 index 00000000000..03ccc68b635 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/QueryTrieTests.fs @@ -0,0 +1,791 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.QueryTrieTests + +open System.Collections.Generic +open NUnit.Framework +open FSharp.Compiler.GraphChecking +open FSharp.Compiler.GraphChecking.DependencyResolution + +// This file contains some hard coded data to easily debug the various aspects of the dependency resolution. 
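// Editorial note (illustrative only): each entry below is a hand-written FileContentEntry summary of a real Fantomas.Core file.
// For example, the first entry (AssemblyInfo.fs) corresponds to a source file shaped roughly like
//
//     namespace System
//
//     open System.Runtime.CompilerServices
//
//     module AssemblyVersionInformation =
//         ...
//
// which, with the DSL helpers defined just below, is written as
//
//     topLevelNS "System" [ openSt "System.Runtime.CompilerServices"; nestedModule "AssemblyVersionInformation" [] ]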
+ +// Some helper DSL functions to construct the FileContentEntry items + +type System.String with + + member x.Split(v: string) = + x.Split([| v |], System.StringSplitOptions.RemoveEmptyEntries) + +let private topLevelNS (topLevelNamespaceString: string) (content: FileContentEntry list) = + topLevelNamespaceString.Split(".") + |> Array.toList + |> fun name -> FileContentEntry.TopLevelNamespace(name, content) + +let private topLevelMod (topLevelModuleString: string) (content: FileContentEntry list) = + let parts = topLevelModuleString.Split(".") + + parts + |> Array.take (parts.Length - 1) + |> Array.toList + |> fun name -> FileContentEntry.TopLevelNamespace(name, content) + +let private openSt (openStatement: string) = + openStatement.Split(".") |> Array.toList |> FileContentEntry.OpenStatement + +let private nestedModule name content = + FileContentEntry.NestedModule(name, content) + +let private prefIdent (lid: string) = + let parts = lid.Split(".") + Array.take (parts.Length - 1) parts |> List.ofArray |> PrefixedIdentifier + +// Some hardcoded files that reflect the file content of the first files in the Fantomas.Core project. +// See https://github.com/fsprojects/fantomas/tree/0938a3daabec80a22d2e17f82aba38456bb793df/src/Fantomas.Core +let private files = + [| + { + FileName = "AssemblyInfo.fs" + Idx = 0 + Content = + [| + topLevelNS + "System" + [ + openSt "System.Runtime.CompilerServices" + nestedModule "AssemblyVersionInformation" [] + ] + |] + } + { + FileName = "ISourceTextExtensions.fs" + Idx = 1 + Content = + [| + topLevelMod + "Fantomas.Core.ISourceTextExtensions" + [ + openSt "System.Text" + openSt "FSharp.Compiler.Text" + prefIdent "range.StartLine" + prefIdent "this.GetLineString" + prefIdent "range.StartLine" + prefIdent "range.EndLine" + prefIdent "range.EndColumn" + prefIdent "range.StartColumn" + prefIdent "line.Substring" + prefIdent "sb.AppendLine" + prefIdent "lastLine.Substring" + ] + |] + } + { + FileName = "RangeHelpers.fs" + Idx = 2 + Content = + [| + topLevelNS + "Fantomas.Core" + [ + openSt "FSharp.Compiler.Text" + nestedModule + "RangeHelpers" + [ + prefIdent "Position.posGeq" + prefIdent "b.Start" + prefIdent "a.Start" + prefIdent "a.End" + prefIdent "b.End" + prefIdent "Range.equals" + prefIdent "r1.FileName" + prefIdent "r2.FileName" + prefIdent "r1.End" + prefIdent "r2.Start" + prefIdent "r1.EndColumn" + prefIdent "r2.StartColumn" + prefIdent "Range.mkRange" + prefIdent "r.FileName" + prefIdent "r.Start" + prefIdent "Position.mkPos" + prefIdent "r.StartLine" + prefIdent "r.StartColumn" + prefIdent "r.EndLine" + prefIdent "r.EndColumn" + prefIdent "r.End" + prefIdent "List.sortBy" + prefIdent "List.reduce" + prefIdent "Range.unionRanges" + ] + nestedModule + "RangePatterns" + [ + prefIdent "RangeHelpers.mkStartEndRange" + prefIdent "range.FileName" + prefIdent "range.Start" + prefIdent "range.StartLine" + prefIdent "range.StartColumn" + ] + ] + |] + } + { + FileName = "AstExtensions.fsi" + Idx = 3 + Content = + [| + topLevelMod "Fantomas.Core.AstExtensions" [ openSt "FSharp.Compiler.Text"; openSt "FSharp.Compiler.Syntax" ] + |] + } + { + FileName = "AstExtensions.fs" + Idx = 4 + Content = + [| + topLevelMod + "Fantomas.Core.AstExtensions" + [ + openSt "FSharp.Compiler.SyntaxTrivia" + openSt "FSharp.Compiler.Text" + openSt "FSharp.Compiler.Text.Range" + openSt "FSharp.Compiler.Syntax" + prefIdent "range.Zero" + prefIdent "h.idRange" + prefIdent "List.last" + prefIdent "ident.idRange" + prefIdent "IdentTrivia.OriginalNotationWithParen" + 
prefIdent "IdentTrivia.HasParenthesis" + prefIdent "IdentTrivia.OriginalNotation" + prefIdent "Range.Zero" + prefIdent "single.FullRange" + prefIdent "List.fold" + prefIdent "head.FullRange" + prefIdent "fieldName.FullRange" + prefIdent "expr.Range" + prefIdent "SynModuleOrNamespaceKind.AnonModule" + prefIdent "List.tryHead" + prefIdent "List.tryLast" + prefIdent "d.Range" + prefIdent "s.Range" + prefIdent "e.Range" + prefIdent "this.Range" + prefIdent "CommentTrivia.LineComment" + prefIdent "CommentTrivia.BlockComment" + prefIdent "ConditionalDirectiveTrivia.If" + prefIdent "ConditionalDirectiveTrivia.Else" + prefIdent "ConditionalDirectiveTrivia.EndIf" + prefIdent "List.map" + prefIdent "c.Range" + prefIdent "acc.StartLine" + prefIdent "triviaRange.StartLine" + prefIdent "acc.EndLine" + prefIdent "triviaRange.EndLine" + prefIdent "ParsedInput.ImplFile" + prefIdent "r.Start" + prefIdent "m.FullRange.Start" + prefIdent "Range.Zero.Start" + prefIdent "Range.Zero.End" + prefIdent "r.End" + prefIdent "lastModule.FullRange.End" + prefIdent "this.Range.FileName" + prefIdent "trivia.CodeComments" + prefIdent "trivia.ConditionalDirectives" + prefIdent "ParsedInput.SigFile" + prefIdent "SynInterpolatedStringPart.String" + prefIdent "SynInterpolatedStringPart.FillExpr" + prefIdent "i.idRange" + prefIdent "std.FullRange" + prefIdent "a.Range" + prefIdent "RangeHelpers.mergeRanges" + prefIdent "synTypar.Range" + prefIdent "sf.FullRange" + prefIdent "head.Range" + prefIdent "b.FullRange" + prefIdent "xmlDoc.IsEmpty" + prefIdent "xmlDoc.Range" + prefIdent "attributes.IsEmpty" + prefIdent "attributes.Head.Range" + prefIdent "trivia.LeadingKeyword" + prefIdent "SynLeadingKeyword.Member" + prefIdent "SynPat.LongIdent" + prefIdent "pat.Range" + prefIdent "trivia.LeadingKeyword.Range" + ] + |] + } + { + FileName = "TriviaTypes.fs" + Idx = 5 + Content = + [| + topLevelMod "Fantomas.Core.TriviaTypes" [ openSt "FSharp.Compiler.Text"; openSt "FSharp.Compiler.Syntax" ] + |] + } + { + FileName = "Utils.fs" + Idx = 6 + Content = + [| + topLevelNS + "Fantomas.Core" + [ + openSt "System" + openSt "System.Text.RegularExpressions" + nestedModule "Char" [ prefIdent "c.ToString" ] + nestedModule + "String" + [ + prefIdent "str.Replace" + prefIdent "str.StartsWith" + prefIdent "StringComparison.Ordinal" + prefIdent "String.Empty" + prefIdent "source.Split" + prefIdent "StringSplitOptions.None" + prefIdent "Array.mapi" + prefIdent "Regex.IsMatch" + prefIdent "Array.choose" + prefIdent "Array.toList" + prefIdent "List.tryHead" + prefIdent "List.map" + prefIdent "String.concat" + prefIdent "List.zip" + prefIdent "String.length" + prefIdent "String.IsNullOrEmpty" + prefIdent "String.IsNullOrWhiteSpace" + prefIdent "String.exists" + ] + nestedModule + "Cache" + [ + prefIdent "System.Collections.Generic.HashSet" + prefIdent "HashIdentity.Reference" + prefIdent "cache.Contains" + prefIdent "cache.Add" + prefIdent "System.Collections.Concurrent.ConcurrentDictionary" + prefIdent "HashIdentity.Structural" + prefIdent "cache.GetOrAdd" + prefIdent "this.Equals" + prefIdent "Object.ReferenceEquals" + prefIdent "this.GetHashCode" + ] + nestedModule + "Dict" + [ + prefIdent "System.Collections.Generic.IDictionary" + prefIdent "d.TryGetValue" + ] + nestedModule + "List" + [ + prefIdent "List.takeWhile" + prefIdent "List.choose" + prefIdent "List.isEmpty" + prefIdent "List.rev" + ] + nestedModule "Map" [ prefIdent "Map.tryFind" ] + nestedModule "Async" [ prefIdent "async.Bind"; prefIdent "async.Return" ] + nestedModule "Continuation" [] 
+ ] + |] + } + { + FileName = "SourceParser.fs" + Idx = 7 + Content = + [| + topLevelMod + "Fantomas.Core.SourceParser" + [ + openSt "System" + openSt "FSharp.Compiler.Syntax" + openSt "FSharp.Compiler.Syntax.PrettyNaming" + openSt "FSharp.Compiler.SyntaxTrivia" + openSt "FSharp.Compiler.Text" + openSt "FSharp.Compiler.Xml" + openSt "Fantomas.Core" + openSt "Fantomas.Core.AstExtensions" + openSt "Fantomas.Core.TriviaTypes" + openSt "Fantomas.Core.RangePatterns" + prefIdent "SynTypar.SynTypar" + prefIdent "TyparStaticReq.None" + prefIdent "TyparStaticReq.HeadType" + prefIdent "SynRationalConst.Integer" + prefIdent "SynRationalConst.Rational" + prefIdent "SynRationalConst.Negate" + prefIdent "SynConst.Unit" + prefIdent "ParsedInput.ImplFile" + prefIdent "ParsedInput.SigFile" + prefIdent "ParsedImplFileInput.ParsedImplFileInput" + prefIdent "ParsedSigFileInput.ParsedSigFileInput" + prefIdent "SynModuleOrNamespace.SynModuleOrNamespace" + prefIdent "trivia.LeadingKeyword" + prefIdent "m.FullRange" + prefIdent "SynModuleOrNamespaceSig.SynModuleOrNamespaceSig" + prefIdent "a.TypeName" + prefIdent "a.ArgExpr" + prefIdent "a.Target" + prefIdent "px.ToXmlDoc" + prefIdent "xmlDoc.UnprocessedLines" + prefIdent "xmlDoc.Range" + prefIdent "SynModuleDecl.Open" + prefIdent "SynOpenDeclTarget.ModuleOrNamespace" + prefIdent "SynOpenDeclTarget.Type" + prefIdent "SynType.LongIdent" + prefIdent "SynModuleDecl.ModuleAbbrev" + prefIdent "SynModuleDecl.HashDirective" + prefIdent "SynModuleDecl.NamespaceFragment" + prefIdent "SynModuleDecl.Attributes" + prefIdent "SynModuleDecl.Let" + prefIdent "SynModuleDecl.Expr" + prefIdent "SynModuleDecl.Types" + prefIdent "SynModuleDecl.NestedModule" + prefIdent "trivia.ModuleKeyword" + prefIdent "trivia.EqualsRange" + prefIdent "SynModuleDecl.Exception" + prefIdent "SynModuleSigDecl.Open" + prefIdent "SynModuleSigDecl.ModuleAbbrev" + prefIdent "SynModuleSigDecl.HashDirective" + prefIdent "SynModuleSigDecl.NamespaceFragment" + prefIdent "SynModuleSigDecl.Val" + prefIdent "SynModuleSigDecl.Types" + prefIdent "SynModuleSigDecl.NestedModule" + prefIdent "SynModuleSigDecl.Exception" + prefIdent "SynExceptionDefnRepr.SynExceptionDefnRepr" + prefIdent "SynExceptionDefn.SynExceptionDefn" + prefIdent "SynExceptionSig.SynExceptionSig" + prefIdent "px.IsEmpty" + prefIdent "trivia.BarRange" + prefIdent "Range.unionRanges" + prefIdent "SynUnionCaseKind.Fields" + prefIdent "SynUnionCaseKind.FullType" + prefIdent "Option.map" + prefIdent "i.idRange" + prefIdent "t.Range" + prefIdent "SynMemberDefn.NestedType" + prefIdent "SynMemberDefn.Open" + prefIdent "SynMemberDefn.ImplicitInherit" + prefIdent "SynMemberDefn.Inherit" + prefIdent "SynMemberDefn.ValField" + prefIdent "SynMemberDefn.ImplicitCtor" + prefIdent "SynMemberDefn.Member" + prefIdent "SynMemberDefn.LetBindings" + prefIdent "SynType.Fun" + prefIdent "SynMemberKind.PropertyGet" + prefIdent "SynMemberKind.PropertySet" + prefIdent "SynMemberKind.PropertyGetSet" + prefIdent "SynMemberDefn.AbstractSlot" + prefIdent "trivia.WithKeyword" + prefIdent "mf.MemberKind" + prefIdent "SynMemberDefn.Interface" + prefIdent "SynMemberDefn.AutoProperty" + prefIdent "SynMemberDefn.GetSetMember" + prefIdent "SynPat.LongIdent" + prefIdent "Position.posLt" + prefIdent "getKeyword.Start" + prefIdent "setKeyword.Start" + prefIdent "SynMemberKind.ClassConstructor" + prefIdent "SynMemberKind.Constructor" + prefIdent "SynMemberKind.Member" + prefIdent "mf.IsInstance" + prefIdent "mf.IsOverrideOrExplicitImpl" + prefIdent "SynExpr.Typed" + prefIdent 
"RangeHelpers.rangeEq" + prefIdent "t1.Range" + prefIdent "t2.Range" + prefIdent "Option.bind" + prefIdent "trivia.ColonRange" + prefIdent "b.FullRange" + prefIdent "SynBindingKind.Do" + prefIdent "SynLeadingKeyword.Extern" + prefIdent "SynExpr.TraitCall" + prefIdent "SynExpr.Quote" + prefIdent "SynExpr.Paren" + prefIdent "SynExpr.Lazy" + prefIdent "SynExpr.InferredDowncast" + prefIdent "SynExpr.InferredUpcast" + prefIdent "SynExpr.Assert" + prefIdent "SynExpr.AddressOf" + prefIdent "SynExpr.YieldOrReturn" + prefIdent "SynExpr.YieldOrReturnFrom" + prefIdent "SynExpr.Do" + prefIdent "SynExpr.DoBang" + prefIdent "SynExpr.Fixed" + prefIdent "SynExpr.TypeTest" + prefIdent "SynExpr.Downcast" + prefIdent "SynExpr.Upcast" + prefIdent "SynExpr.While" + prefIdent "SynExpr.For" + prefIdent "SynExpr.Null" + prefIdent "SynExpr.Const" + prefIdent "SynExpr.TypeApp" + prefIdent "SynExpr.Match" + prefIdent "trivia.MatchKeyword" + prefIdent "SynExpr.MatchBang" + prefIdent "trivia.MatchBangKeyword" + prefIdent "SynExpr.Sequential" + prefIdent "SynExpr.Ident" + prefIdent "SynExpr.LongIdent" + prefIdent "SynExpr.ComputationExpr" + prefIdent "SynExpr.App" + prefIdent "ExprAtomicFlag.NonAtomic" + prefIdent "compExpr.Range" + prefIdent "SynExpr.ArrayOrListComputed" + prefIdent "RangeHelpers.mkStartEndRange" + prefIdent "SynExpr.ArrayOrList" + prefIdent "SynExpr.Tuple" + prefIdent "SynExpr.InterpolatedString" + prefIdent "SynExpr.IndexRange" + prefIdent "SynExpr.IndexFromEnd" + prefIdent "SynExpr.Typar" + prefIdent "SynConst.Double" + prefIdent "SynConst.Decimal" + prefIdent "SynConst.Single" + prefIdent "SynConst.Int16" + prefIdent "SynConst.Int32" + prefIdent "SynConst.Int64" + prefIdent "List.moreThanOne" + prefIdent "SynExpr.Dynamic" + prefIdent "IdentTrivia.OriginalNotationWithParen" + prefIdent "originalNotation.Length" + prefIdent "originalNotation.StartsWith" + prefIdent "List.rev" + prefIdent "SynExpr.DotGet" + prefIdent "SynExpr.Lambda" + prefIdent "SynExpr.MatchLambda" + prefIdent "SynExpr.New" + prefIdent "IdentTrivia.OriginalNotation" + prefIdent "ident.idText" + prefIdent "newLineInfixOps.Contains" + prefIdent "List.length" + prefIdent "SynExpr.JoinIn" + prefIdent "SynExpr.LetOrUse" + prefIdent "xs.Length" + prefIdent "List.mapi" + prefIdent "trivia.InKeyword" + prefIdent "List.map" + prefIdent "SynExpr.LetOrUseBang" + prefIdent "List.collect" + prefIdent "Continuation.sequence" + prefIdent "SynExpr.ForEach" + prefIdent "SynExpr.DotIndexedSet" + prefIdent "SynExpr.NamedIndexedPropertySet" + prefIdent "SynExpr.DotNamedIndexedPropertySet" + prefIdent "SynExpr.DotIndexedGet" + prefIdent "SynExpr.DotSet" + prefIdent "SynExpr.IfThenElse" + prefIdent "trivia.IfKeyword" + prefIdent "trivia.IsElif" + prefIdent "trivia.ThenKeyword" + prefIdent "trivia.ElseKeyword" + prefIdent "unitRange.StartColumn" + prefIdent "unitRange.EndColumn" + prefIdent "SynExpr.Record" + prefIdent "SynExpr.AnonRecd" + prefIdent "SynExpr.ObjExpr" + prefIdent "SynExpr.LongIdentSet" + prefIdent "SynExpr.TryWith" + prefIdent "trivia.TryKeyword" + prefIdent "SynExpr.TryFinally" + prefIdent "trivia.FinallyKeyword" + prefIdent "SynExpr.ArbitraryAfterError" + prefIdent "SynExpr.FromParseError" + prefIdent "SynExpr.DiscardAfterMissingQualificationAfterDot" + prefIdent "SynExpr.LibraryOnlyILAssembly" + prefIdent "SynExpr.LibraryOnlyStaticOptimization" + prefIdent "FSharp.Core" + prefIdent "SynExpr.LibraryOnlyUnionCaseFieldGet" + prefIdent "SynExpr.LibraryOnlyUnionCaseFieldSet" + prefIdent "SynPat.OptionalVal" + prefIdent "SynPat.Attrib" + 
prefIdent "SynPat.Or" + prefIdent "p.Range" + prefIdent "SynPat.Ands" + prefIdent "SynPat.Null" + prefIdent "SynPat.Wild" + prefIdent "SynPat.Tuple" + prefIdent "SynPat.ArrayOrList" + prefIdent "SynPat.Typed" + prefIdent "SynPat.Named" + prefIdent "SynPat.As" + prefIdent "SynArgPats.NamePatPairs" + prefIdent "SynArgPats.Pats" + prefIdent "SynPat.ListCons" + prefIdent "trivia.ColonColonRange" + prefIdent "synLongIdent.IdentsWithTrivia" + prefIdent "synIdent.FullRange" + prefIdent "synLongIdent.FullRange" + prefIdent "SynPat.Paren" + prefIdent "SynPat.Record" + prefIdent "SynPat.Const" + prefIdent "SynPat.IsInst" + prefIdent "SynPat.QuoteExpr" + prefIdent "newIdent.idText" + prefIdent "pat.Range" + prefIdent "SynSimplePats.SimplePats" + prefIdent "SynSimplePats.Typed" + prefIdent "SynSimplePat.Attrib" + prefIdent "SynSimplePat.Id" + prefIdent "SynSimplePat.Typed" + prefIdent "trivia.ArrowRange" + prefIdent "SynMatchClause.SynMatchClause" + prefIdent "matchRange.Start" + prefIdent "clause.Range.Start" + prefIdent "me.Range" + prefIdent "SynTypeDefnSimpleRepr.Enum" + prefIdent "SynTypeDefnSimpleRepr.Union" + prefIdent "SynTypeDefnSimpleRepr.Record" + prefIdent "SynTypeDefnSimpleRepr.None" + prefIdent "SynTypeDefnSimpleRepr.TypeAbbrev" + prefIdent "SynTypeDefnSimpleRepr.General" + prefIdent "SynTypeDefnSimpleRepr.LibraryOnlyILAssembly" + prefIdent "SynTypeDefnSimpleRepr.Exception" + prefIdent "SynTypeDefnRepr.Simple" + prefIdent "SynTypeDefnRepr.ObjectModel" + prefIdent "SynTypeDefnRepr.Exception" + prefIdent "List.tryFind" + prefIdent "List.filter" + prefIdent "SynTypeDefnSigRepr.Simple" + prefIdent "SynTypeDefnSigRepr.ObjectModel" + prefIdent "SynTypeDefnSigRepr.Exception" + prefIdent "SynTypeDefnKind.Unspecified" + prefIdent "SynTypeDefnKind.Class" + prefIdent "SynTypeDefnKind.Interface" + prefIdent "SynTypeDefnKind.Struct" + prefIdent "SynTypeDefnKind.Record" + prefIdent "SynTypeDefnKind.Union" + prefIdent "SynTypeDefnKind.Abbrev" + prefIdent "SynTypeDefnKind.Opaque" + prefIdent "SynTypeDefnKind.Augmentation" + prefIdent "SynTypeDefnKind.IL" + prefIdent "SynTypeDefnKind.Delegate" + prefIdent "std.FullRange" + prefIdent "SynTyparDecls.PostfixList" + prefIdent "SynType.HashConstraint" + prefIdent "SynType.MeasurePower" + prefIdent "SynType.MeasureDivide" + prefIdent "SynType.StaticConstant" + prefIdent "SynType.StaticConstantExpr" + prefIdent "SynType.StaticConstantNamed" + prefIdent "SynType.Array" + prefIdent "SynType.Anon" + prefIdent "SynType.Var" + prefIdent "SynType.App" + prefIdent "SynType.LongIdentApp" + prefIdent "SynType.Tuple" + prefIdent "SynType.WithGlobalConstraints" + prefIdent "SynType.AnonRecd" + prefIdent "SynType.Paren" + prefIdent "SynType.SignatureParameter" + prefIdent "SynType.Or" + prefIdent "trivia.OrKeyword" + prefIdent "lid.idText" + prefIdent "x.ToString" + prefIdent "SynTypeConstraint.WhereTyparIsValueType" + prefIdent "SynTypeConstraint.WhereTyparIsReferenceType" + prefIdent "SynTypeConstraint.WhereTyparIsUnmanaged" + prefIdent "SynTypeConstraint.WhereTyparSupportsNull" + prefIdent "SynTypeConstraint.WhereTyparIsComparable" + prefIdent "SynTypeConstraint.WhereTyparIsEquatable" + prefIdent "SynTypeConstraint.WhereTyparDefaultsToType" + prefIdent "SynTypeConstraint.WhereTyparSubtypeOfType" + prefIdent "SynTypeConstraint.WhereTyparSupportsMember" + prefIdent "SynTypeConstraint.WhereTyparIsEnum" + prefIdent "SynTypeConstraint.WhereTyparIsDelegate" + prefIdent "SynTypeConstraint.WhereSelfConstrained" + prefIdent "SynMemberSig.Member" + prefIdent 
"SynMemberSig.Interface" + prefIdent "SynMemberSig.Inherit" + prefIdent "SynMemberSig.ValField" + prefIdent "SynMemberSig.NestedType" + prefIdent "ident.idRange" + prefIdent "e.Range" + prefIdent "List.tryLast" + prefIdent "IdentTrivia.HasParenthesis" + prefIdent "lp.idText" + prefIdent "Seq.tryHead" + prefIdent "Char.IsUpper" + prefIdent "Option.defaultValue" + prefIdent "ExprAtomicFlag.Atomic" + prefIdent "RangeHelpers.isAdjacentTo" + prefIdent "identifierExpr.Range" + prefIdent "argExpr.Range" + prefIdent "Seq.toList" + prefIdent "Seq.singleton" + prefIdent "List.exists" + ] + |] + } + |] + +let dictionary<'key, 'value when 'key: equality> (entries: ('key * 'value) seq) = + let dict = Dictionary(Seq.length entries) + + for k, v in entries do + dict.Add(k, v) + + dict + +let private noChildren = Dictionary(0) +let emptyHS () = HashSet(0) + +let indexOf name = + Array.find (fun (fc: FileContent) -> fc.FileName = name) files |> fun fc -> fc.Idx + +// This should be constructed from the AST, again a hard coded subset of Fantomas.Core +let private fantomasCoreTrie: TrieNode = + { + Current = TrieNodeInfo.Root(emptyHS ()) + Children = + dictionary + [| + "System", + { + Current = TrieNodeInfo.Namespace("System", emptyHS ()) + Children = + dictionary + [| + "AssemblyVersionInformation", + { + Current = TrieNodeInfo.Module("AssemblyVersionInformation", indexOf "AssemblyInfo.fs") + Children = noChildren + } + |] + } + "Fantomas", + { + Current = TrieNodeInfo.Namespace("Fantomas", emptyHS ()) + Children = + dictionary + [| + "Core", + { + Current = TrieNodeInfo.Namespace("Core", emptyHS ()) + Children = + dictionary + [| + "ISourceTextExtensions", + { + Current = + TrieNodeInfo.Module("ISourceTextExtensions", indexOf "ISourceTextExtensions.fs") + Children = noChildren + } + "RangeHelpers", + { + Current = TrieNodeInfo.Module("RangeHelpers", indexOf "RangeHelpers.fs") + Children = noChildren + } + "RangePatterns", + { + Current = TrieNodeInfo.Module("RangePatterns", indexOf "RangeHelpers.fs") + Children = noChildren + } + "AstExtensions", + { + Current = TrieNodeInfo.Module("AstExtensions", indexOf "AstExtensions.fsi") + Children = noChildren + } + "TriviaTypes", + { + Current = TrieNodeInfo.Module("TriviaTypes", indexOf "TriviaTypes.fs") + Children = noChildren + } + "Char", + { + Current = TrieNodeInfo.Module("Char", indexOf "Utils.fs") + Children = noChildren + } + "String", + { + Current = TrieNodeInfo.Module("String", indexOf "Utils.fs") + Children = noChildren + } + "Cache", + { + Current = TrieNodeInfo.Module("Cache", indexOf "Utils.fs") + Children = noChildren + } + "Dict", + { + Current = TrieNodeInfo.Module("Dict", indexOf "Utils.fs") + Children = noChildren + } + "List", + { + Current = TrieNodeInfo.Module("List", indexOf "Utils.fs") + Children = noChildren + } + "Map", + { + Current = TrieNodeInfo.Module("Map", indexOf "Utils.fs") + Children = noChildren + } + "Async", + { + Current = TrieNodeInfo.Module("Async", indexOf "Utils.fs") + Children = noChildren + } + "Continuation", + { + Current = TrieNodeInfo.Module("Continuation", indexOf "Utils.fs") + Children = noChildren + } + "SourceParser", + { + Current = TrieNodeInfo.Module("SourceParser", indexOf "SourceParser.fs") + Children = noChildren + } + |] + } + |] + } + |] + } + +[] +let ``Query non existing node in trie`` () = + let result = + queryTrie fantomasCoreTrie [ "System"; "System"; "Runtime"; "CompilerServices" ] + + match result with + | QueryTrieNodeResult.NodeDoesNotExist -> Assert.Pass() + | result -> Assert.Fail 
$"Unexpected result: %A{result}" + +[] +let ``Query node that does not expose data in trie`` () = + let result = queryTrie fantomasCoreTrie [ "Fantomas"; "Core" ] + + match result with + | QueryTrieNodeResult.NodeDoesNotExposeData -> Assert.Pass() + | result -> Assert.Fail $"Unexpected result: %A{result}" + +[] +let ``Query module node that exposes one file`` () = + let result = + queryTrie fantomasCoreTrie [ "Fantomas"; "Core"; "ISourceTextExtensions" ] + + match result with + | QueryTrieNodeResult.NodeExposesData file -> + let file = Seq.exactlyOne file + Assert.AreEqual(indexOf "ISourceTextExtensions.fs", file) + | result -> Assert.Fail $"Unexpected result: %A{result}" + +[] +let ``ProcessOpenStatement full path match`` () = + let sourceParser = + Array.find (fun (f: FileContent) -> f.FileName = "SourceParser.fs") files + + let state = + FileContentQueryState.Create + sourceParser.Idx + (set + [| + indexOf "AssemblyInfo.fs" + indexOf "ISourceTextExtensions.fs" + indexOf "RangeHelpers.fs" + indexOf "AstExtensions.fsi" + indexOf "TriviaTypes.fs" + indexOf "Utils.fs" + |]) + Set.empty + + let result = + processOpenPath (queryTrie fantomasCoreTrie) [ "Fantomas"; "Core"; "AstExtensions" ] state + + let dep = Seq.exactlyOne result.FoundDependencies + Assert.AreEqual(indexOf "AstExtensions.fsi", dep) diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/Scenarios.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/Scenarios.fs new file mode 100644 index 00000000000..ad1a7717a72 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/Scenarios.fs @@ -0,0 +1,580 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.Scenarios + +open TestUtils + +type Scenario = + { + Name: string + Files: FileInScenario array + } + + override x.ToString() = x.Name + +and FileInScenario = + { + FileWithAST: TestFileWithAST + ExpectedDependencies: Set + Content: string + } + +let private scenario name files = + let files = files |> List.toArray |> Array.mapi (fun idx f -> f idx) + { Name = name; Files = files } + +let private sourceFile fileName content (dependencies: Set) = + fun idx -> + let fileWithAST = + { + Idx = idx + AST = parseSourceCode (fileName, content) + File = fileName + } + + { + FileWithAST = fileWithAST + ExpectedDependencies = dependencies + Content = content + } + +let internal codebases = + [ + scenario + "Link via full open statement" + [ + sourceFile + "A.fs" + """ +module A +do () +""" + Set.empty + sourceFile + "B.fs" + """ +module B +open A +""" + (set [| 0 |]) + ] + scenario + "Partial open statement" + [ + sourceFile + "A.fs" + """ +module Company.A +let a = () +""" + Set.empty + sourceFile + "B.fs" + """ +module Other.B +open Company +open A +""" + (set [| 0 |]) + ] + scenario + "Link via fully qualified identifier" + [ + sourceFile + "X.fs" + """ +module X.Y.Z + +let z = 9 +""" + Set.empty + sourceFile + "Y.fs" + """ +module A.B + +let a = 1 + X.Y.Z.z +""" + (set [| 0 |]) + ] + scenario + "Link via partial open and prefixed identifier" + [ + sourceFile + "A.fs" + """ +module A.B.C + +let d = 1 +""" + Set.empty + sourceFile + "B.fs" + """ +module X.Y.Z + +open A.B + +let e = C.d + 1 +""" + (set [| 0 |]) + ] + scenario + "Modules sharing a namespace do not link them automatically" + [ + sourceFile + "A.fs" + """ +module Project.A + +let a = 0 +""" + Set.empty + sourceFile + "B.fs" + """ +module Project.B + +let b = 0 +""" + Set.empty + sourceFile + "C.fs" + """ +module Project.C + +let c = 0 +""" + Set.empty + sourceFile + "D.fs" + """ +module 
Project.D + +let d = 0 +""" + Set.empty + ] + scenario + "Files which add types to a namespace are automatically linked to files that share said namespace" + [ + sourceFile + "A.fs" + """ +namespace Product + +type X = { Y : string } +""" + Set.empty + // There is no way to infer what `b` is in this example. + // It could be the type defined in A, so we cannot take any risks here. + // We link A as a dependency of B because A exposes a type in the shared namespace `Product`. + sourceFile + "B.fs" + """ +module Product.Feature + +let a b = b.Y + "z" +""" + (set [| 0 |]) + ] + scenario + "Toplevel AutoOpen attribute will link to all the subsequent files" + [ + sourceFile + "A.fs" + """ +[] +module Utils + +let a b c = b - c +""" + Set.empty + sourceFile + "B.fs" + """ +namespace X + +type Y = { Q: int } +""" + (set [| 0 |]) + ] + // Notice how we link B.fs to A.fsi; this will always be the case for signature/implementation pairs. + // When debugging, notice that the `Helpers` module will not be part of the trie. + scenario + "Signature files are being used to construct the Trie" + [ + sourceFile // 0 + "A.fsi" + """ +module A + +val a: int -> int +""" + Set.empty + sourceFile // 1 + "A.fs" + """ +module A + +module Helpers = + let impl a = a + 1 + +let a b = Helpers.impl b +""" + (set [| 0 |]) + sourceFile // 2 + "B.fs" + """ +module B + +let b = A.a 42 +""" + (set [| 0 |]) + ] + scenario + "A partial open statement still links to a file as a last resort" + [ + sourceFile + "A.fs" + """ +module X.A + +let a = 0 +""" + Set.empty + sourceFile + "B.fs" + """ +module X.B + +let b = 0 +""" + Set.empty + sourceFile + "C.fs" + """ +module Y.C + +// This open statement does not do anything. +// It can safely be removed, but because of its presence we need to link it to something that exposes the namespace X. +// We try and pick the file with the lowest index +open X + +let c = 0 +""" + (set [| 0 |]) + ] + // This is a very last resort measure to link C to all files that came before it. + // `open X` does exist, but there is no file that is actively contributing to the X namespace. + // This is a trade-off scenario: if A.fs had a type or nested module, we would consider it to contribute to the X namespace. + // As it is empty, we don't include the file index in the trie. + scenario + "An open statement that leads nowhere should link to every file that came above it." + [ + sourceFile + "A.fs" + """ +namespace X +""" + Set.empty + sourceFile + "B.fs" + """ +namespace Y +""" + Set.empty + sourceFile + "C.fs" + """ +namespace Z + +open X +""" + (set [| 0; 1 |]) + ] + // The nested module in this case adds content to the namespace. + // The same applies if a namespace has a type. + scenario + "Nested module with auto open attribute" + [ + sourceFile + "A.fs" + """ +namespace Product + +[] +module X = + let x: int = 0 +""" + Set.empty + sourceFile + "B.fs" + """ +module Product.Feature + +let a b = x + b +""" + (set [| 0 |]) + ] + // Similar to a top-level auto open attribute, the global namespace also introduces a link to all the files that come after it. 
+ scenario + "Global namespace always introduces a link" + [ + sourceFile + "A.fs" + """ +namespace global + +type A = { B : int } +""" + Set.empty + sourceFile + "B.fs" + """ +module Product.Feature + +let f a = a.B +""" + (set [| 0 |]) + ] + scenario + "Reference to property of static member from nested module is detected" + [ + sourceFile + "A.fs" + """ +module A + +module B = + type Person = {Name : string} + type C = + static member D: Person = failwith "" +""" + Set.empty + sourceFile + "B.fs" + """ +module B +let person: string = A.B.C.D.Name +""" + (set [| 0 |]) + ] + // Diamond scenario + scenario + "Dependent signature files" + [ + sourceFile // 0 + "A.fsi" + """ +module A + +type AType = class end +""" + Set.empty + sourceFile // 1 + "A.fs" + """ +module A + +type AType = class end + """ + (set [| 0 |]) + sourceFile // 2 + "B.fsi" + """ +module B + +open A + +val b: AType -> unit +""" + (set [| 0 |]) + sourceFile // 3 + "B.fs" + """ +module B + +open A + +let b (a:AType) = () +""" + (set [| 0; 2 |]) + sourceFile // 4 + "C.fsi" + """ + module C + + type CType = class end + """ + Set.empty + sourceFile // 5 + "C.fs" + """ +module C + +type CType = class end + """ + (set [| 4 |]) + sourceFile // 6 + "D.fsi" + """ +module D + +open A +open C + +val d: CType -> unit + """ + (set [| 0; 4 |]) + sourceFile // 7 + "D.fs" + """ +module D + +open A +open B +open C + +let d (c: CType) = + let a : AType = failwith "todo" + b a + """ + (set [| 0; 2; 4; 6 |]) + ] + scenario + "Module abbreviations with shared namespace" + [ + sourceFile + "A.fsi" + """ +module internal FSharp.Compiler.CheckExpressions + +exception BakedInMemberConstraintName of string +""" + Set.empty + sourceFile + "A.fs" + """ +module internal FSharp.Compiler.CheckExpressions + +exception BakedInMemberConstraintName of string +""" + (set [| 0 |]) + sourceFile + "B.fs" + """ +namespace FSharp.Compiler.CodeAnalysis + +open FSharp.Compiler + +module Tc = CheckExpressions +""" + (set [| 0 |]) + ] + scenario + "Top level module with auto open and namespace prefix" + [ + // This file is added to ensure that B.fs links to A.fs because of the contents of A. + // If A didn't have the AutoOpen attribute, B.fs would still be linked to A.fs as a last resort, via the ghost dependency mechanism. 
+ sourceFile + "Ghost.fs" + """ +namespace A +""" + Set.empty + sourceFile + "A.fs" + """ +[] +module A.B + +let a = 0 +""" + Set.empty + sourceFile + "B.fs" + """ +module Library + +open A +let b = a + 1 +""" + (set [| 1 |]) + ] + scenario + "Top level module with AutoOpen attribute and namespace prefix" + [ + sourceFile + "A.fs" + """ +[] +module X.Y.Z + +type A = { A : int } +""" + Set.empty + sourceFile + "Library.fs" + """ +module Library + +open X.Y + +let fn (a: A) = () +""" + (set [| 0 |]) + ] + scenario + "Nested AutoOpen module in namespace is accessed via namespace open" + [ + sourceFile + "Z.fs" + """ +namespace X.Y + +[] +module Z = + + type A = { A: int } +""" + Set.empty + sourceFile + "Library.fs" + """ +module Library + +open X.Y + +let fn (a: A) = () +""" + (set [| 0 |]) + ] + scenario + "Implementation uses something defined above and in signature" + [ + sourceFile + "A.fsi" + """ +module Bar + +type Bar = + new: unit -> Bar + static member Foo: unit -> unit + +val Foo: unit -> unit +""" + Set.empty + sourceFile + "A.fs" + """ +module Bar + +type Bar() = + static member Foo () : unit = + failwith "" + +let Foo () : unit = + Bar.Foo () +""" + (set [| 0 |]) + ] + ] diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/TrieMappingTests.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/TrieMappingTests.fs new file mode 100644 index 00000000000..838f9846a5c --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/TrieMappingTests.fs @@ -0,0 +1,315 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.TrieMappingTests + +open NUnit.Framework +open FSharp.Compiler.GraphChecking +open TestUtils + +let private noDependencies = Set.empty + +[] +let ``Basic trie`` () = + let sampleFiles = + [| + "A.fs", + """ +module X.Y.A + +let a = [] + """ + "B.fs", + """ +module X.Y.B + +let b = [] + """ + "C.fs", + """ +namespace X.Y + +type C = { CX: int; CY: int } + """ + |] + + let files = + sampleFiles + |> Array.mapi (fun idx (fileName, code) -> + { + Idx = idx + FileName = fileName + ParsedInput = parseSourceCode (fileName, code) + } : FileInProject) + + let trie = TrieMapping.mkTrie files + + match trie.Current with + | TrieNodeInfo.Root _ -> () + | current -> Assert.Fail($"mkTrie should always return a TrieNodeInfo.Root, got {current}") + + let xNode = trie.Children.["X"] + Assert.AreEqual(1, xNode.Children.Count) + Assert.True(Seq.isEmpty xNode.Files) + + let yNode = xNode.Children["Y"] + Assert.AreEqual(2, yNode.Children.Count) + Assert.AreEqual(set [| 2 |], yNode.Files) + + let aNode = yNode.Children["A"] + Assert.AreEqual(0, aNode.Children.Count) + Assert.AreEqual(set [| 0 |], aNode.Files) + + let bNode = yNode.Children["B"] + Assert.AreEqual(0, bNode.Children.Count) + Assert.AreEqual(set [| 1 |], bNode.Files) + +[] +let ``Toplevel AutoOpen module with prefixed namespace`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "A.fs" + ParsedInput = + parseSourceCode ( + "A.fs", + """ +[] +module A.B + +let a = 0 +""" + ) + } + |] + + // Assert that both A and B expose file index 0 + let aNode = trie.Children.["A"] + Assert.AreEqual(set [| 0 |], aNode.Files) + let bNode = aNode.Children.["B"] + Assert.AreEqual(set [| 0 |], bNode.Files) + +[] +let ``Toplevel AutoOpen module with multi prefixed namespace`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "A.fsi" + ParsedInput = + parseSourceCode ( + "A.fsi", + """ +[] +module A.B.C + +let a = 0 +""" + ) + } + |] + + // Assert that B and C expose file 
index 0, namespace A should not. + let aNode = trie.Children.["A"] + Assert.AreEqual(noDependencies, aNode.Files) + let bNode = aNode.Children.["B"] + Assert.AreEqual(set [| 0 |], bNode.Files) + let cNode = bNode.Children.["C"] + Assert.AreEqual(set [| 0 |], cNode.Files) + +[] +let ``Global namespace should link files to the root node`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "A.fs" + ParsedInput = + parseSourceCode ( + "A.fs", + """ +namespace global + +type A = { A : int } +""" + ) + } + { + Idx = 1 + FileName = "B.fsi" + ParsedInput = + parseSourceCode ( + "B.fsi", + """ +namespace global + +type B = { Y : int } +""" + ) + } + |] + + Assert.AreEqual(set [| 0; 1 |], trie.Files) + +[] +let ``Module with a single ident and AutoOpen attribute should link files to root`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "A.fs" + ParsedInput = + parseSourceCode ( + "A.fs", + """ +[] +module A + +type A = { A : int } +""" + ) + } + { + Idx = 1 + FileName = "B.fsi" + ParsedInput = + parseSourceCode ( + "B.fsi", + """ +[] +module B + +type B = { Y : int } +""" + ) + } + |] + + Assert.AreEqual(set [| 0; 1 |], trie.Files) + Assert.AreEqual(0, trie.Children.Count) + +[] +let ``Module with AutoOpen attribute and two ident should expose file at two levels`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "Y.fs" + ParsedInput = + parseSourceCode ( + "Y.fs", + """ +[] +module X.Y + +type A = { A : int } +""" + ) + } + |] + + Assert.AreEqual(noDependencies, trie.Files) + let xNode = trie.Children.["X"] + Assert.AreEqual(set [| 0 |], xNode.Files) + let yNode = xNode.Children.["Y"] + Assert.AreEqual(set [| 0 |], yNode.Files) + +[] +let ``Module with AutoOpen attribute and three ident should expose file at last two levels`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "Z.fsi" + ParsedInput = + parseSourceCode ( + "Z.fsi", + """ +[] +module X.Y.Z + +type A = { A : int } +""" + ) + } + |] + + Assert.AreEqual(noDependencies, trie.Files) + let xNode = trie.Children.["X"] + Assert.AreEqual(noDependencies, xNode.Files) + let yNode = xNode.Children.["Y"] + Assert.AreEqual(set [| 0 |], yNode.Files) + let zNode = yNode.Children.["Z"] + Assert.AreEqual(set [| 0 |], zNode.Files) + +[] +let ``Nested AutoOpen module in namespace will expose the file to the namespace node`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "Z.fs" + ParsedInput = + parseSourceCode ( + "Z.fs", + """ +namespace X.Y + +[] +module Z = + + type A = { A: int } +""" + ) + } + |] + + Assert.AreEqual(noDependencies, trie.Files) + let xNode = trie.Children.["X"] + Assert.AreEqual(noDependencies, xNode.Files) + let yNode = xNode.Children.["Y"] + Assert.AreEqual(set [| 0 |], yNode.Files) + let zNode = yNode.Children.["Z"] + Assert.AreEqual(set [| 0 |], zNode.Files) + +[] +let ``Two modules with the same name, only the first file exposes the index`` () = + let trie = + TrieMapping.mkTrie + [| + { + Idx = 0 + FileName = "A.fs" + ParsedInput = + parseSourceCode ( + "A.fs", + """ +module A + +type B = { C: int } +""" + ) + } + { + Idx = 1 + FileName = "A2.fs" + ParsedInput = + parseSourceCode ( + "A2.fs", + """ +module A + +let _ = () +""" + ) + } + |] + + Assert.AreEqual(1, trie.Children.Count) + let aNode = trie.Children.["A"] + Assert.AreEqual(set [| 0 |], aNode.Files) diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/TypedTreeGraph.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/TypedTreeGraph.fs new 
file mode 100644 index 00000000000..ef94bb027d9 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/TypedTreeGraph.fs @@ -0,0 +1,197 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.TypedTreeGraphTests + +open System +open System.Collections.Concurrent +open System.Collections.Generic +open System.IO +open FSharp.Compiler.CodeAnalysis +open FSharp.Compiler.Text +open FSharp.Compiler.Symbols +open NUnit.Framework +open FSharp.Compiler.GraphChecking +open FSharp.Compiler.ComponentTests.TypeChecks.Graph.TestUtils + +let localProjects = CompilationFromCmdlineArgsTests.localProjects + +let checker = FSharpChecker.Create(keepAssemblyContents = true) + +type DepCollector(filesThatCameBeforeIt: Set) = + let deps = HashSet() + + member this.Add(declarationLocation: range) : unit = + let sourceLocation = Path.GetFullPath declarationLocation.FileName + let ext = Path.GetExtension sourceLocation + + if + (ext = ".fs" || ext = ".fsi") + && Set.contains sourceLocation filesThatCameBeforeIt + then + deps.Add(sourceLocation) |> ignore + + member this.Deps = Seq.toArray deps + +let rec collectFromSymbol (collector: DepCollector) (s: FSharpSymbol) = + match s with + | :? FSharpMemberOrFunctionOrValue as mfv -> + if mfv.ImplementationLocation.IsSome || mfv.SignatureLocation.IsSome then + collector.Add mfv.DeclarationLocation + + collectFromSymbol collector mfv.ReturnParameter + + for cpg in mfv.CurriedParameterGroups do + for p in cpg do + collectFromSymbol collector p + + | :? FSharpParameter as fp -> + if fp.Type.HasTypeDefinition then + collector.Add fp.Type.TypeDefinition.DeclarationLocation + + | :? FSharpEntity as e -> + if + not (e.IsFSharpModule || e.IsNamespace) + && (e.ImplementationLocation.IsSome || e.SignatureLocation.IsSome) + then + collector.Add e.DeclarationLocation + + | :? FSharpActivePatternCase as apc -> collector.Add apc.DeclarationLocation + | _ -> () + +// Fair warning: this isn't fast or optimized code +let graphFromTypedTree (checker: FSharpChecker) (projectOptions: FSharpProjectOptions) : Async * Graph> = + async { + let files = ConcurrentDictionary() + + let! filesWithDeps = + projectOptions.SourceFiles + |> Array.mapi (fun idx fileName -> + async { + let sourceText = (File.ReadAllText >> SourceText.ofString) fileName + let! parseResult, checkResult = checker.ParseAndCheckFileInProject(fileName, 1, sourceText, projectOptions) + + match checkResult with + | FSharpCheckFileAnswer.Aborted -> return failwith "aborted" + | FSharpCheckFileAnswer.Succeeded fileResult -> + let allSymbols = fileResult.GetAllUsesOfAllSymbolsInFile() |> Seq.toArray + let filesItCanKnow = set projectOptions.SourceFiles.[0 .. 
(idx - 1)] + let collector = DepCollector(filesItCanKnow) + + for s in allSymbols do + collectFromSymbol collector s.Symbol + + let file: TestFileWithAST = + { + Idx = idx + AST = parseResult.ParseTree + File = fileName + } + + files.TryAdd(idx, file) |> ignore + + let depIndexes = + collector.Deps + |> Array.map (fun dep -> projectOptions.SourceFiles |> Array.findIndex (fun file -> file = dep)) + + return (idx, depIndexes) + }) + |> Async.Parallel + + let graph = readOnlyDict filesWithDeps + + return files, graph + } + +[] +[] +let ``Create Graph from typed tree`` (projectArgumentsFilePath: string) = + let previousDir = Environment.CurrentDirectory + + async { + try + let pwd = FileInfo(projectArgumentsFilePath).Directory.FullName + Environment.CurrentDirectory <- pwd + + let args = File.ReadAllLines(projectArgumentsFilePath) + let fileName = Path.GetFileNameWithoutExtension(args.[0].Replace("-o:", "")) + + let sourceFiles, otherOptions = + args + |> Array.partition (fun option -> + not (option.StartsWith("-")) + && (option.EndsWith(".fs") || option.EndsWith(".fsi"))) + + let sourceFiles = sourceFiles |> Array.map Path.GetFullPath + + let proj = + { + ProjectFileName = fileName + ProjectId = None + SourceFiles = sourceFiles + OtherOptions = otherOptions + ReferencedProjects = [||] + IsIncompleteTypeCheckEnvironment = false + UseScriptResolutionRules = false + LoadTime = DateTime.Now + UnresolvedReferences = None + OriginalLoadReferences = [] + Stamp = None + } + + let! files, graphFromTypedTree = graphFromTypedTree checker proj + + graphFromTypedTree + |> Graph.map (fun n -> n,files.[n].File) + |> Graph.serialiseToMermaid $"{fileName}.typed-tree.deps.md" + + let collectAllDeps (graph: Graph) = + (Map.empty, [ 0 .. (sourceFiles.Length - 1) ]) + ||> List.fold (fun acc idx -> + let deps = graph.[idx] + + let allDeps = + set [| yield! deps; yield! (Seq.collect (fun dep -> Map.find dep acc) deps) |] + + Map.add idx allDeps acc) + + let typedTreeMap = collectAllDeps graphFromTypedTree + + let filePairs = files.Values |> Seq.map TestFileWithAST.Map |> Seq.toArray |> FilePairMap + + let graphFromHeuristic = + let isFSharpCore = Path.GetFileNameWithoutExtension(projectArgumentsFilePath).StartsWith("FSharp.Core") + files.Values |> Seq.map TestFileWithAST.Map |> Seq.toArray |> DependencyResolution.mkGraph isFSharpCore filePairs + + graphFromHeuristic + |> Graph.map (fun n -> n, files.[n].File) + |> Graph.serialiseToMermaid $"{fileName}.heuristic-tree.deps.md" + + let heuristicMap = collectAllDeps graphFromHeuristic + + let relativePath (file: string) = + Path.Combine(Path.GetDirectoryName(file), Path.GetFileName(file)) + + let depNames (deps: Set) = + deps |> Seq.map (fun idx -> relativePath files.[idx].File) |> String.concat " " + + /// Compare the found dependencies of a specified heuristic versus the dependencies found in the typed tree + let compareDeps source fileName idx (depsFromHeuristic: Set) = + let depsFromTypedTree = Map.find idx typedTreeMap + + if Set.isEmpty depsFromTypedTree && not (Set.isEmpty depsFromHeuristic) then + printfn $"{source}:{relativePath fileName} has %A{(depNames depsFromHeuristic)} while the typed tree had none!" + else + let isSuperSet = Set.isSuperset depsFromHeuristic depsFromTypedTree + let delta = Set.difference depsFromTypedTree depsFromHeuristic + + Assert.IsTrue( + isSuperSet, + $"""{relativePath fileName} did not contain a superset of the typed tree dependencies: +{source} is missing dependencies: %A{depNames delta}.""" + ) + + [| 0 .. 
(sourceFiles.Length - 1) |] + |> Array.iter (fun (fileIdx: int) -> + let file = files.[fileIdx] + compareDeps "Trie heuristic" file.File file.Idx (Map.find file.Idx heuristicMap)) + finally + Environment.CurrentDirectory <- previousDir + } diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/Utils.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/Utils.fs new file mode 100644 index 00000000000..397892eccd9 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/Utils.fs @@ -0,0 +1,35 @@ +module FSharp.Compiler.ComponentTests.TypeChecks.Graph.TestUtils + +open FSharp.Compiler.CodeAnalysis +open FSharp.Compiler.GraphChecking +open FSharp.Compiler.Text +open FSharp.Compiler.Syntax + +let private checker = FSharpChecker.Create() + +let parseSourceCode (name: string, code: string) = + let sourceText = SourceText.ofString code + + let parsingOptions = + { FSharpParsingOptions.Default with + SourceFiles = [| name |] + } + + let result = + checker.ParseFile(name, sourceText, parsingOptions) |> Async.RunSynchronously + + result.ParseTree + +type TestFileWithAST = + { + Idx: int + File: string + AST: ParsedInput + } + + static member internal Map (x:TestFileWithAST) : FileInProject = + { + Idx = x.Idx + FileName = x.File + ParsedInput = x.AST + } \ No newline at end of file diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/scrape.fsx b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/scrape.fsx new file mode 100644 index 00000000000..c20afd627c3 --- /dev/null +++ b/tests/FSharp.Compiler.ComponentTests/TypeChecks/Graph/scrape.fsx @@ -0,0 +1,56 @@ +#r "nuget: MSBuild.StructuredLogger, 2.1.746" + +open System.IO +open Microsoft.Build.Logging.StructuredLogger + +/// Create a text file with the F# compiler arguments scraped from a binary log file. +/// Run `dotnet build --no-incremental -bl` to create the binlog file. +/// The --no-incremental flag is essential for this scraping code. +let mkCompilerArgsFromBinLog file = + let build = BinaryLog.ReadBuild file + + let projectName = + build.Children + |> Seq.choose (function + | :? Project as p -> Some p.Name + | _ -> None) + |> Seq.distinct + |> Seq.exactlyOne + + let message (fscTask: FscTask) = + fscTask.Children + |> Seq.tryPick (function + | :? Message as m when m.Text.Contains "fsc" -> Some m.Text + | _ -> None) + + let mutable args = None + + build.VisitAllChildren(fun task -> + match task with + | :? FscTask as fscTask -> + match fscTask.Parent.Parent with + | :? Project as p when p.Name = projectName -> args <- message fscTask + | _ -> () + | _ -> ()) + + match args with + | None -> printfn "Could not process the binlog file. Did you build using '--no-incremental'?" 
+ | Some args -> + let content = + let idx = args.IndexOf "-o:" + args.Substring(idx) + + let directory = FileInfo(file).Directory.FullName + + let argsPath = + Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(projectName)}.args.txt") + + File.WriteAllText(argsPath, content) + printfn "Wrote %s" argsPath + +// Example: +// The binlog was created by running `dotnet build --no-incremental -bl` +mkCompilerArgsFromBinLog @"C:\Projects\fantomas\src\Fantomas.Core\msbuild.binlog" +mkCompilerArgsFromBinLog @"C:\Projects\FsAutoComplete\src\FsAutoComplete\msbuild.binlog" +mkCompilerArgsFromBinLog @"C:\Projects\fsharp\src\Compiler\msbuild.binlog" +mkCompilerArgsFromBinLog @"C:\Projects\fsharp\tests\FSharp.Compiler.ComponentTests\msbuild.binlog" diff --git a/tests/FSharp.Compiler.ComponentTests/TypeChecks/ParallelCheckingWithSignatureFilesTests.fs b/tests/FSharp.Compiler.ComponentTests/TypeChecks/ParallelCheckingWithSignatureFilesTests.fs deleted file mode 100644 index 9e187de1388..00000000000 --- a/tests/FSharp.Compiler.ComponentTests/TypeChecks/ParallelCheckingWithSignatureFilesTests.fs +++ /dev/null @@ -1,61 +0,0 @@ -module FSharp.Compiler.ComponentTests.TypeChecks.ParallelCheckingWithSignatureFilesTests - -open Xunit -open FSharp.Test -open FSharp.Test.Compiler - -[] -let ``Parallel type checking when signature files are available`` () = - // File structure: - // Encode.fsi - // Encode.fs - // Decode.fsi - // Decode.fs - // Program.fs - - let encodeFsi = - Fsi - """ -module Encode - -val encode: obj -> string -""" - - let encodeFs = - SourceCodeFileKind.Create( - "Encode.fs", - """ -module Encode - -let encode (v: obj) : string = failwith "todo" -""" - ) - - let decodeFsi = - SourceCodeFileKind.Create( - "Decode.fsi", - """ -module Decode - -val decode: string -> obj -""" - ) - - let decodeFs = - SourceCodeFileKind.Create( - "Decode.fs", - """ -module Decode - -let decode (v: string) : obj = failwith "todo" -""" - ) - - let programFs = SourceCodeFileKind.Create("Program.fs", "printfn \"Hello from F#\"") - - encodeFsi - |> withAdditionalSourceFiles [ encodeFs; decodeFsi; decodeFs; programFs ] - |> withOptions [ "--test:ParallelCheckingWithSignatureFilesOn" ] - |> asExe - |> compile - |> shouldSucceed diff --git a/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.debug.bsl b/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.debug.bsl index 1bce6c61d8d..defa231a1c2 100644 --- a/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.debug.bsl +++ b/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.debug.bsl @@ -2028,7 +2028,7 @@ FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: FSharp.Compiler.Symbols. 
FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: System.String ToString() FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: System.String[] DependencyFiles FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: System.String[] get_DependencyFiles() -FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker Create(Microsoft.FSharp.Core.FSharpOption`1[System.Int32], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver], Microsoft.FSharp.Core.FSharpOption`1[Microsoft.FSharp.Core.FSharpFunc`2[System.Tuple`2[System.String,System.DateTime],Microsoft.FSharp.Core.FSharpOption`1[System.Tuple`3[System.Object,System.IntPtr,System.Int32]]]], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.DocumentSource]) +FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker Create(Microsoft.FSharp.Core.FSharpOption`1[System.Int32], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver], Microsoft.FSharp.Core.FSharpOption`1[Microsoft.FSharp.Core.FSharpFunc`2[System.Tuple`2[System.String,System.DateTime],Microsoft.FSharp.Core.FSharpOption`1[System.Tuple`3[System.Object,System.IntPtr,System.Int32]]]], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.DocumentSource]) FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker Instance FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker get_Instance() FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpProjectOptions GetProjectOptionsFromCommandLineArgs(System.String, System.String[], Microsoft.FSharp.Core.FSharpOption`1[System.DateTime], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean]) @@ -4078,6 +4078,7 @@ FSharp.Compiler.IO.DefaultFileSystem: System.DateTime GetCreationTimeShim(System FSharp.Compiler.IO.DefaultFileSystem: System.DateTime GetLastWriteTimeShim(System.String) FSharp.Compiler.IO.DefaultFileSystem: System.IO.Stream OpenFileForReadShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean]) FSharp.Compiler.IO.DefaultFileSystem: System.IO.Stream OpenFileForWriteShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileMode], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileAccess], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileShare]) +FSharp.Compiler.IO.DefaultFileSystem: System.String ChangeExtensionShim(System.String, System.String) 
FSharp.Compiler.IO.DefaultFileSystem: System.String DirectoryCreateShim(System.String) FSharp.Compiler.IO.DefaultFileSystem: System.String GetDirectoryNameShim(System.String) FSharp.Compiler.IO.DefaultFileSystem: System.String GetFullFilePathInDirectoryShim(System.String, System.String) @@ -4106,6 +4107,7 @@ FSharp.Compiler.IO.IFileSystem: System.DateTime GetCreationTimeShim(System.Strin FSharp.Compiler.IO.IFileSystem: System.DateTime GetLastWriteTimeShim(System.String) FSharp.Compiler.IO.IFileSystem: System.IO.Stream OpenFileForReadShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean]) FSharp.Compiler.IO.IFileSystem: System.IO.Stream OpenFileForWriteShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileMode], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileAccess], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileShare]) +FSharp.Compiler.IO.IFileSystem: System.String ChangeExtensionShim(System.String, System.String) FSharp.Compiler.IO.IFileSystem: System.String DirectoryCreateShim(System.String) FSharp.Compiler.IO.IFileSystem: System.String GetDirectoryNameShim(System.String) FSharp.Compiler.IO.IFileSystem: System.String GetFullFilePathInDirectoryShim(System.String, System.String) diff --git a/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.release.bsl b/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.release.bsl index 1bce6c61d8d..defa231a1c2 100644 --- a/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.release.bsl +++ b/tests/FSharp.Compiler.Service.Tests/FSharp.Compiler.Service.SurfaceArea.netstandard20.release.bsl @@ -2028,7 +2028,7 @@ FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: FSharp.Compiler.Symbols. 
FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: System.String ToString() FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: System.String[] DependencyFiles FSharp.Compiler.CodeAnalysis.FSharpCheckProjectResults: System.String[] get_DependencyFiles() -FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker Create(Microsoft.FSharp.Core.FSharpOption`1[System.Int32], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver], Microsoft.FSharp.Core.FSharpOption`1[Microsoft.FSharp.Core.FSharpFunc`2[System.Tuple`2[System.String,System.DateTime],Microsoft.FSharp.Core.FSharpOption`1[System.Tuple`3[System.Object,System.IntPtr,System.Int32]]]], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.DocumentSource]) +FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker Create(Microsoft.FSharp.Core.FSharpOption`1[System.Int32], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.LegacyReferenceResolver], Microsoft.FSharp.Core.FSharpOption`1[Microsoft.FSharp.Core.FSharpFunc`2[System.Tuple`2[System.String,System.DateTime],Microsoft.FSharp.Core.FSharpOption`1[System.Tuple`3[System.Object,System.IntPtr,System.Int32]]]], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[FSharp.Compiler.CodeAnalysis.DocumentSource]) FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker Instance FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpChecker get_Instance() FSharp.Compiler.CodeAnalysis.FSharpChecker: FSharp.Compiler.CodeAnalysis.FSharpProjectOptions GetProjectOptionsFromCommandLineArgs(System.String, System.String[], Microsoft.FSharp.Core.FSharpOption`1[System.DateTime], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean]) @@ -4078,6 +4078,7 @@ FSharp.Compiler.IO.DefaultFileSystem: System.DateTime GetCreationTimeShim(System FSharp.Compiler.IO.DefaultFileSystem: System.DateTime GetLastWriteTimeShim(System.String) FSharp.Compiler.IO.DefaultFileSystem: System.IO.Stream OpenFileForReadShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean]) FSharp.Compiler.IO.DefaultFileSystem: System.IO.Stream OpenFileForWriteShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileMode], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileAccess], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileShare]) +FSharp.Compiler.IO.DefaultFileSystem: System.String ChangeExtensionShim(System.String, System.String) 
FSharp.Compiler.IO.DefaultFileSystem: System.String DirectoryCreateShim(System.String) FSharp.Compiler.IO.DefaultFileSystem: System.String GetDirectoryNameShim(System.String) FSharp.Compiler.IO.DefaultFileSystem: System.String GetFullFilePathInDirectoryShim(System.String, System.String) @@ -4106,6 +4107,7 @@ FSharp.Compiler.IO.IFileSystem: System.DateTime GetCreationTimeShim(System.Strin FSharp.Compiler.IO.IFileSystem: System.DateTime GetLastWriteTimeShim(System.String) FSharp.Compiler.IO.IFileSystem: System.IO.Stream OpenFileForReadShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.Boolean], Microsoft.FSharp.Core.FSharpOption`1[System.Boolean]) FSharp.Compiler.IO.IFileSystem: System.IO.Stream OpenFileForWriteShim(System.String, Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileMode], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileAccess], Microsoft.FSharp.Core.FSharpOption`1[System.IO.FileShare]) +FSharp.Compiler.IO.IFileSystem: System.String ChangeExtensionShim(System.String, System.String) FSharp.Compiler.IO.IFileSystem: System.String DirectoryCreateShim(System.String) FSharp.Compiler.IO.IFileSystem: System.String GetDirectoryNameShim(System.String) FSharp.Compiler.IO.IFileSystem: System.String GetFullFilePathInDirectoryShim(System.String, System.String) diff --git a/tests/FSharp.Test.Utilities/Compiler.fs b/tests/FSharp.Test.Utilities/Compiler.fs index 252271bdbaa..74250c83cdf 100644 --- a/tests/FSharp.Test.Utilities/Compiler.fs +++ b/tests/FSharp.Test.Utilities/Compiler.fs @@ -479,17 +479,18 @@ module rec Compiler = | CS cs -> CS { cs with LangVersion = ver } | _ -> failwith "Only supported in C#" - let asLibrary (cUnit: CompilationUnit) : CompilationUnit = - match cUnit with - | FS fs -> FS { fs with OutputType = CompileOutput.Library } - | _ -> failwith "TODO: Implement asLibrary where applicable." - - let asExe (cUnit: CompilationUnit) : CompilationUnit = + let withOutputType (outputType : CompileOutput) (cUnit: CompilationUnit) : CompilationUnit = match cUnit with - | FS x -> FS { x with OutputType = Exe } - | CS x -> CS { x with OutputType = Exe } + | FS x -> FS { x with OutputType = outputType } + | CS x -> CS { x with OutputType = outputType } | _ -> failwith "TODO: Implement where applicable." 
+ let asLibrary (cUnit: CompilationUnit) : CompilationUnit = + withOutputType CompileOutput.Library cUnit + + let asExe (cUnit: CompilationUnit) : CompilationUnit = + withOutputType CompileOutput.Exe cUnit + let withPlatform (platform:ExecutionPlatform) (cUnit: CompilationUnit) : CompilationUnit = match cUnit with | FS _ -> diff --git a/tests/FSharp.Test.Utilities/ProjectGeneration.fs b/tests/FSharp.Test.Utilities/ProjectGeneration.fs index 4e2e4823908..a2abe1d67b1 100644 --- a/tests/FSharp.Test.Utilities/ProjectGeneration.fs +++ b/tests/FSharp.Test.Utilities/ProjectGeneration.fs @@ -82,7 +82,8 @@ type SyntheticProject = ProjectDir: string SourceFiles: SyntheticSourceFile list DependsOn: SyntheticProject list - RecursiveNamespace: bool } + RecursiveNamespace: bool + OtherOptions: string list } static member Create(?name: string) = let name = defaultArg name $"TestProject_{Guid.NewGuid().ToString()[..7]}" @@ -92,7 +93,8 @@ type SyntheticProject = ProjectDir = dir ++ name SourceFiles = [] DependsOn = [] - RecursiveNamespace = false } + RecursiveNamespace = false + OtherOptions = [] } static member Create([] sourceFiles: SyntheticSourceFile[]) = { SyntheticProject.Create() with SourceFiles = sourceFiles |> List.ofArray } @@ -154,7 +156,8 @@ type SyntheticProject = [| yield! baseOptions.OtherOptions "--optimize+" for p in this.DependsOn do - $"-r:{p.OutputFilename}" |] + $"-r:{p.OutputFilename}" + yield! this.OtherOptions |] ReferencedProjects = [| for p in this.DependsOn do FSharpReferencedProject.CreateFSharp(p.OutputFilename, p.GetProjectOptions checker) |] @@ -688,6 +691,23 @@ type ProjectWorkflowBuilder return ctx } + [] + member this.Compile(workflow: Async) = + async { + let! ctx = workflow + let projectOptions = ctx.Project.GetProjectOptions(checker) + let arguments = + [| + yield "fsc.exe" + yield! projectOptions.OtherOptions + yield! projectOptions.SourceFiles + |] + let! _diagnostics, exitCode = checker.Compile(arguments) + if exitCode <> 0 then + exn $"Compilation failed with exit code {exitCode}" |> raise + return ctx + } + /// Execute a set of operations on a given synthetic project. /// The project is saved to disk and type checked at the start. let projectWorkflow project = ProjectWorkflowBuilder project diff --git a/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FSharp.Compiler.Benchmarks.fsproj b/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FSharp.Compiler.Benchmarks.fsproj index 07af4baf87e..62662e1e5ab 100644 --- a/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FSharp.Compiler.Benchmarks.fsproj +++ b/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FSharp.Compiler.Benchmarks.fsproj @@ -19,6 +19,7 @@ + diff --git a/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FileCascadeBenchmarks.fs b/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FileCascadeBenchmarks.fs index accffc76c12..17822adeb96 100644 --- a/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FileCascadeBenchmarks.fs +++ b/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/FileCascadeBenchmarks.fs @@ -95,8 +95,6 @@ type FileCascadeBenchmarks() = [] member val PartialCheck = true with get,set [] - member val ParaChecking = true with get,set - [] member val GenerateFSI = true with get,set member val Checker = Unchecked.defaultof with get, set @@ -104,11 +102,10 @@ type FileCascadeBenchmarks() = [] member this.Setup() = - printfn $"Running Setup(). 
Partial = {this.PartialCheck}, Para = {this.ParaChecking}, FSIGen = {this.GenerateFSI}" + printfn $"Running Setup(). Partial = {this.PartialCheck}, FSIGen = {this.GenerateFSI}" this.Checker <- FSharpChecker.Create( projectCacheSize = 5, - enablePartialTypeChecking = this.PartialCheck, - enableParallelCheckingWithSignatureFiles = this.ParaChecking) + enablePartialTypeChecking = this.PartialCheck) let projectFolder = Directory.CreateDirectory(Path.Combine(Directory.GetCurrentDirectory(),"CascadeBenchmarkProject")) diff --git a/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/GraphTypeCheckingBenchmarks.fs b/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/GraphTypeCheckingBenchmarks.fs new file mode 100644 index 00000000000..dd16e8b13a0 --- /dev/null +++ b/tests/benchmarks/FCSBenchmarks/CompilerServiceBenchmarks/GraphTypeCheckingBenchmarks.fs @@ -0,0 +1,77 @@ +module FSharp.Benchmarks.GraphTypeCheckingBenchmarks + +open System.IO +open BenchmarkDotNet.Attributes +open FSharp.Compiler.CodeAnalysis +open FSharp.Test.ProjectGeneration + +[] +let FSharpCategory = "fsharp" + +[] +[] +type GraphTypeCheckingBenchmarks() = + + let size = 250 + + let somethingToCompile = + File.ReadAllText(__SOURCE_DIRECTORY__ ++ "SomethingToCompile.fs") + + member val Benchmark = Unchecked.defaultof<_> with get, set + + [] + member val GraphTypeChecking = true with get, set + + member this.setup(project) = + let checker = FSharpChecker.Create() + this.Benchmark <- ProjectWorkflowBuilder(project, checker = checker).CreateBenchmarkBuilder() + saveProject project false checker |> Async.RunSynchronously + + // Each file depends on the previous one + [] + member this.SingleDependentChain_Setup() = + this.setup + { SyntheticProject.Create() with + SourceFiles = + [ + for i in 0..size do + { sourceFile + $"File%04d{i}" + [ + if i > 0 then + $"File%04d{i - 1}" + ] with + ExtraSource = somethingToCompile + } + ] + OtherOptions = + [ + if this.GraphTypeChecking then + "--test:GraphBasedChecking" + ] + } + + [] + member this.SingleDependentChain() = this.Benchmark { compileWithFSC } + + // No file has any dependency + [] + member this.NoDependencies_Setup() = + this.setup + { SyntheticProject.Create() with + SourceFiles = + [ + for i in 0..size do + { sourceFile $"File%04d{i}" [] with + ExtraSource = somethingToCompile + } + ] + OtherOptions = + [ + if this.GraphTypeChecking then + "--test:GraphBasedChecking" + ] + } + + [] + member this.NoDependencies() = this.Benchmark { compileWithFSC } diff --git a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs index 8b418d2c5b7..821a0ab8aad 100644 --- a/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/LanguageService/LanguageService.fs @@ -116,9 +116,6 @@ type internal FSharpWorkspaceServiceFactory |> Option.map f |> Option.defaultValue defaultValue - let enableParallelCheckingWithSignatureFiles = - getOption (fun options -> options.LanguageServicePerformance.EnableParallelCheckingWithSignatureFiles) false - let enableParallelReferenceResolution = getOption (fun options -> options.LanguageServicePerformance.EnableParallelReferenceResolution) false @@ -134,7 +131,6 @@ type internal FSharpWorkspaceServiceFactory keepAllBackgroundSymbolUses = false, enableBackgroundItemKeyStoreAndSemanticClassification = true, enablePartialTypeChecking = true, - enableParallelCheckingWithSignatureFiles = enableParallelCheckingWithSignatureFiles, 
parallelReferenceResolution = enableParallelReferenceResolution, captureIdentifiersWhenParsing = true, documentSource = (if enableLiveBuffers then DocumentSource.Custom getSource else DocumentSource.FileSystem)) diff --git a/vsintegration/src/FSharp.Editor/Options/EditorOptions.fs b/vsintegration/src/FSharp.Editor/Options/EditorOptions.fs index b782f40d245..78f751999eb 100644 --- a/vsintegration/src/FSharp.Editor/Options/EditorOptions.fs +++ b/vsintegration/src/FSharp.Editor/Options/EditorOptions.fs @@ -63,14 +63,12 @@ type LanguageServicePerformanceOptions = { EnableInMemoryCrossProjectReferences: bool AllowStaleCompletionResults: bool TimeUntilStaleCompletion: int - EnableParallelCheckingWithSignatureFiles: bool EnableParallelReferenceResolution: bool EnableFastFindReferences: bool } static member Default = { EnableInMemoryCrossProjectReferences = true AllowStaleCompletionResults = true TimeUntilStaleCompletion = 2000 // In ms, so this is 2 seconds - EnableParallelCheckingWithSignatureFiles = false EnableParallelReferenceResolution = false EnableFastFindReferences = FSharpExperimentalFeaturesEnabledAutomatically }
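Taken together, these hunks replace the signature-file-based parallel checking switch with the experimental graph-based mode. As a rough usage sketch (not part of this diff), the mode can be exercised through the in-process compiler by passing the `--test:GraphBasedChecking` flag that the GraphTypeCheckingBenchmarks put in `OtherOptions`; the file names and output options below are placeholders.

```fsharp
// Rough sketch, not part of this change: drive fsc in-process with the
// experimental graph-based type-checking flag. File and output names are placeholders.
open FSharp.Compiler.CodeAnalysis

let checker = FSharpChecker.Create()

let arguments =
    [|
        "fsc.exe"
        "--test:GraphBasedChecking" // opt into the graph mode instead of the default sequential checking
        "-a"                        // produce a library
        "-o:GraphSample.dll"        // hypothetical output name
        "Encode.fsi"                // placeholder source files, listed in compilation order
        "Encode.fs"
        "Program.fs"
    |]

// FSharpChecker.Compile runs the compiler in-process and returns the diagnostics
// and exit code, the same entry point the new ProjectWorkflowBuilder.Compile member uses.
let diagnostics, exitCode =
    checker.Compile(arguments) |> Async.RunSynchronously

diagnostics |> Array.iter (fun d -> printfn $"%A{d.Severity}: {d.Message}")
printfn $"fsc exited with code {exitCode}"
```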