From 14b4882f724d5963316ccefd3661aaaec0bda1a8 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Fri, 20 Nov 2020 07:45:53 -0800 Subject: [PATCH 01/43] [refactor] plumbing for incomplete autocomplete results Summary: LSP `CompletionList` supports an `isIncomplete` property. when false (the default), the IDE triggers a completion request when you type a trigger character and filters locally as you keep typing, without making further requests. when true, the editor continues sending completion requests as the user types the same token, allowing the server to do its own filtering (useful when there are too many results). we currently always set `false`, but this change sets up a later change. Reviewed By: vrama628 Differential Revision: D23962030 fbshipit-source-id: 90eace31ebab197117c34102e26edaca97ba411d --- src/commands/autocompleteCommand.ml | 23 +++--- src/common/flow_lsp_conversions.ml | 14 +++- src/server/command_handler/commandHandler.ml | 30 ++++--- src/server/protocol/serverProt.ml | 5 +- .../autocomplete/autocompleteService_js.ml | 78 ++++++++++--------- .../autocomplete/autocompleteService_js.mli | 2 +- 6 files changed, 84 insertions(+), 68 deletions(-) diff --git a/src/commands/autocompleteCommand.ml b/src/commands/autocompleteCommand.ml index 5be7bb09fc0..97343b92447 100644 --- a/src/commands/autocompleteCommand.ml +++ b/src/commands/autocompleteCommand.ml @@ -107,8 +107,8 @@ let autocomplete_response_to_json ~strip_root response = ("result", JSON_Array []); (* TODO: remove this? 
kept for BC *) ] - | Ok completions -> - let results = Base.List.map ~f:(autocomplete_result_to_json ~strip_root) completions in + | Ok { ServerProt.Response.Completion.items; is_incomplete = _ } -> + let results = Base.List.map ~f:(autocomplete_result_to_json ~strip_root) items in JSON_Object [("result", JSON_Array results)]) let main base_flags option_values json pretty root strip_root wait_for_recheck lsp args () = @@ -147,25 +147,26 @@ let main base_flags option_values json pretty root strip_root wait_for_recheck l if lsp then Base.Result.iter results - ~f: - (List.iter - ( Flow_lsp_conversions.flow_completion_to_lsp - ~is_snippet_supported:true - ~is_preselect_supported:true - %> Lsp_fmt.print_completionItem ~key:(Path.to_string root) - %> Hh_json.print_json_endline ~pretty:true )) + ~f:(fun { ServerProt.Response.Completion.items; is_incomplete = _ } -> + List.iter + ( Flow_lsp_conversions.flow_completion_item_to_lsp + ~is_snippet_supported:true + ~is_preselect_supported:true + %> Lsp_fmt.print_completionItem ~key:(Path.to_string root) + %> Hh_json.print_json_endline ~pretty:true ) + items) else if json || pretty then results |> autocomplete_response_to_json ~strip_root |> Hh_json.print_json_endline ~pretty else ( match results with | Error error -> prerr_endlinef "Error: %s" error - | Ok completions -> + | Ok { ServerProt.Response.Completion.items; is_incomplete = _ } -> List.iter (fun res -> let name = res.ServerProt.Response.Completion.name in let detail = res.ServerProt.Response.Completion.detail in print_endline (Printf.sprintf "%s %s" name detail)) - completions + items ) let command = CommandSpec.command spec main diff --git a/src/common/flow_lsp_conversions.ml b/src/common/flow_lsp_conversions.ml index 079c3650512..0dee44dc8bc 100644 --- a/src/common/flow_lsp_conversions.ml +++ b/src/common/flow_lsp_conversions.ml @@ -78,7 +78,7 @@ let flow_signature_help_to_lsp in Some { signatures; activeSignature = 0; activeParameter = active_parameter } -let 
flow_completion_to_lsp +let flow_completion_item_to_lsp ~is_snippet_supported:(_ : bool) ~(is_preselect_supported : bool) (item : ServerProt.Response.Completion.completion_item) : Lsp.Completion.completionItem = @@ -115,6 +115,18 @@ let flow_completion_to_lsp data = None; } +let flow_completions_to_lsp + ~(is_snippet_supported : bool) + ~(is_preselect_supported : bool) + (completions : ServerProt.Response.Completion.t) : Lsp.Completion.result = + let { ServerProt.Response.Completion.items; is_incomplete } = completions in + let items = + Base.List.map + ~f:(flow_completion_item_to_lsp ~is_snippet_supported ~is_preselect_supported) + items + in + { Lsp.Completion.isIncomplete = is_incomplete; items } + let file_key_to_uri (file_key_opt : File_key.t option) : (Lsp.DocumentUri.t, string) result = let ( >>| ) = Base.Result.( >>| ) in let ( >>= ) = Base.Result.( >>= ) in diff --git a/src/server/command_handler/commandHandler.ml b/src/server/command_handler/commandHandler.ml index d573351c260..32ee0e22044 100644 --- a/src/server/command_handler/commandHandler.ml +++ b/src/server/command_handler/commandHandler.ml @@ -156,27 +156,28 @@ let autocomplete ~trigger_character ~reader ~options ~env ~profiling ~filename ~ let (response, json_props_to_log) = let open Hh_json in match results_res with - | AcResult { results; errors_to_log } -> + | AcResult { result; errors_to_log } -> + let { ServerProt.Response.Completion.items; is_incomplete = _ } = result in let result_string = - match (results, errors_to_log) with + match (items, errors_to_log) with | (_, []) -> "SUCCESS" | ([], _ :: _) -> "FAILURE" | (_ :: _, _ :: _) -> "PARTIAL" in let at_least_one_result_has_documentation = Base.List.exists - results + items ~f:(fun ServerProt.Response.Completion.{ documentation; _ } -> Base.Option.is_some documentation) in - ( Ok results, + ( Ok result, ("result", JSON_String result_string) - :: ("count", JSON_Number (results |> List.length |> string_of_int)) + :: ("count", JSON_Number 
(items |> List.length |> string_of_int)) :: ("errors", JSON_Array (Base.List.map ~f:(fun s -> JSON_String s) errors_to_log)) :: ("documentation", JSON_Bool at_least_one_result_has_documentation) :: json_props_to_log ) | AcEmpty reason -> - ( Ok [], + ( Ok { ServerProt.Response.Completion.items = []; is_incomplete = false }, ("result", JSON_String "SUCCESS") :: ("count", JSON_Number "0") :: ("empty_reason", JSON_String reason) @@ -1604,17 +1605,14 @@ let handle_persistent_autocomplete_lsp let metadata = with_data ~extra_data metadata in begin match result with - | Ok items -> - let items = - Base.List.map - ~f: - (Flow_lsp_conversions.flow_completion_to_lsp - ~is_snippet_supported - ~is_preselect_supported) - items + | Ok completions -> + let result = + Flow_lsp_conversions.flow_completions_to_lsp + ~is_snippet_supported + ~is_preselect_supported + completions in - let r = CompletionResult { Lsp.Completion.isIncomplete = false; items } in - let response = ResponseMessage (id, r) in + let response = ResponseMessage (id, CompletionResult result) in Lwt.return ((), LspProt.LspFromServer (Some response), metadata) | Error reason -> mk_lsp_error_response ~ret:() ~id:(Some id) ~reason metadata end) diff --git a/src/server/protocol/serverProt.ml b/src/server/protocol/serverProt.ml index bdb6dafb2e3..c23af8d8741 100644 --- a/src/server/protocol/serverProt.ml +++ b/src/server/protocol/serverProt.ml @@ -237,7 +237,10 @@ module Response = struct documentation: string option; } - type t = completion_item list + type t = { + items: completion_item list; + is_incomplete: bool; + } end type autocomplete_response = (Completion.t, string) result diff --git a/src/services/autocomplete/autocompleteService_js.ml b/src/services/autocomplete/autocompleteService_js.ml index 4f5f3e0158e..95afca20d34 100644 --- a/src/services/autocomplete/autocompleteService_js.ml +++ b/src/services/autocomplete/autocompleteService_js.ml @@ -168,7 +168,7 @@ let ty_normalizer_options = type 
autocomplete_service_result = | AcResult of { - results: ServerProt.Response.Completion.t; + result: ServerProt.Response.Completion.t; errors_to_log: string list; } | AcEmpty of string @@ -433,7 +433,7 @@ let autocomplete_member with | Error err -> AcFatalError err | Ok (mems, errors_to_log) -> - let results = + let items = mems |> Base.List.map ~f:(fun (name, documentation, MemberInfo.{ ty; from_proto; from_nullable }) -> @@ -468,7 +468,8 @@ let autocomplete_member (name, ac_loc) opt_chain_ty) in - AcResult { results; errors_to_log } + let result = { ServerProt.Response.Completion.items; is_incomplete = false } in + AcResult { result; errors_to_log } (* turns typed AST into normal AST so we can run Scope_builder on it *) (* TODO(vijayramamurthy): make scope builder polymorphic *) @@ -581,10 +582,10 @@ let autocomplete_id let open ServerProt.Response.Completion in let ac_loc = loc_of_aloc ~reader ac_loc |> remove_autocomplete_token_from_loc in let exact_by_default = Context.exact_by_default cx in - let (results, errors_to_log) = + let (items, errors_to_log) = local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparams |> List.fold_left - (fun (results, errors_to_log) ((name, documentation), elt_result) -> + (fun (items, errors_to_log) ((name, documentation), elt_result) -> match elt_result with | Ok elt -> let result = @@ -595,14 +596,14 @@ let autocomplete_id (name, ac_loc) elt in - (result :: results, errors_to_log) + (result :: items, errors_to_log) | Error err -> let error_to_log = Ty_normalizer.error_to_string err in - (results, error_to_log :: errors_to_log)) + (items, error_to_log :: errors_to_log)) ([], []) in (* "this" is legal inside classes and (non-arrow) functions *) - let results = + let items = if include_this then { kind = Some Lsp.Completion.Variable; @@ -613,12 +614,12 @@ let autocomplete_id preselect = false; documentation = None; } - :: results + :: items else - results + items in (* "super" is legal inside classes *) - 
let results = + let items = if include_super then { kind = Some Lsp.Completion.Variable; @@ -629,11 +630,12 @@ let autocomplete_id preselect = false; documentation = None; } - :: results + :: items else - results + items in - AcResult { results; errors_to_log } + let result = { ServerProt.Response.Completion.items; is_incomplete = false } in + AcResult { result; errors_to_log } (* Similar to autocomplete_member, except that we're not directly given an object type whose members we want to enumerate: instead, we are given a @@ -668,7 +670,7 @@ let autocomplete_jsx ~reader cx file_sig typed_ast cls ac_name ~used_attr_names match mems_result with | Error err -> AcFatalError err | Ok (mems, errors_to_log) -> - let results = + let items = mems |> Base.List.map ~f:(fun (name, documentation, MemberInfo.{ ty; _ }) -> autocomplete_create_result @@ -678,7 +680,8 @@ let autocomplete_jsx ~reader cx file_sig typed_ast cls ac_name ~used_attr_names (name, ac_loc) ty) in - AcResult { results; errors_to_log } + let result = { ServerProt.Response.Completion.items; is_incomplete = false } in + AcResult { result; errors_to_log } (* TODO(vijayramamurthy) think about how to break this file down into smaller modules *) (* NOTE: excludes classes, because we'll get those from local_value_identifiers *) @@ -840,19 +843,19 @@ let autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~file_sig ~ac_lo (* The value-level identifiers we suggest in type autocompletion: - classes - modules (followed by a dot) *) - let (results, errors_to_log) = + let (items, errors_to_log) = local_value_identifiers ~options ~typed_ast ~reader ~ac_loc ~tparams ~cx ~file_sig |> List.fold_left - (fun (results, errors_to_log) ((name, documentation), ty_res) -> + (fun (items, errors_to_log) ((name, documentation), ty_res) -> match ty_res with | Error err -> let error_to_log = Ty_normalizer.error_to_string err in - (results, error_to_log :: errors_to_log) + (items, error_to_log :: errors_to_log) | Ok (Ty.Decl 
(Ty.ClassDecl _ | Ty.EnumDecl _) as elt) -> let result = autocomplete_create_result_elt ?documentation ~exact_by_default (name, ac_loc) elt in - (result :: results, errors_to_log) + (result :: items, errors_to_log) | Ok elt when type_exports_of_module_ty ~ac_loc @@ -868,11 +871,12 @@ let autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~file_sig ~ac_lo elt ~insert_text:(name ^ ".") in - (result :: results, errors_to_log) - | Ok _ -> (results, errors_to_log)) + (result :: items, errors_to_log) + | Ok _ -> (items, errors_to_log)) (tparam_and_tident_results, tparam_and_tident_errors_to_log) in - AcResult { results; errors_to_log } + let result = { ServerProt.Response.Completion.items; is_incomplete = false } in + AcResult { result; errors_to_log } let autocomplete_qualified_type ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparams ~qtype = let ac_loc = loc_of_aloc ~reader ac_loc |> remove_autocomplete_token_from_loc in @@ -885,19 +889,15 @@ let autocomplete_qualified_type ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparam qtype_scheme in let documentation_of_module_member = documentation_of_member ~reader ~cx ~typed_ast qtype in - match module_ty_res with - | Error err -> AcResult { results = []; errors_to_log = [Ty_normalizer.error_to_string err] } - | Ok module_ty -> - AcResult - { - results = - type_exports_of_module_ty - ~ac_loc - ~exact_by_default - ~documentation_of_module_member - module_ty; - errors_to_log = []; - } + let (items, errors_to_log) = + match module_ty_res with + | Error err -> ([], [Ty_normalizer.error_to_string err]) + | Ok module_ty -> + ( type_exports_of_module_ty ~ac_loc ~exact_by_default ~documentation_of_module_member module_ty, + [] ) + in + AcResult + { result = { ServerProt.Response.Completion.items; is_incomplete = false }; errors_to_log } let autocomplete_get_results ~options ~reader ~cx ~file_sig ~typed_ast trigger_character cursor = let file_sig = File_sig.abstractify_locs file_sig in @@ -912,7 +912,8 @@ let 
autocomplete_get_results ~options ~reader ~cx ~file_sig ~typed_ast trigger_c ("Acmodule", AcEmpty "Module") | Some (_, _, Ackey) -> (* TODO: complete object keys based on their upper bounds *) - ("Ackey", AcResult { results = []; errors_to_log = [] }) + let result = { ServerProt.Response.Completion.items = []; is_incomplete = false } in + ("Ackey", AcResult { result; errors_to_log = [] }) | Some (tparams, ac_loc, Acid { include_super; include_this }) -> ( "Acid", autocomplete_id @@ -947,4 +948,5 @@ let autocomplete_get_results ~options ~reader ~cx ~file_sig ~typed_ast trigger_c ( "Acqualifiedtype", autocomplete_qualified_type ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparams ~qtype ) | None -> - ("None", AcResult { results = []; errors_to_log = ["Autocomplete token not found in AST"] }) + let result = { ServerProt.Response.Completion.items = []; is_incomplete = false } in + ("None", AcResult { result; errors_to_log = ["Autocomplete token not found in AST"] }) diff --git a/src/services/autocomplete/autocompleteService_js.mli b/src/services/autocomplete/autocompleteService_js.mli index 4414fe55c7a..dd58cce30ca 100644 --- a/src/services/autocomplete/autocompleteService_js.mli +++ b/src/services/autocomplete/autocompleteService_js.mli @@ -7,7 +7,7 @@ type autocomplete_service_result = | AcResult of { - results: ServerProt.Response.Completion.t; + result: ServerProt.Response.Completion.t; errors_to_log: string list; } | AcEmpty of string From 729ba66e3153316e425289b964358d57f55689c9 Mon Sep 17 00:00:00 2001 From: Mike Vitousek Date: Fri, 20 Nov 2020 13:12:50 -0800 Subject: [PATCH 02/43] [new-generics] Enable new generics by default Summary: This diff will make new generics the default way to check generic definitions and disable generate-tests unless explicitly requested in a flowconfig or CLI flag. 
Reviewed By: dsainati1 Differential Revision: D24310377 fbshipit-source-id: f008f50858985edc8fec3036d958790078628150 --- src/commands/config/flowConfig.ml | 2 +- src/flow_dot_js.ml | 2 +- src/typing/__tests__/typed_ast_test.ml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/commands/config/flowConfig.ml b/src/commands/config/flowConfig.ml index 4a2fbe2b79f..10aba586b2f 100644 --- a/src/commands/config/flowConfig.ml +++ b/src/commands/config/flowConfig.ml @@ -163,7 +163,7 @@ module Opts = struct enums_with_unknown_members = false; this_annot = false; exact_by_default = false; - generate_tests = true; + generate_tests = false; facebook_fbs = None; facebook_fbt = None; facebook_module_interop = false; diff --git a/src/flow_dot_js.ml b/src/flow_dot_js.ml index 316fd5b9b9e..2128df88532 100644 --- a/src/flow_dot_js.ml +++ b/src/flow_dot_js.ml @@ -155,7 +155,7 @@ let stub_metadata ~root ~checked = enforce_local_inference_annotations = false; enforce_strict_call_arity = true; exact_by_default = false; - generate_tests = true; + generate_tests = false; facebook_fbs = None; facebook_fbt = None; facebook_module_interop = false; diff --git a/src/typing/__tests__/typed_ast_test.ml b/src/typing/__tests__/typed_ast_test.ml index acd1f727111..e48ca5599e2 100644 --- a/src/typing/__tests__/typed_ast_test.ml +++ b/src/typing/__tests__/typed_ast_test.ml @@ -30,7 +30,7 @@ let metadata = enforce_strict_call_arity = true; enforce_local_inference_annotations = false; exact_by_default = false; - generate_tests = true; + generate_tests = false; facebook_fbs = None; facebook_fbt = None; facebook_module_interop = false; From 1b26fce654eb836833c0464b111c9028ffec1f1f Mon Sep 17 00:00:00 2001 From: Panagiotis Vekris Date: Fri, 20 Nov 2020 13:59:07 -0800 Subject: [PATCH 03/43] [easy] keep EvalT rules together Summary: keep EvalT rules together in the Flow_js pattern match Reviewed By: samwgoldman Differential Revision: D24924366 fbshipit-source-id: 
4e617691f4896a6dfd4ef228a098a4ded00e373f --- src/typing/flow_js.ml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/typing/flow_js.ml b/src/typing/flow_js.ml index 41be3fdad8c..07f7e42534e 100644 --- a/src/typing/flow_js.ml +++ b/src/typing/flow_js.ml @@ -977,6 +977,8 @@ struct rec_flow cx trace (l, UseT (use_op, result)) | (EvalT (t, LatentPredT (reason, p), i), _) -> rec_flow cx trace (eval_latent_pred cx ~trace reason t p i, u) + | (_, UseT (use_op, EvalT (t, LatentPredT (reason, p), i))) -> + rec_flow cx trace (l, UseT (use_op, eval_latent_pred cx ~trace reason t p i)) (******************) (* process X ~> Y *) (******************) @@ -1043,11 +1045,6 @@ struct (*****************) | (_, UseT (_, MergedT (_, uses))) -> List.iter (fun u -> rec_flow cx trace (l, u)) uses | (MergedT (reason, _), _) -> rec_flow cx trace (Unsoundness.why Merged reason, u) - (****************) - (* eval, contd. *) - (****************) - | (_, UseT (use_op, EvalT (t, LatentPredT (reason, p), i))) -> - rec_flow cx trace (l, UseT (use_op, eval_latent_pred cx ~trace reason t p i)) (***************************) (* type destructor trigger *) (***************************) From 92dad73bbc64540dcedecf6ad4589634544b62ea Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Mon, 23 Nov 2020 04:49:42 -0800 Subject: [PATCH 04/43] [refactor] autocomplete text_edits -> text_edit Summary: an autocomplete result can return multiple text edits. for example, it obviously has to insert the completion, but it can also make other changes like inserting an `import`. currently, the `text_edits` function takes a single edit and returns it as the entire list. instead, it should just return one so we can call it multiple times. 
Reviewed By: vrama628 Differential Revision: D25122138 fbshipit-source-id: 0a17d6f8bcc03c09e0fff6661333d02bce770617 --- .../autocomplete/autocompleteService_js.ml | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/services/autocomplete/autocompleteService_js.ml b/src/services/autocomplete/autocompleteService_js.ml index 95afca20d34..c0ae555c504 100644 --- a/src/services/autocomplete/autocompleteService_js.ml +++ b/src/services/autocomplete/autocompleteService_js.ml @@ -83,15 +83,15 @@ let lsp_completion_of_decl = let sort_text_of_rank rank = Some (Printf.sprintf "%020u" rank) -let text_edits ?insert_text (name, loc) = +let text_edit ?insert_text (name, loc) = let newText = Base.Option.value ~default:name insert_text in - [(loc, newText)] + (loc, newText) let autocomplete_create_result ?insert_text ?(rank = 0) ?(preselect = false) ?documentation ~exact_by_default (name, loc) ty = let detail = Ty_printer.string_of_t_single_line ~with_comments:false ~exact_by_default ty in let kind = lsp_completion_of_type ty in - let text_edits = text_edits ?insert_text (name, loc) in + let text_edits = [text_edit ?insert_text (name, loc)] in let sort_text = sort_text_of_rank rank in { ServerProt.Response.Completion.kind; @@ -116,7 +116,7 @@ let autocomplete_create_result_decl ( Some (lsp_completion_of_decl d), Ty_printer.string_of_decl_single_line ~with_comments:false ~exact_by_default d ) in - let text_edits = text_edits ?insert_text (name, loc) in + let text_edits = [text_edit ?insert_text (name, loc)] in let sort_text = sort_text_of_rank rank in { ServerProt.Response.Completion.kind; @@ -609,7 +609,7 @@ let autocomplete_id kind = Some Lsp.Completion.Variable; name = "this"; detail = "this"; - text_edits = text_edits ("this", ac_loc); + text_edits = [text_edit ("this", ac_loc)]; sort_text = sort_text_of_rank 0; preselect = false; documentation = None; @@ -625,7 +625,7 @@ let autocomplete_id kind = Some Lsp.Completion.Variable; name = 
"super"; detail = "super"; - text_edits = text_edits ("super", ac_loc); + text_edits = [text_edit ("super", ac_loc)]; sort_text = sort_text_of_rank 0; preselect = false; documentation = None; @@ -758,7 +758,7 @@ let type_exports_of_module_ty ~ac_loc ~exact_by_default ~documentation_of_module { kind = lsp_completion_of_type t; name = sym_name; - text_edits = text_edits (sym_name, ac_loc); + text_edits = [text_edit (sym_name, ac_loc)]; detail = Ty_printer.string_of_decl_single_line ~exact_by_default d; sort_text = None; preselect = false; @@ -769,7 +769,7 @@ let type_exports_of_module_ty ~ac_loc ~exact_by_default ~documentation_of_module { kind = Some Lsp.Completion.Interface; name = sym_name; - text_edits = text_edits (sym_name, ac_loc); + text_edits = [text_edit (sym_name, ac_loc)]; detail = Ty_printer.string_of_decl_single_line ~exact_by_default d; sort_text = None; preselect = false; @@ -780,7 +780,7 @@ let type_exports_of_module_ty ~ac_loc ~exact_by_default ~documentation_of_module { kind = Some Lsp.Completion.Class; name = sym_name; - text_edits = text_edits (sym_name, ac_loc); + text_edits = [text_edit (sym_name, ac_loc)]; detail = Ty_printer.string_of_decl_single_line ~exact_by_default d; sort_text = None; preselect = false; @@ -791,7 +791,7 @@ let type_exports_of_module_ty ~ac_loc ~exact_by_default ~documentation_of_module { kind = Some Lsp.Completion.Enum; name = sym_name; - text_edits = text_edits (sym_name, ac_loc); + text_edits = [text_edit (sym_name, ac_loc)]; detail = Ty_printer.string_of_decl_single_line ~exact_by_default d; sort_text = None; preselect = false; @@ -814,7 +814,7 @@ let autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~file_sig ~ac_lo kind = Some Lsp.Completion.TypeParameter; name; detail = name; - text_edits = text_edits (name, ac_loc); + text_edits = [text_edit (name, ac_loc)]; sort_text = sort_text_of_rank 0; preselect = false; documentation = None; From b969691a74009d38bbe63bde3f265b197f841dca Mon Sep 17 00:00:00 2001 
From: Sam Goldman Date: Tue, 1 Dec 2020 15:40:56 -0800 Subject: [PATCH 05/43] Remove unused Stats module Summary: I think this is used in Hack, but we never use it. We accumulate some information about the max (shared) heap size, but never log it anywhere. Reviewed By: jbrown215 Differential Revision: D25254509 fbshipit-source-id: 07a79c62e80a0019925861ba923ebd47eff656f5 --- src/hack_forked/utils/core/stats.ml | 37 ---------------------------- src/hack_forked/utils/core/stats.mli | 27 -------------------- src/heap/sharedMem.ml | 1 - 3 files changed, 65 deletions(-) delete mode 100644 src/hack_forked/utils/core/stats.ml delete mode 100644 src/hack_forked/utils/core/stats.mli diff --git a/src/hack_forked/utils/core/stats.ml b/src/hack_forked/utils/core/stats.ml deleted file mode 100644 index 867f4119419..00000000000 --- a/src/hack_forked/utils/core/stats.ml +++ /dev/null @@ -1,37 +0,0 @@ -(* - * Copyright (c) Facebook, Inc. and its affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -(* Not all stats are worth logging for every user. Things like the initial heap - * size are pretty deterministic if you know the input (i.e. the files being - * checked). In fact, it's *only* useful information if you know the input. - * This file is for storing these types of stats: Things that would be useful - * for a benchmark script to know, so it can say "for these inputs, under these - * conditions, here's how hh_server behaves". 
- *) -type t = { - mutable init_parsing_heap_size: int; - mutable init_heap_size: int; - mutable max_heap_size: int; - gc_stat: Gc.stat; -} - -let stats : t = - { init_parsing_heap_size = 0; init_heap_size = 0; max_heap_size = 0; gc_stat = Gc.quick_stat () } - -let get_stats () = { stats with gc_stat = Gc.quick_stat () } - -let update_max_heap_size x = stats.max_heap_size <- max stats.max_heap_size x - -let to_json stats = - Hh_json.JSON_Object - [ - ("init_parsing_heap_size", Hh_json.int_ stats.init_parsing_heap_size); - ("init_shared_heap_size", Hh_json.int_ stats.init_heap_size); - ("max_shared_heap_size", Hh_json.int_ stats.max_heap_size); - ("master_heap_words", Hh_json.int_ stats.gc_stat.Gc.heap_words); - ("master_top_heap_words", Hh_json.int_ stats.gc_stat.Gc.top_heap_words); - ] diff --git a/src/hack_forked/utils/core/stats.mli b/src/hack_forked/utils/core/stats.mli deleted file mode 100644 index 2e0056b5e16..00000000000 --- a/src/hack_forked/utils/core/stats.mli +++ /dev/null @@ -1,27 +0,0 @@ -(* - * Copyright (c) Facebook, Inc. and its affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -(* This `.mli` file was generated automatically. It may include extra - definitions that should not actually be exposed to the caller. If you notice - that this interface file is a poor interface, please take a few minutes to - clean it up manually, and then delete this comment once the interface is in - shape. 
*) - -type t = { - mutable init_parsing_heap_size: int; - mutable init_heap_size: int; - mutable max_heap_size: int; - gc_stat: Gc.stat; -} - -val stats : t - -val get_stats : unit -> t - -val update_max_heap_size : int -> unit - -val to_json : t -> Hh_json.json diff --git a/src/heap/sharedMem.ml b/src/heap/sharedMem.ml index f3035e5510d..b86250505ee 100644 --- a/src/heap/sharedMem.ml +++ b/src/heap/sharedMem.ml @@ -119,7 +119,6 @@ let should_collect (effort : [ `gentle | `aggressive | `always_TEST ]) = let collect (effort : [ `gentle | `aggressive | `always_TEST ]) = let old_size = heap_size () in - Stats.update_max_heap_size old_size; let start_t = Unix.gettimeofday () in (* The wrapper is used to run the function in a worker instead of master. *) if should_collect effort then hh_collect (); From da128da6532dd08a4d96e5c9b22be9679a768884 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Wed, 2 Dec 2020 11:56:50 -0800 Subject: [PATCH 06/43] log type-at-pos FAILURE_UNPARSABLE error reason Reviewed By: gkz Differential Revision: D25277747 fbshipit-source-id: 900441e9635511cc044061d30dfc1e85d30778e4 --- src/services/type_info/type_info_service.ml | 44 +++++++++++++-------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/src/services/type_info/type_info_service.ml b/src/services/type_info/type_info_service.ml index f60f0a2defc..4b3929a567a 100644 --- a/src/services/type_info/type_info_service.ml +++ b/src/services/type_info/type_info_service.ml @@ -9,6 +9,14 @@ open Base.Result let ( >|= ) = Lwt.( >|= ) +let json_data_of_result str acc = ("result", Hh_json.JSON_String str) :: acc + +let json_data_of_error str acc = ("error", Hh_json.JSON_String str) :: acc + +let json_data_of_loc loc acc = ("loc", Reason.json_of_loc ~offset_table:None loc) :: acc + +let json_data_of_type str acc = ("type", Hh_json.JSON_String str) :: acc + let type_at_pos ~cx ~file_sig @@ -23,13 +31,6 @@ let type_at_pos col = let loc = Loc.cursor (Some file) line col in let (json_data, 
loc, ty) = - let mk_data result_str loc ty_json = - [ - ("result", Hh_json.JSON_String result_str); - ("loc", Reason.json_of_loc ~offset_table:None loc); - ("type", ty_json); - ] - in Query_types.( let file = Context.file cx in let result = @@ -46,16 +47,27 @@ let type_at_pos loc in match result with - | FailureNoMatch -> ([("result", Hh_json.JSON_String "FAILURE_NO_MATCH")], Loc.none, None) - | FailureUnparseable (loc, gt, _) -> - let json = Hh_json.JSON_String (Type.string_of_ctor gt) in - (mk_data "FAILURE_UNPARSEABLE" loc json, loc, None) + | FailureNoMatch -> (json_data_of_result "FAILURE_NO_MATCH" [], Loc.none, None) + | FailureUnparseable (loc, gt, msg) -> + let json_data = + [] + |> json_data_of_result "FAILURE_UNPARSEABLE" + |> json_data_of_error msg + |> json_data_of_loc loc + |> json_data_of_type (Type.string_of_ctor gt) + in + (json_data, loc, None) | Success (loc, ty) -> - (* TODO use Ty_debug.json_of_t after making it faster using - count_calls *) - let exact_by_default = Context.exact_by_default cx in - let json = Hh_json.JSON_String (Ty_printer.string_of_elt ~exact_by_default ty) in - (mk_data "SUCCESS" loc json, loc, Some ty)) + let json_data = + [] + |> json_data_of_result "SUCCESS" + |> json_data_of_loc loc + |> json_data_of_type + ((* TODO use Ty_debug.json_of_t after making it faster using count_calls *) + let exact_by_default = Context.exact_by_default cx in + Ty_printer.string_of_elt ~exact_by_default ty) + in + (json_data, loc, Some ty)) in ((loc, ty), json_data) From 5cc33107ce16552055f5d265b2a5de245575ff37 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Wed, 2 Dec 2020 15:16:04 -0800 Subject: [PATCH 07/43] use record for type_sig type Summary: moves `Parsing_heaps.type_sig` to `Packed_type_sig.t` and turned it into a record. 
I'm planning to pass this around to a bunch of recursive functions, where each one needs one or two parts of the tuple, and it is quite unwieldy to keep doing `let (_, _, _, foo, _, _) = type_sig in` in each function. I moved it from `Parsing_heaps` to its own module in `type_sig` to avoid having to depend on `heap` unnecessarily. Reviewed By: samwgoldman Differential Revision: D25250927 fbshipit-source-id: e28709539bbc7f7f0770297cfb99a8f201b4c60b --- .../type_sig/__tests__/type_sig_tests.ml | 13 +++++++++++-- src/parser_utils/type_sig/packed_type_sig.ml | 16 ++++++++++++++++ src/parser_utils/type_sig/type_sig_utils.ml | 2 +- src/parsing/parsing_service_js.ml | 2 +- src/services/inference/dep_service.ml | 2 +- src/services/inference/merge_service.ml | 10 +++++++++- src/state/heaps/parsing/parsing_heaps.ml | 15 ++++----------- src/state/heaps/parsing/parsing_heaps.mli | 13 +++---------- 8 files changed, 46 insertions(+), 27 deletions(-) create mode 100644 src/parser_utils/type_sig/packed_type_sig.ml diff --git a/src/parser_utils/type_sig/__tests__/type_sig_tests.ml b/src/parser_utils/type_sig/__tests__/type_sig_tests.ml index 8cab4e004f0..09352f19179 100644 --- a/src/parser_utils/type_sig/__tests__/type_sig_tests.ml +++ b/src/parser_utils/type_sig/__tests__/type_sig_tests.ml @@ -120,8 +120,17 @@ let pp_errors pp_loc fmt errs = let pp_sig fmt - (errs, locs, (exports, export_def, module_refs, local_defs, remote_refs, pattern_defs, patterns)) - = + ( errs, + locs, + { + Packed_type_sig.exports; + export_def; + module_refs; + local_defs; + remote_refs; + pattern_defs; + patterns; + } ) = let open Format in let pp_loc = mk_pp_loc locs in pp_exports pp_loc fmt exports; diff --git a/src/parser_utils/type_sig/packed_type_sig.ml b/src/parser_utils/type_sig/packed_type_sig.ml new file mode 100644 index 00000000000..11995e0978c --- /dev/null +++ b/src/parser_utils/type_sig/packed_type_sig.ml @@ -0,0 +1,16 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type 'loc t = { + exports: 'loc Type_sig_pack.exports; + export_def: 'loc Type_sig_pack.packed option; + module_refs: string Type_sig_collections.Module_refs.t; + local_defs: 'loc Type_sig_pack.packed_def Type_sig_collections.Local_defs.t; + remote_refs: 'loc Type_sig_pack.remote_ref Type_sig_collections.Remote_refs.t; + pattern_defs: 'loc Type_sig_pack.packed Type_sig_collections.Pattern_defs.t; + patterns: 'loc Type_sig_pack.pattern Type_sig_collections.Patterns.t; +} diff --git a/src/parser_utils/type_sig/type_sig_utils.ml b/src/parser_utils/type_sig/type_sig_utils.ml index 9b9d86f6a7d..f0977ff1949 100644 --- a/src/parser_utils/type_sig/type_sig_utils.ml +++ b/src/parser_utils/type_sig/type_sig_utils.ml @@ -99,7 +99,7 @@ let pack (locs, file_loc, (tbls, exports)) = let pattern_defs = Pattern_defs.copy (Pack.pack_parsed cx) pattern_defs in let patterns = Patterns.copy Pack.pack_pattern patterns in let exports, export_def = Pack.pack_exports cx file_loc exports in - cx.Pack.errs, locs, (exports, export_def, module_refs, local_defs, remote_refs, pattern_defs, patterns) + cx.Pack.errs, locs, { Packed_type_sig.exports; export_def; module_refs; local_defs; remote_refs; pattern_defs; patterns } let parse_and_pack_module ~strict opts source ast = pack (parse_module ~strict source opts ast) diff --git a/src/parsing/parsing_service_js.ml b/src/parsing/parsing_service_js.ml index a462a785904..b854adb981e 100644 --- a/src/parsing/parsing_service_js.ml +++ b/src/parsing/parsing_service_js.ml @@ -514,7 +514,7 @@ let do_parse ~parse_options ~info content file = let env = ref SMap.empty in let () = let open Type_sig in - let (_, _, _, local_defs, _, _, _) = type_sig in + let { Packed_type_sig.local_defs; _ } = type_sig in let f def = let name = def_name def in let loc = def_id_loc def in diff --git a/src/services/inference/dep_service.ml 
b/src/services/inference/dep_service.ml index e3a9e210cc1..bcdd8037b4a 100644 --- a/src/services/inference/dep_service.ml +++ b/src/services/inference/dep_service.ml @@ -219,7 +219,7 @@ let file_dependencies ~options ~audit ~reader file = let sig_file_sig = Parsing_heaps.Mutator_reader.get_sig_file_sig_unsafe reader file in File_sig.With_ALoc.(require_set sig_file_sig.module_sig) | Options.TypesFirst { new_signatures = true } -> - let (_, _, mrefs, _, _, _, _) = + let { Packed_type_sig.module_refs = mrefs; _ } = Parsing_heaps.Mutator_reader.get_type_sig_unsafe reader file in let acc = ref SSet.empty in diff --git a/src/services/inference/merge_service.ml b/src/services/inference/merge_service.ml index f1e299f8641..09cf10a0fb3 100644 --- a/src/services/inference/merge_service.ml +++ b/src/services/inference/merge_service.ml @@ -290,7 +290,15 @@ let merge_context_new_signatures ~options ~reader component = else ALoc.of_loc (ALoc.to_loc aloc_table aloc) in - let (exports, export_def, module_refs, local_defs, remote_refs, pattern_defs, patterns) = + let { + Packed_type_sig.exports; + export_def; + module_refs; + local_defs; + remote_refs; + pattern_defs; + patterns; + } = Parsing_heaps.Reader_dispatcher.get_type_sig_unsafe ~reader file in let dependencies = diff --git a/src/state/heaps/parsing/parsing_heaps.ml b/src/state/heaps/parsing/parsing_heaps.ml index 03836685356..b2f899d7cc5 100644 --- a/src/state/heaps/parsing/parsing_heaps.ml +++ b/src/state/heaps/parsing/parsing_heaps.ml @@ -36,20 +36,13 @@ module SigASTALocTableHeap = let description = "ALocTable" end) -type 'loc type_sig = - 'loc Type_sig_pack.exports - * 'loc Type_sig_pack.packed option - * string Type_sig_collections.Module_refs.t - * 'loc Type_sig_pack.packed_def Type_sig_collections.Local_defs.t - * 'loc Type_sig_pack.remote_ref Type_sig_collections.Remote_refs.t - * 'loc Type_sig_pack.packed Type_sig_collections.Pattern_defs.t - * 'loc Type_sig_pack.pattern Type_sig_collections.Patterns.t +type 
type_sig = Type_sig_collections.Locs.index Packed_type_sig.t module TypeSigHeap = SharedMem_js.NoCache (File_key) (struct - type t = Type_sig_collections.Locs.index type_sig + type t = type_sig let description = "TypeSig" end) @@ -169,7 +162,7 @@ type sig_extra = sig_file_sig: File_sig.With_ALoc.t; aloc_table: ALoc.table option; } - | TypeSig of Type_sig_collections.Locs.index type_sig * ALoc.table + | TypeSig of type_sig * ALoc.table (* Groups operations on the multiple heaps that need to stay in sync *) module ParsingHeaps = struct @@ -250,7 +243,7 @@ module type READER = sig val get_sig_file_sig_unsafe : reader:reader -> File_key.t -> File_sig.With_ALoc.t - val get_type_sig_unsafe : reader:reader -> File_key.t -> Type_sig_collections.Locs.index type_sig + val get_type_sig_unsafe : reader:reader -> File_key.t -> type_sig val get_file_hash_unsafe : reader:reader -> File_key.t -> Xx.hash end diff --git a/src/state/heaps/parsing/parsing_heaps.mli b/src/state/heaps/parsing/parsing_heaps.mli index e02db1cfbb0..f1520ca073a 100644 --- a/src/state/heaps/parsing/parsing_heaps.mli +++ b/src/state/heaps/parsing/parsing_heaps.mli @@ -5,14 +5,7 @@ * LICENSE file in the root directory of this source tree. 
*) -type 'loc type_sig = - 'loc Type_sig_pack.exports - * 'loc Type_sig_pack.packed option - * string Type_sig_collections.Module_refs.t - * 'loc Type_sig_pack.packed_def Type_sig_collections.Local_defs.t - * 'loc Type_sig_pack.remote_ref Type_sig_collections.Remote_refs.t - * 'loc Type_sig_pack.packed Type_sig_collections.Pattern_defs.t - * 'loc Type_sig_pack.pattern Type_sig_collections.Patterns.t +type type_sig = Type_sig_collections.Locs.index Packed_type_sig.t module type READER = sig type reader @@ -41,7 +34,7 @@ module type READER = sig val get_sig_file_sig_unsafe : reader:reader -> File_key.t -> File_sig.With_ALoc.t - val get_type_sig_unsafe : reader:reader -> File_key.t -> Type_sig_collections.Locs.index type_sig + val get_type_sig_unsafe : reader:reader -> File_key.t -> type_sig val get_file_hash_unsafe : reader:reader -> File_key.t -> Xx.hash end @@ -63,7 +56,7 @@ type sig_extra = sig_file_sig: File_sig.With_ALoc.t; aloc_table: ALoc.table option; } - | TypeSig of Type_sig_collections.Locs.index type_sig * ALoc.table + | TypeSig of type_sig * ALoc.table (* For use by a worker process *) type worker_mutator = { From 32cd3cd15ef319cada739d73b9619184b49de9e0 Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 08/43] Fix implicit instantiation infinite recursion with _ Summary: Implicit instantiation has known methods for introducing non-termination. The comment in flow_js.ml just above the modifications made by this diff explain it. We introduced an implicit instantiation cache in order to cut constraint cycles that lead to non-termination. Prior to `_`, there was no known way of introducing non-termination via explicitly instantiated type arguments in a function call. `_` is equivalent to implicit instantiation, which necessitates the use of the cache in the explicitly instantiated function calls as well. 
This diff adds that caching behavior and includes a test that hits a stack overflow when the caching mechanism is not present. The implicit instantiation cache relies heavily on reasons. The changed test behavior is due exactly to a reason desc changing because it goes through the cache. Piping through the necessary information to keep that specific reason_desc around would keep the behavior as-is. However, that error message only pops up in a `classic` codebase that uses `_` in an input position, which is highly unlikely to appear now that types-first is the default. Given that this special behavior is not really needed anymore, I opted to remove it instead of keeping it around. Reviewed By: panagosg7 Differential Revision: D25034431 fbshipit-source-id: baa99907eb130e6b624c81220512a4cd8ab12689 --- src/typing/errors/error_message.ml | 6 ---- src/typing/flow_js.ml | 31 +++++++++++++------ .../implicit_instantiation.exp | 13 ++++++-- .../implicit_instantiation_underscore.js | 26 ++++++++++++++++ 4 files changed, 57 insertions(+), 19 deletions(-) create mode 100644 tests/typeapp_perf/implicit_instantiation_underscore.js diff --git a/src/typing/errors/error_message.ml b/src/typing/errors/error_message.ml index 8a39b21408f..e58f5cd1f86 100644 --- a/src/typing/errors/error_message.ml +++ b/src/typing/errors/error_message.ml @@ -2228,12 +2228,6 @@ let friendly_message_of_msg : Loc.t t' -> Loc.t friendly_message_recipe = let default = [text "Missing type annotation for "; desc reason; text "."] in let features = match desc_of_reason reason with - | RTypeParam (_, (RImplicitInstantiation, _), _) -> - [ - text "Please use a concrete type annotation instead of "; - code "_"; - text " in this position."; - ] | RTypeParam (_, (reason_op_desc, reason_op_loc), (reason_tapp_desc, reason_tapp_loc)) -> let reason_op = mk_reason reason_op_desc reason_op_loc in let reason_tapp = mk_reason reason_tapp_desc reason_tapp_loc in diff --git a/src/typing/flow_js.ml b/src/typing/flow_js.ml 
index 07f7e42534e..1a392a3b246 100644 --- a/src/typing/flow_js.ml +++ b/src/typing/flow_js.ml @@ -4056,16 +4056,16 @@ struct without fearing regressions in termination guarantees. *) | CallT (use_op, _, calltype) when not (is_typemap_reason reason_op) -> + let arg_reasons = + Base.List.map + ~f:(function + | Arg t -> reason_of_t t + | SpreadArg t -> reason_of_t t) + calltype.call_args_tlist + in begin match calltype.call_targs with | None -> - let arg_reasons = - Base.List.map - ~f:(function - | Arg t -> reason_of_t t - | SpreadArg t -> reason_of_t t) - calltype.call_args_tlist - in let t_ = instantiate_poly cx @@ -4087,6 +4087,7 @@ struct ~use_op ~reason_op ~reason_tapp + ~cache:arg_reasons in rec_flow cx @@ -8847,20 +8848,30 @@ struct | [] -> ([], ts) | ExplicitArg t :: targs -> (targs, t :: ts) | ImplicitArg (r, id) :: targs -> + (* `_` can introduce non-termination, just like omitting type arguments + * can. In order to protect against that non-termination we use cache_instantiate. + * Instead of letting instantiate_poly do that for us on every type argument, we + * do it ourselves here so that explicit type arguments do not have their reasons + * needlessly changed. Note that the ImplicitTypeParam reason that cache instantiations + * introduce can also change the use_op in a flow. In the NumT ~> StrT case, + * this can make meaningful differences in type checking behavior. Ensuring that + * the use_op/reason change happens _only_ on actually implicitly instantiated + * type variables helps preserve the correct type checking behavior.
*) let reason = mk_reason RImplicitInstantiation (aloc_of_reason r) in let t = ImplicitTypeArgument.mk_targ cx typeparam reason reason_tapp in - rec_flow_t cx trace ~use_op (t, OpenT (r, id)); - (targs, t :: ts)) + let t_ = cache_instantiate cx trace ~use_op ?cache typeparam reason_op reason_tapp t in + rec_flow_t cx trace ~use_op (t_, OpenT (r, id)); + (targs, t_ :: ts)) (targs, []) xs in + (* Intentionally omit `cache`, which is handled above *) instantiate_poly_with_targs cx trace ~use_op ~reason_op ~reason_tapp - ?cache ?errs_ref (tparams_loc, xs, t) (List.rev ts) diff --git a/tests/implicit_instantiation/implicit_instantiation.exp b/tests/implicit_instantiation/implicit_instantiation.exp index 863dd55211e..cba6f10f23c 100644 --- a/tests/implicit_instantiation/implicit_instantiation.exp +++ b/tests/implicit_instantiation/implicit_instantiation.exp @@ -49,12 +49,19 @@ References: ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------------- test.js:22:22 +Error ----------------------------------------------------------------------------------------------------- test.js:22:6 -Please use a concrete type annotation instead of `_` in this position. [missing-annot] +Missing type annotation for `T`. `T` is a type parameter declared in function type [1] and was implicitly instantiated +at call of `unimplementable` [2]. [missing-annot] + test.js:22:6 22| x: unimplementable<_>(), // Error, requires concrete annot - ^ + ^^^^^^^^^^^^^^^^^^^^ [2] + +References: + test.js:7:33 + 7| declare function unimplementable(): {x: T}; + ^^^^^^^^^^^^^ [1] diff --git a/tests/typeapp_perf/implicit_instantiation_underscore.js b/tests/typeapp_perf/implicit_instantiation_underscore.js new file mode 100644 index 00000000000..09028c620c7 --- /dev/null +++ b/tests/typeapp_perf/implicit_instantiation_underscore.js @@ -0,0 +1,26 @@ +//@flow + +// This test exercises the instantiation cache when explicit type arguments are provided. 
+// Prior to the introduction of `_`, there were no known ways to introduce non-termination +// when explicit type arguments are supplied. Since `_` is equivalent to omitting the type +// arguments entirely, this introduces the full space of non-termination issues to calls +// with explicit type arguments. + +export type Obj = $ReadOnly<{||}>; + +declare class ImmutableMap { + static (): ImmutableMap; + update(key: K, updater: (value: V) => V_): ImmutableMap; +}; + +type Props = { + items: $ReadOnlyArray, + stringOfObj: Obj => ?string, +}; + +declare var props: Props; + +const groups = props.items.reduce((map, item) => { + const group = props.stringOfObj(item); + return map.update<_>(group, items => items.push(item)); +}, ImmutableMap()); From 3794570c42cfa96b7a7730de511691828e6219b7 Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 09/43] Add flag for post-inference implicit instantiation experimentation Summary: I plan to experiment with an implicit instantiation algorithm that pins down types instead of leaving unresolved OpenTs in the result. This is not possible during inference in Flow today and must take place in a post-inference pass, where we know that every annotation in a function signature is resolved and inspectable. In order to allow myself to test my code and to avoid any performance complications that it may present, I am going to keep this experiment behind a flag. The diffs in the rest of the stack will set up data structures and helper functions necessary to implement the implicit instantiation algorithm described by Pierce in his '00 paper.
Reviewed By: panagosg7 Differential Revision: D24734340 fbshipit-source-id: 9481e4c49eff0fc2c31a050f4047ddc1932eb61d --- src/commands/commandUtils.ml | 2 ++ src/commands/config/flowConfig.ml | 10 ++++++++++ src/commands/config/flowConfig.mli | 2 ++ src/common/options.ml | 4 ++++ src/flow_dot_js.ml | 1 + src/typing/__tests__/typed_ast_test.ml | 1 + src/typing/context.ml | 6 ++++++ src/typing/context.mli | 3 +++ .../post_inference_implicit_instantiation/.flowconfig | 2 ++ .../post_inference_implicit_instantiation.exp | 1 + 10 files changed, 32 insertions(+) create mode 100644 tests/post_inference_implicit_instantiation/.flowconfig create mode 100644 tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp diff --git a/src/commands/commandUtils.ml b/src/commands/commandUtils.ml index bc350baf560..0ca81bb13f7 100644 --- a/src/commands/commandUtils.ml +++ b/src/commands/commandUtils.ml @@ -1244,6 +1244,8 @@ let make_options opt_enabled_rollouts = FlowConfig.enabled_rollouts flowconfig; opt_enforce_local_inference_annotations = FlowConfig.enforce_local_inference_annotations flowconfig; + opt_run_post_inference_implicit_instantiation = + FlowConfig.run_post_inference_implicit_instantiation flowconfig; opt_enforce_strict_call_arity = FlowConfig.enforce_strict_call_arity flowconfig; opt_enforce_well_formed_exports; opt_enums = FlowConfig.enums flowconfig; diff --git a/src/commands/config/flowConfig.ml b/src/commands/config/flowConfig.ml index 10aba586b2f..eea5fe797c9 100644 --- a/src/commands/config/flowConfig.ml +++ b/src/commands/config/flowConfig.ml @@ -95,6 +95,7 @@ module Opts = struct react_runtime: Options.react_runtime; recursion_limit: int; root_name: string option; + run_post_inference_implicit_instantiation: bool; saved_state_fetcher: Options.saved_state_fetcher; shm_hash_table_pow: int; shm_heap_size: int; @@ -205,6 +206,7 @@ module Opts = struct react_runtime = Options.ReactRuntimeClassic; recursion_limit = 10000; root_name = None; 
+ run_post_inference_implicit_instantiation = false; saved_state_fetcher = Options.Dummy_fetcher; shm_hash_table_pow = 19; shm_heap_size = 1024 * 1024 * 1024 * 25; @@ -403,6 +405,9 @@ module Opts = struct let local_inference_annotations = boolean (fun opts v -> Ok { opts with enforce_local_inference_annotations = v }) + let post_inference_implicit_instantiation = + boolean (fun opts v -> Ok { opts with run_post_inference_implicit_instantiation = v }) + type deprecated_esproposal_setting = | Enable | Ignore @@ -595,6 +600,8 @@ module Opts = struct ("types_first", types_first_parser); ("experimental.new_signatures", new_signatures_parser); ("experimental.enforce_local_inference_annotations", local_inference_annotations); + ( "experimental.run_post_inference_implicit_instantiation", + post_inference_implicit_instantiation ); ( "experimental.abstract_locations", boolean (fun opts v -> Ok { opts with abstract_locations = v }) ); ( "experimental.disable_live_non_parse_errors", @@ -1273,6 +1280,9 @@ let new_signatures c = c.options.Opts.new_signatures let required_version c = c.version +let run_post_inference_implicit_instantiation c = + c.options.Opts.run_post_inference_implicit_instantiation + let wait_for_recheck c = c.options.Opts.wait_for_recheck let weak c = c.options.Opts.weak diff --git a/src/commands/config/flowConfig.mli b/src/commands/config/flowConfig.mli index d33ca28c394..4bacbe6c0b5 100644 --- a/src/commands/config/flowConfig.mli +++ b/src/commands/config/flowConfig.mli @@ -159,6 +159,8 @@ val recursion_limit : config -> int val root_name : config -> string option +val run_post_inference_implicit_instantiation : config -> bool + val saved_state_fetcher : config -> Options.saved_state_fetcher val shm_hash_table_pow : config -> int diff --git a/src/common/options.ml b/src/common/options.ml index 5de040f9e08..381fc52d89a 100644 --- a/src/common/options.ml +++ b/src/common/options.ml @@ -95,6 +95,7 @@ type t = { opt_recursion_limit: int; opt_root: Path.t; 
opt_root_name: string option; + opt_run_post_inference_implicit_instantiation: bool; opt_saved_state_fetcher: saved_state_fetcher; opt_saved_state_force_recheck: bool; opt_saved_state_no_fallback: bool; @@ -206,6 +207,9 @@ let facebook_fbt opts = opts.opt_facebook_fbt let facebook_module_interop opts = opts.opt_facebook_module_interop +let run_post_inference_implicit_instantiation opts = + opts.opt_run_post_inference_implicit_instantiation + let saved_state_fetcher opts = opts.opt_saved_state_fetcher let saved_state_force_recheck opts = opts.opt_saved_state_force_recheck diff --git a/src/flow_dot_js.ml b/src/flow_dot_js.ml index 2128df88532..18a6a71403b 100644 --- a/src/flow_dot_js.ml +++ b/src/flow_dot_js.ml @@ -166,6 +166,7 @@ let stub_metadata ~root ~checked = react_runtime = Options.ReactRuntimeClassic; recursion_limit = 10000; root; + run_post_inference_implicit_instantiation = false; strict_es6_import_export = false; strict_es6_import_export_excludes = []; strip_root = true; diff --git a/src/typing/__tests__/typed_ast_test.ml b/src/typing/__tests__/typed_ast_test.ml index e48ca5599e2..b95fd9f2e46 100644 --- a/src/typing/__tests__/typed_ast_test.ml +++ b/src/typing/__tests__/typed_ast_test.ml @@ -41,6 +41,7 @@ let metadata = react_runtime = Options.ReactRuntimeClassic; recursion_limit = 10000; root = Path.dummy_path; + run_post_inference_implicit_instantiation = false; strict_es6_import_export = false; strict_es6_import_export_excludes = []; strip_root = true; diff --git a/src/typing/context.ml b/src/typing/context.ml index f6f7e1e9ba1..2a3a61e3e9c 100644 --- a/src/typing/context.ml +++ b/src/typing/context.ml @@ -53,6 +53,7 @@ type metadata = { react_runtime: Options.react_runtime; recursion_limit: int; root: Path.t; + run_post_inference_implicit_instantiation: bool; strict_es6_import_export: bool; strict_es6_import_export_excludes: string list; strip_root: bool; @@ -225,6 +226,8 @@ let metadata_of_options options = react_runtime = Options.react_runtime 
options; recursion_limit = Options.recursion_limit options; root = Options.root options; + run_post_inference_implicit_instantiation = + Options.run_post_inference_implicit_instantiation options; strict_es6_import_export = Options.strict_es6_import_export options; strict_es6_import_export_excludes = Options.strict_es6_import_export_excludes options; strip_root = Options.should_strip_root options; @@ -407,6 +410,9 @@ let exact_by_default cx = cx.metadata.exact_by_default let enforce_local_inference_annotations cx = cx.metadata.enforce_local_inference_annotations +let run_post_inference_implicit_instantiation cx = + cx.metadata.run_post_inference_implicit_instantiation + let generate_tests cx = cx.metadata.generate_tests let file cx = cx.file diff --git a/src/typing/context.mli b/src/typing/context.mli index 02425b0b348..d58e4f6acf8 100644 --- a/src/typing/context.mli +++ b/src/typing/context.mli @@ -69,6 +69,7 @@ type metadata = { react_runtime: Options.react_runtime; recursion_limit: int; root: Path.t; + run_post_inference_implicit_instantiation: bool; strict_es6_import_export: bool; strict_es6_import_export_excludes: string list; strip_root: bool; @@ -159,6 +160,8 @@ val exact_by_default : t -> bool val enforce_local_inference_annotations : t -> bool +val run_post_inference_implicit_instantiation : t -> bool + val generate_tests : t -> bool val file : t -> File_key.t diff --git a/tests/post_inference_implicit_instantiation/.flowconfig b/tests/post_inference_implicit_instantiation/.flowconfig new file mode 100644 index 00000000000..8aa4d3ba2ac --- /dev/null +++ b/tests/post_inference_implicit_instantiation/.flowconfig @@ -0,0 +1,2 @@ +[options] +experimental.run_post_inference_implicit_instantiation=true diff --git a/tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp b/tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp new file mode 100644 index 00000000000..2829d581f51 --- /dev/null +++ 
b/tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp @@ -0,0 +1 @@ +Found 0 errors From 19c0ae1e43e4c73d2bb3f21a4ec037f77cbf2003 Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 10/43] Add implicit instantiation check data types to Context Summary: In order for me to try to run an implicit instantiation after inference has run, we need to keep track of all the times we do implicit instantiation in Flow today so I can retry it with the information we have in a post-inference setting. This diff sets up the context to track data necessary to accomplish that. Notice that I store the entire PolyT in the context instead of just the tparams and underlying function. I do this because I plan to re-flow the PolyT ~> CallT/ConstructorT to accumulate constraints and pin down types later. I used a record instead of just storing the Type.t because I plan to store more information later. At the very least, I'll need to also store the CallT/ConstructorT. Reviewed By: panagosg7 Differential Revision: D24734337 fbshipit-source-id: 710de868b2568d31474d4182050648a8b94ee373 --- src/typing/context.ml | 11 +++++++++++ src/typing/context.mli | 6 ++++++ 2 files changed, 17 insertions(+) diff --git a/src/typing/context.ml b/src/typing/context.ml index 2a3a61e3e9c..0ce13b2c546 100644 --- a/src/typing/context.ml +++ b/src/typing/context.ml @@ -79,6 +79,8 @@ type voidable_check = { errors: ALoc.t Property_assignment.errors; } +type implicit_instantiation_check = { fun_or_class: Type.t } + (* Equivalently, we could use a Reason.t option, but this is more self-documenting. 
*) type computed_property_state = | ResolvedOnce of Reason.t @@ -172,6 +174,7 @@ type component_t = { (* Post-inference checks *) mutable literal_subtypes: (Type.t * Type.use_t) list; mutable matching_props: (Reason.reason * string * Type.t * Type.t) list; + mutable implicit_instantiation_checks: implicit_instantiation_check list; } type phase = @@ -302,6 +305,7 @@ let make_ccx sig_cx aloc_tables = exists_checks = ALocMap.empty; exists_excuses = ALocMap.empty; voidable_checks = []; + implicit_instantiation_checks = []; test_prop_hits_and_misses = IMap.empty; computed_property_states = IMap.empty; spread_widened_types = IMap.empty; @@ -521,6 +525,8 @@ let exists_excuses cx = cx.ccx.exists_excuses let voidable_checks cx = cx.ccx.voidable_checks +let implicit_instantiation_checks cx = cx.ccx.implicit_instantiation_checks + let use_def cx = cx.use_def let exported_locals cx = cx.exported_locals @@ -611,6 +617,11 @@ let add_literal_subtypes cx c = cx.ccx.literal_subtypes <- c :: cx.ccx.literal_s let add_voidable_check cx voidable_check = cx.ccx.voidable_checks <- voidable_check :: cx.ccx.voidable_checks +let add_implicit_instantiation_check cx implicit_instantiation_check = + if cx.metadata.run_post_inference_implicit_instantiation then + cx.ccx.implicit_instantiation_checks <- + implicit_instantiation_check :: cx.ccx.implicit_instantiation_checks + let remove_tvar cx id = cx.ccx.sig_cx.graph <- IMap.remove id cx.ccx.sig_cx.graph let set_all_unresolved cx all_unresolved = cx.ccx.all_unresolved <- all_unresolved diff --git a/src/typing/context.mli b/src/typing/context.mli index d58e4f6acf8..13ec6345049 100644 --- a/src/typing/context.mli +++ b/src/typing/context.mli @@ -96,6 +96,8 @@ type voidable_check = { errors: ALoc.t Property_assignment.errors; } +type implicit_instantiation_check = { fun_or_class: Type.t } + type computed_property_state = | ResolvedOnce of Reason.t | ResolvedMultipleTimes @@ -274,6 +276,8 @@ val exists_excuses : t -> ExistsCheck.t ALocMap.t val 
voidable_checks : t -> voidable_check list +val implicit_instantiation_checks : t -> implicit_instantiation_check list + val use_def : t -> Scope_api.With_ALoc.info * Ssa_api.With_ALoc.values val pid_prefix : t -> string @@ -324,6 +328,8 @@ val add_literal_subtypes : t -> Type.t * Type.use_t -> unit val add_voidable_check : t -> voidable_check -> unit +val add_implicit_instantiation_check : t -> implicit_instantiation_check -> unit + val remove_tvar : t -> Constraint.ident -> unit val set_envs : t -> env IMap.t -> unit From bc88a5ce3bc905e065e29809c224127a70fe965e Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 11/43] Push implicit instantiation checks on no-targ call/new Summary: This diff makes use of the data structure introduced in the previous diff to track implicit instantiations. Reviewed By: panagosg7 Differential Revision: D24734338 fbshipit-source-id: 712f4651009518b1a12cc53442f9d0af5faeb0c0 --- src/typing/context.ml | 3 ++- src/typing/context.mli | 2 +- src/typing/flow_js.ml | 12 ++++++++++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/typing/context.ml b/src/typing/context.ml index 0ce13b2c546..5c198c1595d 100644 --- a/src/typing/context.ml +++ b/src/typing/context.ml @@ -617,7 +617,8 @@ let add_literal_subtypes cx c = cx.ccx.literal_subtypes <- c :: cx.ccx.literal_s let add_voidable_check cx voidable_check = cx.ccx.voidable_checks <- voidable_check :: cx.ccx.voidable_checks -let add_implicit_instantiation_check cx implicit_instantiation_check = +let add_implicit_instantiation_check cx fun_or_class = + let implicit_instantiation_check = { fun_or_class } in if cx.metadata.run_post_inference_implicit_instantiation then cx.ccx.implicit_instantiation_checks <- implicit_instantiation_check :: cx.ccx.implicit_instantiation_checks diff --git a/src/typing/context.mli b/src/typing/context.mli index 13ec6345049..b2dd04eb2fc 100644 --- a/src/typing/context.mli +++ b/src/typing/context.mli @@ 
-328,7 +328,7 @@ val add_literal_subtypes : t -> Type.t * Type.use_t -> unit val add_voidable_check : t -> voidable_check -> unit -val add_implicit_instantiation_check : t -> implicit_instantiation_check -> unit +val add_implicit_instantiation_check : t -> Type.t -> unit val remove_tvar : t -> Constraint.ident -> unit diff --git a/src/typing/flow_js.ml b/src/typing/flow_js.ml index 1a392a3b246..2d21ead0d74 100644 --- a/src/typing/flow_js.ml +++ b/src/typing/flow_js.ml @@ -4066,6 +4066,7 @@ struct begin match calltype.call_targs with | None -> + Context.add_implicit_instantiation_check cx l; let t_ = instantiate_poly cx @@ -4106,6 +4107,17 @@ struct ~reason_tapp in rec_flow cx trace (t_, ConstructorT (use_op, reason_op, None, args, tout)) + | ConstructorT (_, _, None, _, _) -> + Context.add_implicit_instantiation_check cx l; + let use_op = + match use_op_of_use_t u with + | Some use_op -> use_op + | None -> unknown_use + in + let t_ = + instantiate_poly cx trace ~use_op ~reason_op ~reason_tapp (tparams_loc, ids, t) + in + rec_flow cx trace (t_, u) | _ -> let use_op = match use_op_of_use_t u with From e8c7860835c92e5af3146cb307afd3334e2ad046 Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 12/43] Introduce temporary error for implicit instantiation tests Summary: There is no way to unit test type system code. In order to gain confidence in the implicit instantiation implementation, I'm introducing this error so that we can print results about the instantiation in error messages before the algorithm is complete. In the very next diff, I use this to print polarity information about the type parameters in an implicit instantiation. This error variant will be deleted when I can start writing full end-to-end tests of the implicit instantiation algorithm. 
Reviewed By: panagosg7 Differential Revision: D24734335 fbshipit-source-id: f40981b16600c625f570f59fc0a529ec1ed8c5a8 --- src/typing/debug_js.ml | 1 + src/typing/errors/error_message.ml | 11 +++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/typing/debug_js.ml b/src/typing/debug_js.ml index dfa80eece0f..cc7d819ec59 100644 --- a/src/typing/debug_js.ml +++ b/src/typing/debug_js.ml @@ -1707,6 +1707,7 @@ let dump_error_message = (dump_reason cx reason) (ListUtils.to_string ", " (dump_reason cx) blame_reasons) | EMalformedCode loc -> spf "EMalformedCode (%s)" (string_of_aloc loc) + | EImplicitInstantiationTemporaryError _ -> "EImplicitInstantiationTemporaryError" module Verbose = struct let print_if_verbose_lazy cx trace ?(delim = "") ?(indent = 0) (lines : string list Lazy.t) = diff --git a/src/typing/errors/error_message.ml b/src/typing/errors/error_message.ml index e58f5cd1f86..db1a2d5edfa 100644 --- a/src/typing/errors/error_message.ml +++ b/src/typing/errors/error_message.ml @@ -410,6 +410,7 @@ and 'loc t' = blame_reasons: 'loc virtual_reason list; } | EMalformedCode of 'loc + | EImplicitInstantiationTemporaryError of 'loc * string and 'loc exponential_spread_reason_group = { first_reason: 'loc virtual_reason; @@ -952,6 +953,8 @@ let rec map_loc_of_error_message (f : 'a -> 'b) : 'a t' -> 'b t' = blame_reasons = Base.List.map ~f:map_reason blame_reasons; } | EMalformedCode loc -> EMalformedCode (f loc) + | EImplicitInstantiationTemporaryError (loc, msg) -> + EImplicitInstantiationTemporaryError (f loc, msg) let desc_of_reason r = Reason.desc_of_reason ~unwrap:(is_scalar_reason r) r @@ -1157,7 +1160,8 @@ let util_use_op_of_msg nope util = function | EEnumInvalidCheck _ | EEnumMemberUsedAsType _ | EAssignExportedConstLikeBinding _ - | EMalformedCode _ -> + | EMalformedCode _ + | EImplicitInstantiationTemporaryError _ -> nope (* Not all messages (i.e. 
those whose locations are based on use_ops) have locations that can be @@ -1297,7 +1301,8 @@ let loc_of_msg : 'loc t' -> 'loc option = function | EUnexpectedThisType loc | ETypeParamMinArity (loc, _) | EAssignExportedConstLikeBinding { loc; _ } - | EMalformedCode loc -> + | EMalformedCode loc + | EImplicitInstantiationTemporaryError (loc, _) -> Some loc | ELintSetting (loc, _) -> Some loc | ETypeParamArity (loc, _) -> Some loc @@ -3394,6 +3399,7 @@ let friendly_message_of_msg : Loc.t t' -> Loc.t friendly_message_recipe = text "."; ]; } + | EImplicitInstantiationTemporaryError (_, msg) -> Normal { features = [text msg] } let is_lint_error = function | EUntypedTypeImport _ @@ -3636,6 +3642,7 @@ let error_code_of_message err : error_code option = | EUnsupportedSetProto _ -> Some CannotWrite | EUnsupportedSyntax (_, _) -> Some UnsupportedSyntax | EMalformedCode _ + | EImplicitInstantiationTemporaryError _ | EUnusedSuppression _ -> None | EUseArrayLiteral _ -> Some IllegalNewArray From 749258357af9b401d1abb5d8b8981c311d5706fa Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 13/43] Make Flow_js_utils module for constraint manipulations that depend on Context Summary: I want to move `merge_tvar` out of `merge_js.ml` because we will likely want to use it in various places for local inference (one such place is further up in this stack in the implicit_instantiation type visitor). These places may also depend on flow_js.ml (like the aforementioned type visitor does), so I don't want it to be in flow_js.ml merge_tvar uses `Flow_js.possible_types`, which does not really need to exist in `flow_js.ml`. I'm making a `Flow_js_utils` module that allows us to write functions involving constraints that cannot be put in `constraint.ml` directly because they depend on `context.ml`. In this diff, I also move the constraint helpers that do _not_ depend on the context into `Constraint`, where they belong better. 
Reviewed By: panagosg7 Differential Revision: D25256822 fbshipit-source-id: 78a81708a88ce5a915b70f62634569d4be15a77a --- src/services/get_def/getDef_js.ml | 2 +- src/typing/constraint.ml | 16 ++++++++++++++++ src/typing/constraint.mli | 4 ++++ src/typing/flow_js.ml | 29 ----------------------------- src/typing/flow_js.mli | 9 --------- src/typing/flow_js_utils.ml | 22 ++++++++++++++++++++++ src/typing/members.ml | 4 ++-- src/typing/merge_js.ml | 8 ++++---- 8 files changed, 49 insertions(+), 45 deletions(-) create mode 100644 src/typing/flow_js_utils.ml diff --git a/src/services/get_def/getDef_js.ml b/src/services/get_def/getDef_js.ml index eff8737721f..a13fa670619 100644 --- a/src/services/get_def/getDef_js.ml +++ b/src/services/get_def/getDef_js.ml @@ -80,7 +80,7 @@ let rec process_request ~options ~reader ~cx ~is_legit_require ~typed_ast : let open Type in function | OpenT _ as t -> - (match Flow_js.possible_types_of_type cx t with + (match Flow_js_utils.possible_types_of_type cx t with | [t'] -> loop t' | [] -> Error "No possible types" | _ :: _ -> Error "More than one possible type") diff --git a/src/typing/constraint.ml b/src/typing/constraint.ml index 648cb11e9b2..03b990e996a 100644 --- a/src/typing/constraint.ml +++ b/src/typing/constraint.ml @@ -110,3 +110,19 @@ let new_bounds () = let new_unresolved_root () = Root { rank = 0; constraints = Unresolved (new_bounds ()) } let new_resolved_root t op = Root { rank = 0; constraints = FullyResolved (op, t) } + +(* For any constraints, return a list of def types that form either the lower + bounds of the solution, or a singleton containing the solution itself. 
*) +let types_of constraints = + match constraints with + | Unresolved { lower; _ } -> TypeMap.keys lower + | Resolved (_, t) + | FullyResolved (_, t) -> + [t] + +let uses_of constraints = + match constraints with + | Unresolved { upper; _ } -> Base.List.map ~f:fst (UseTypeMap.keys upper) + | Resolved (use_op, t) + | FullyResolved (use_op, t) -> + [UseT (use_op, t)] diff --git a/src/typing/constraint.mli b/src/typing/constraint.mli index 911340c6178..41a93bee888 100644 --- a/src/typing/constraint.mli +++ b/src/typing/constraint.mli @@ -49,3 +49,7 @@ and bounds = { val new_unresolved_root : unit -> node val new_resolved_root : Type.t -> Type.use_op -> node + +val types_of : constraints -> Type.t list + +val uses_of : constraints -> Type.use_t list diff --git a/src/typing/flow_js.ml b/src/typing/flow_js.ml index 2d21ead0d74..3d7c011f3ae 100644 --- a/src/typing/flow_js.ml +++ b/src/typing/flow_js.ml @@ -150,35 +150,6 @@ let visit_eval_id cx id f = | None -> () | Some t -> f t -(***************) -(* strict mode *) -(***************) - -(* For any constraints, return a list of def types that form either the lower - bounds of the solution, or a singleton containing the solution itself. *) -let types_of constraints = - match constraints with - | Unresolved { lower; _ } -> TypeMap.keys lower - | Resolved (_, t) - | FullyResolved (_, t) -> - [t] - -(* Def types that describe the solution of a type variable. 
*) -let possible_types cx id = types_of (Context.find_graph cx id) |> List.filter is_proper_def - -let possible_types_of_type cx = function - | OpenT (_, id) -> possible_types cx id - | _ -> [] - -let uses_of constraints = - match constraints with - | Unresolved { upper; _ } -> Base.List.map ~f:fst (UseTypeMap.keys upper) - | Resolved (use_op, t) - | FullyResolved (use_op, t) -> - [UseT (use_op, t)] - -let possible_uses cx id = uses_of (Context.find_graph cx id) |> List.filter is_proper_use - (**************) (* builtins *) (**************) diff --git a/src/typing/flow_js.mli b/src/typing/flow_js.mli index 809f9b03187..bce2d8d62a1 100644 --- a/src/typing/flow_js.mli +++ b/src/typing/flow_js.mli @@ -137,17 +137,8 @@ val mk_instance : Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Typ val mk_typeof_annotation : Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> ?internal:bool -> Type.t -> Type.t -(* strict *) -val types_of : Constraint.constraints -> Type.t list - val enforce_strict : Context.t -> Type.t -> should_munge_underscores:bool -> unit -val possible_types : Context.t -> Constraint.ident -> Type.t list - -val possible_types_of_type : Context.t -> Type.t -> Type.t list - -val possible_uses : Context.t -> Constraint.ident -> Type.use_t list - (* trust *) val mk_trust_var : Context.t -> ?initial:Trust.trust_qualifier -> unit -> Type.ident diff --git a/src/typing/flow_js_utils.ml b/src/typing/flow_js_utils.ml new file mode 100644 index 00000000000..7df68a0f081 --- /dev/null +++ b/src/typing/flow_js_utils.ml @@ -0,0 +1,22 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open Type +open Constraint + +(* These possible_* functions would ideally be in constraint.ml, but since they use + * Context and Context depends on Constraint we need to extract these functions + * to a separate module in order to avoid a circular dependency *) + +(* Def types that describe the solution of a type variable. *) +let possible_types cx id = types_of (Context.find_graph cx id) |> List.filter is_proper_def + +let possible_types_of_type cx = function + | OpenT (_, id) -> possible_types cx id + | _ -> [] + +let possible_uses cx id = uses_of (Context.find_graph cx id) |> List.filter is_proper_use diff --git a/src/typing/members.ml b/src/typing/members.ml index 3fa774cfd10..344861f506d 100644 --- a/src/typing/members.ml +++ b/src/typing/members.ml @@ -318,7 +318,7 @@ and instantiate_type = function | t -> "cannot instantiate non-class type " ^ string_of_ctor t |> assert_false let possible_types_of_use cx = function - | UseT (_, t) -> possible_types_of_type cx t + | UseT (_, t) -> Flow_js_utils.possible_types_of_type cx t | _ -> [] let string_of_extracted_type = function @@ -354,7 +354,7 @@ let find_props cx = not (String.length key >= 1 && key.[0] = '$')) let resolve_tvar cx (_, id) = - let ts = possible_types cx id in + let ts = Flow_js_utils.possible_types cx id in (* The list of types returned by possible_types is often empty, and the most common reason is that we don't have enough type coverage to diff --git a/src/typing/merge_js.ml b/src/typing/merge_js.ml index 55f82c2e418..00d6b775ec2 100644 --- a/src/typing/merge_js.ml +++ b/src/typing/merge_js.ml @@ -235,11 +235,11 @@ let detect_non_voidable_properties cx = if ISet.mem id seen_ids then false else ( - match Flow_js.possible_types cx id with + match Flow_js_utils.possible_types cx id with (* tvar has no lower bounds: we conservatively assume it's non-voidable * except in the special case when it also has no upper bounds *) - | [] -> Flow_js.possible_uses cx id = [] + | [] -> 
Flow_js_utils.possible_uses cx id = [] (* tvar is resolved: look at voidability of the resolved type *) | [t] -> is_voidable (ISet.add id seen_ids) t (* tvar is unresolved: conservatively assume it is non-voidable *) @@ -285,7 +285,7 @@ let detect_non_voidable_properties cx = let merge_tvar = Type.( - let possible_types = Flow_js.possible_types in + let possible_types = Flow_js_utils.possible_types in let rec collect_lowers ~filter_empty cx seen acc = function | [] -> Base.List.rev acc | t :: ts -> @@ -331,7 +331,7 @@ let merge_tvar = | [t] -> t | t0 :: t1 :: ts -> UnionT (r, UnionRep.make t0 t1 ts) | [] -> - let uses = Flow_js.possible_uses cx id in + let uses = Flow_js_utils.possible_uses cx id in if uses = [] || existential then AnyT.locationless Unsoundness.existential else From db84343a42de05bdd628625d075b9e8fe9a10c7c Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 14/43] Move merge_tvar into Flow_js_utils Summary: Moves merge_tvar into `Flow_js_utils`. 
Reviewed By: panagosg7 Differential Revision: D25256823 fbshipit-source-id: 6d19e200ee001f291bb46ad3d4609b03f6987384 --- src/typing/flow_js_utils.ml | 53 +++++++++++++++++++++++++++++++++ src/typing/merge_js.ml | 58 ++----------------------------------- 2 files changed, 55 insertions(+), 56 deletions(-) diff --git a/src/typing/flow_js_utils.ml b/src/typing/flow_js_utils.ml index 7df68a0f081..2e418c2de5f 100644 --- a/src/typing/flow_js_utils.ml +++ b/src/typing/flow_js_utils.ml @@ -20,3 +20,56 @@ let possible_types_of_type cx = function | _ -> [] let possible_uses cx id = uses_of (Context.find_graph cx id) |> List.filter is_proper_use + +let merge_tvar = + let possible_types = possible_types in + let rec collect_lowers ~filter_empty cx seen acc = function + | [] -> Base.List.rev acc + | t :: ts -> + (match t with + (* Recursively unwrap unseen tvars *) + | OpenT (_, id) -> + if ISet.mem id seen then + collect_lowers ~filter_empty cx seen acc ts + (* already unwrapped *) + else + let seen = ISet.add id seen in + collect_lowers ~filter_empty cx seen acc (possible_types cx id @ ts) + (* Ignore empty in existentials. This behavior is sketchy, but the error + behavior without this filtering is worse. If an existential accumulates + an empty, we error but it's very non-obvious how the empty arose. *) + | DefT (_, _, EmptyT flavor) when filter_empty flavor -> + collect_lowers ~filter_empty cx seen acc ts + (* Everything else becomes part of the merge typed *) + | _ -> collect_lowers ~filter_empty cx seen (t :: acc) ts) + in + fun ?filter_empty cx r id -> + (* Because the behavior of existentials are so difficult to predict, they + enjoy some special casing here. When existential types are finally + removed, this logic can be removed. 
*) + let existential = + Reason.( + match desc_of_reason r with + | RExistential -> true + | _ -> false) + in + let filter_empty flavor = + existential + || + match filter_empty with + | Some filter_empty -> filter_empty flavor + | None -> false + in + let lowers = + let seen = ISet.singleton id in + collect_lowers cx seen [] (possible_types cx id) ~filter_empty + in + match lowers with + | [t] -> t + | t0 :: t1 :: ts -> UnionT (r, UnionRep.make t0 t1 ts) + | [] -> + let uses = possible_uses cx id in + if uses = [] || existential then + AnyT.locationless Unsoundness.existential + else + MergedT (r, uses) diff --git a/src/typing/merge_js.ml b/src/typing/merge_js.ml index 00d6b775ec2..245ea7cdf26 100644 --- a/src/typing/merge_js.ml +++ b/src/typing/merge_js.ml @@ -283,60 +283,6 @@ let detect_non_voidable_properties cx = check_properties private_property_map private_property_errors) (Context.voidable_checks cx) -let merge_tvar = - Type.( - let possible_types = Flow_js_utils.possible_types in - let rec collect_lowers ~filter_empty cx seen acc = function - | [] -> Base.List.rev acc - | t :: ts -> - (match t with - (* Recursively unwrap unseen tvars *) - | OpenT (_, id) -> - if ISet.mem id seen then - collect_lowers ~filter_empty cx seen acc ts - (* already unwrapped *) - else - let seen = ISet.add id seen in - collect_lowers ~filter_empty cx seen acc (possible_types cx id @ ts) - (* Ignore empty in existentials. This behavior is sketchy, but the error - behavior without this filtering is worse. If an existential accumulates - an empty, we error but it's very non-obvious how the empty arose. *) - | DefT (_, _, EmptyT flavor) when filter_empty flavor -> - collect_lowers ~filter_empty cx seen acc ts - (* Everything else becomes part of the merge typed *) - | _ -> collect_lowers ~filter_empty cx seen (t :: acc) ts) - in - fun ?filter_empty cx r id -> - (* Because the behavior of existentials are so difficult to predict, they - enjoy some special casing here. 
When existential types are finally - removed, this logic can be removed. *) - let existential = - Reason.( - match desc_of_reason r with - | RExistential -> true - | _ -> false) - in - let filter_empty flavor = - existential - || - match filter_empty with - | Some filter_empty -> filter_empty flavor - | None -> false - in - let lowers = - let seen = ISet.singleton id in - collect_lowers cx seen [] (possible_types cx id) ~filter_empty - in - match lowers with - | [t] -> t - | t0 :: t1 :: ts -> UnionT (r, UnionRep.make t0 t1 ts) - | [] -> - let uses = Flow_js_utils.possible_uses cx id in - if uses = [] || existential then - AnyT.locationless Unsoundness.existential - else - MergedT (r, uses)) - let merge_trust_var constr = Trust_constraint.( match constr with @@ -358,7 +304,7 @@ class resolver_visitor = method! type_ cx map_cx t = let open Type in match t with - | OpenT (r, id) -> merge_tvar ~filter_empty cx r id + | OpenT (r, id) -> Flow_js_utils.merge_tvar ~filter_empty cx r id | EvalT (t', dt, _id) -> let t'' = self#type_ cx map_cx t' in let dt' = self#defer_use_type cx map_cx dt in @@ -722,7 +668,7 @@ module ContextOptimizer = struct SigHash.add_int sig_hash stable_id; id ) else - let t = merge_tvar cx r id in + let t = Flow_js_utils.merge_tvar cx r id in let node = Root { rank = 0; constraints = FullyResolved (unknown_use, t) } in reduced_graph <- IMap.add id node reduced_graph; let () = From 78748c5c1653e4823316fff6ea51a35ebb827fc2 Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 15/43] Use type_visitor to compute polarity of type parameters in function signatures Summary: We use the polarity-aware type_visitor to compute the polarity of type params in function signatures. There are a few interesting cases: 1. PolyTs may shadow the tparams bound at the function, so we need to make sure not to erroneously inspect those type params in signatures. 2. Some BoundTs are only used in upper bounds of other BoundTs. 
In order to compute their polarity, we visit those upper BoundTs each time we see the lower BoundT in a given polarity. It's possible that this is too strict-- i.e., if a BoundT is in a contravariant position, it is possible that we should not say that the upper BoundT is also in a contravariant position. This specific detail is something that can be worked out in a future diff after we see its effect on implicit instantiations. I use the error constructor introduced earlier in the stack to print the polarities we calculate in the form of error messages to give reviewers more confidence in this approach. Note that this diff only handles polymorphic function calls but not polymorphic constructors. I will add support for constructors in a later diff, but want to move on to the actual implicit instantiation algorithm before that. Reviewed By: panagosg7 Differential Revision: D24734336 fbshipit-source-id: 0bbd0f09c9c26e6613d1ea16c3cc67d9a11abaf7 --- src/typing/implicit_instantiation.ml | 143 +++++++++ src/typing/merge_js.ml | 6 + .../post_inference_implicit_instantiation.exp | 291 +++++++++++++++++- .../test.js | 44 +++ .../types.js | 4 + 5 files changed, 487 insertions(+), 1 deletion(-) create mode 100644 src/typing/implicit_instantiation.ml create mode 100644 tests/post_inference_implicit_instantiation/test.js create mode 100644 tests/post_inference_implicit_instantiation/types.js diff --git a/src/typing/implicit_instantiation.ml b/src/typing/implicit_instantiation.ml new file mode 100644 index 00000000000..10b1d6e4993 --- /dev/null +++ b/src/typing/implicit_instantiation.ml @@ -0,0 +1,143 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open Type +open Polarity +module IdMarked = Marked.IdMarked +module TypeParamMarked = Marked.Make (StringKey) +module Marked = TypeParamMarked + +let get_t cx = function + | OpenT (r, id) -> Flow_js_utils.merge_tvar cx r id + | t -> t + +(* This visitor records the polarities at which BoundTs are found. We follow the bounds of each + * type parameter as well, since some type params are only used in the bounds of another. + *) +class implicit_instantiation_visitor ~bounds_map = + object (self) + inherit [Marked.t * SSet.t] Type_visitor.t as super + + method! type_ cx pole ((marked, tparam_names) as acc) = + function + | BoundT (_, s) -> + if SSet.mem s tparam_names then + match Marked.add s pole marked with + | None -> acc + | Some (_, marked) -> + (match SMap.find_opt s bounds_map with + | None -> (marked, tparam_names) + | Some t -> self#type_ cx pole (marked, tparam_names) t) + else + acc + (* We remove any tparam names from the map when entering a PolyT to avoid naming conflicts. *) + | DefT (_, _, PolyT { tparams; t_out = t; _ }) -> + let tparam_names' = + Nel.fold_left (fun names x -> SSet.remove x.name names) tparam_names tparams + in + let (marked, _) = self#type_ cx pole (marked, tparam_names') t in + (* TODO(jmbrown): Handle defaults on type parameters *) + (marked, tparam_names) + | TypeAppT (_, _, c, ts) -> self#typeapp ts cx pole acc c + (* ThisTypeAppT is created from a new expression, which cannot + * be used as an annotation, so we do not special case it like + * we do with TypeAppT + *) + | t -> super#type_ cx pole acc t + + method private typeapp = + let rec loop cx pole seen = function + (* Any arity erors are already handled in Flow_js *) + | (_, []) -> seen + | (Some [], _) -> seen + | (None, targ :: targs) -> + (* In the absence of tparams we will just visit the args with a + * neutral polarity. 
*) + let param_polarity = Polarity.Neutral in + let seen = self#type_ cx param_polarity seen targ in + loop cx pole seen (None, targs) + | (Some (tparam :: tparams), targ :: targs) -> + let param_polarity = Polarity.mult (pole, tparam.polarity) in + let seen = self#type_ cx param_polarity seen targ in + loop cx pole seen (Some tparams, targs) + in + fun targs cx pole acc t -> + match get_t cx t with + | AnnotT (_, t, _) -> self#typeapp targs cx pole acc t + | DefT (_, _, PolyT { tparams; _ }) -> loop cx pole acc (Some (Nel.to_list tparams), targs) + | DefT (_, _, EmptyT _) + | AnyT _ + | MergedT _ -> + loop cx pole acc (None, targs) + | t -> + failwith + @@ "Encountered a " + ^ string_of_ctor t + ^ " in typepapp case of fully constrained analysis" + end + +let check_fun_call cx ~tparams ~params ?rest_param ~return_t ~f_params ~f_return = + let tparams = Nel.to_list tparams in + let (tparams_map, tparam_names) = + List.fold_left + (fun (map, names) x -> (SMap.add x.name x.bound map, SSet.add x.name names)) + (SMap.empty, SSet.empty) + tparams + in + let visitor = new implicit_instantiation_visitor ~bounds_map:tparams_map in + + (* Visit params *) + let (marked_params, _) = + List.fold_left + (fun acc (_, t) -> visitor#type_ cx Negative acc t) + (Marked.empty, tparam_names) + params + in + + (* Visit rest param *) + let (marked_params, _) = + Base.Option.fold + ~init:(marked_params, tparam_names) + ~f:(fun map_cx (_, _, t) -> visitor#type_ cx Negative map_cx t) + rest_param + in + + (* Visit the return type *) + let (marked_return, _) = visitor#type_ cx Positive (Marked.empty, tparam_names) return_t in + tparams + |> List.iter (fun tparam -> + f_params tparam (Marked.get tparam.name marked_params); + f_return tparam (Marked.get tparam.name marked_return)) + +let check_implicit_instantiation cx implicit_instantiation = + let t = implicit_instantiation.Context.fun_or_class in + let mk_error_msg tparam pole position = + let pole_msg pole = + match pole with + | None -> 
"does not appear in the " + | Some Positive -> "appears positively in the " + | Some Neutral -> "appears neutrally in the " + | Some Negative -> "appears negatively in the " + in + let msg = tparam.name ^ " " ^ pole_msg pole ^ position in + Error_message.EImplicitInstantiationTemporaryError (Reason.aloc_of_reason tparam.reason, msg) + in + match t with + | DefT (_, _, PolyT { t_out = t; tparams; _ }) -> + (match get_t cx t with + | DefT (_, _, FunT (_, _, funtype)) -> + check_fun_call + cx + ~tparams + ~params:funtype.params + ?rest_param:funtype.rest_param + ~return_t:funtype.return_t + ~f_params:(fun tparam pole -> Flow_js.add_output cx (mk_error_msg tparam pole "params")) + ~f_return:(fun tparam pole -> Flow_js.add_output cx (mk_error_msg tparam pole "return")) + | _t -> ()) + | _t -> + failwith "Implicit instantiation checks should always have a polymorphic class or function" diff --git a/src/typing/merge_js.ml b/src/typing/merge_js.ml index 245ea7cdf26..a22f1c3c5f0 100644 --- a/src/typing/merge_js.ml +++ b/src/typing/merge_js.ml @@ -283,6 +283,11 @@ let detect_non_voidable_properties cx = check_properties private_property_map private_property_errors) (Context.voidable_checks cx) +let check_implicit_instantiations cx = + if Context.run_post_inference_implicit_instantiation cx then + let implicit_instantiation_checks = Context.implicit_instantiation_checks cx in + List.iter (Implicit_instantiation.check_implicit_instantiation cx) implicit_instantiation_checks + let merge_trust_var constr = Trust_constraint.( match constr with @@ -545,6 +550,7 @@ let merge_component *) detect_sketchy_null_checks cx; detect_non_voidable_properties cx; + check_implicit_instantiations cx; detect_test_prop_misses cx; detect_unnecessary_optional_chains cx; detect_unnecessary_invariants cx; diff --git a/tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp b/tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp index 
2829d581f51..310b7fa5bd2 100644 --- a/tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp +++ b/tests/post_inference_implicit_instantiation/post_inference_implicit_instantiation.exp @@ -1 +1,290 @@ -Found 0 errors +Error ----------------------------------------------------------------------------------------------------- test.js:4:21 + +T appears negatively in the params + + 4| declare function f1(Covariant): Covariant; // Contra param, Co return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:4:21 + +T appears positively in the return + + 4| declare function f1(Covariant): Covariant; // Contra param, Co return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:5:21 + +T appears negatively in the params + + 5| declare function f2(Covariant): Contravariant; // Contra param, Contra return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:5:21 + +T appears negatively in the return + + 5| declare function f2(Covariant): Contravariant; // Contra param, Contra return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:6:21 + +T appears negatively in the params + + 6| declare function f3(Covariant): Invariant; // Contra param, Inv return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:6:21 + +T appears neutrally in the return + + 6| declare function f3(Covariant): Invariant; // Contra param, Inv return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:7:21 + +T appears positively in the params + + 7| declare function f4(Contravariant): Covariant; // Co param, Co return + ^ + + +Error 
----------------------------------------------------------------------------------------------------- test.js:7:21 + +T appears positively in the return + + 7| declare function f4(Contravariant): Covariant; // Co param, Co return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:8:21 + +T appears negatively in the return + + 8| declare function f5(Contravariant): Contravariant; // Co param, Contra return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:8:21 + +T appears positively in the params + + 8| declare function f5(Contravariant): Contravariant; // Co param, Contra return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:9:21 + +T appears neutrally in the return + + 9| declare function f6(Contravariant): Invariant; // Co param, Inv return + ^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:9:21 + +T appears positively in the params + + 9| declare function f6(Contravariant): Invariant; // Co param, Inv return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:10:21 + +T appears neutrally in the params + + 10| declare function f7(Invariant): Covariant; // Inv Param, Co return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:10:21 + +T appears positively in the return + + 10| declare function f7(Invariant): Covariant; // Inv Param, Co return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:11:21 + +T appears negatively in the return + + 11| declare function f8(Invariant): Contravariant; // Inv Param, Contra Return + ^ + + +Error 
---------------------------------------------------------------------------------------------------- test.js:11:21 + +T appears neutrally in the params + + 11| declare function f8(Invariant): Contravariant; // Inv Param, Contra Return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:12:21 + +T appears neutrally in the params + + 12| declare function f9(Invariant): Invariant; // Inv Param, Inv Return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:12:21 + +T appears neutrally in the return + + 12| declare function f9(Invariant): Invariant; // Inv Param, Inv Return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:23:26 + +T does not appear in the params + + 23| declare function missing(): void; // Missing param and return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:23:26 + +T does not appear in the return + + 23| declare function missing(): void; // Missing param and return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:27:39 + +T appears negatively in the params + + 27| declare function InterdependentBounds>( // Follow bounds in each position we see U + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:27:39 + +T appears positively in the return + + 27| declare function InterdependentBounds>( // Follow bounds in each position we see U + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:27:42 + +U appears negatively in the params + + 27| declare function InterdependentBounds>( // Follow bounds in each position we see U + ^ + + +Error 
---------------------------------------------------------------------------------------------------- test.js:27:42 + +U appears positively in the return + + 27| declare function InterdependentBounds>( // Follow bounds in each position we see U + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:33:31 + +T appears negatively in the params + + 33| declare function NameOverride((T) => T, T): (T) => T; // Contra Param, no return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:33:31 + +T does not appear in the return + + 33| declare function NameOverride((T) => T, T): (T) => T; // Contra Param, no return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:36:28 + +T appears positively in the params + + 36| declare function RestParam(...$ReadOnlyArray>): void; // Co param, no return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:36:28 + +T does not appear in the return + + 36| declare function RestParam(...$ReadOnlyArray>): void; // Co param, no return + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:39:27 + +T appears positively in the return + + 39| declare var Overloaded: ((null) => T) & ((T) => T); + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:39:27 + +T does not appear in the params + + 39| declare var Overloaded: ((null) => T) & ((T) => T); + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:39:46 + +T appears negatively in the params + + 39| declare var Overloaded: ((null) => T) & ((T) => T); + ^ + + +Error 
---------------------------------------------------------------------------------------------------- test.js:39:46 + +T appears positively in the return + + 39| declare var Overloaded: ((null) => T) & ((T) => T); + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:43:22 + +T appears negatively in the params + + 43| declare var Union: ((T) => T) | ((null) => T); + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:43:22 + +T appears positively in the return + + 43| declare var Union: ((T) => T) | ((null) => T); + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:43:38 + +T appears positively in the return + + 43| declare var Union: ((T) => T) | ((null) => T); + ^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:43:38 + +T does not appear in the params + + 43| declare var Union: ((T) => T) | ((null) => T); + ^ + + + +Found 36 errors diff --git a/tests/post_inference_implicit_instantiation/test.js b/tests/post_inference_implicit_instantiation/test.js new file mode 100644 index 00000000000..9adda04315d --- /dev/null +++ b/tests/post_inference_implicit_instantiation/test.js @@ -0,0 +1,44 @@ +//@flow +declare var arg: any; +import type {Covariant, Contravariant, Invariant} from './types'; +declare function f1(Covariant): Covariant; // Contra param, Co return +declare function f2(Covariant): Contravariant; // Contra param, Contra return +declare function f3(Covariant): Invariant; // Contra param, Inv return +declare function f4(Contravariant): Covariant; // Co param, Co return +declare function f5(Contravariant): Contravariant; // Co param, Contra return +declare function f6(Contravariant): Invariant; // Co param, Inv return +declare function f7(Invariant): Covariant; // Inv Param, Co return 
+declare function f8(Invariant): Contravariant; // Inv Param, Contra Return +declare function f9(Invariant): Invariant; // Inv Param, Inv Return +f1(arg); +f2(arg); +f3(arg); +f4(arg); +f5(arg); +f6(arg); +f7(arg); +f8(arg); +f9(arg); + +declare function missing(): void; // Missing param and return +missing(); + + +declare function InterdependentBounds>( // Follow bounds in each position we see U + Covariant, +): Covariant; + +InterdependentBounds(arg); + +declare function NameOverride((T) => T, T): (T) => T; // Contra Param, no return +NameOverride(arg); + +declare function RestParam(...$ReadOnlyArray>): void; // Co param, no return +RestParam(arg); + +declare var Overloaded: ((null) => T) & ((T) => T); +Overloaded(null); +Overloaded(3); // Need a not-any value so the second branch is hit + +declare var Union: ((T) => T) | ((null) => T); +Union(arg); diff --git a/tests/post_inference_implicit_instantiation/types.js b/tests/post_inference_implicit_instantiation/types.js new file mode 100644 index 00000000000..44d61a0b0bc --- /dev/null +++ b/tests/post_inference_implicit_instantiation/types.js @@ -0,0 +1,4 @@ +//@flow +export type Covariant<+T> = T; +export type Contravariant<-T> = T => void; +export type Invariant = T; From 6141107fff24231f8ed08daf47c7d40dd485aaa9 Mon Sep 17 00:00:00 2001 From: Jordan Brown Date: Wed, 2 Dec 2020 18:49:38 -0800 Subject: [PATCH 16/43] Keep track of call/constructor in implicit instantiation check Summary: In order to run the implicit instantiation again after inference, I need access to the original upper bound of the constraint. 
This stores that information in the implicit_instantiation_check data structure Reviewed By: panagosg7 Differential Revision: D24760289 fbshipit-source-id: b1b1f43278954eede8bb6040dc6b5612f8295502 --- src/typing/context.ml | 9 ++++++--- src/typing/context.mli | 7 +++++-- src/typing/flow_js.ml | 4 ++-- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/typing/context.ml b/src/typing/context.ml index 5c198c1595d..4289931c23b 100644 --- a/src/typing/context.ml +++ b/src/typing/context.ml @@ -79,7 +79,10 @@ type voidable_check = { errors: ALoc.t Property_assignment.errors; } -type implicit_instantiation_check = { fun_or_class: Type.t } +type implicit_instantiation_check = { + fun_or_class: Type.t; + call_or_constructor: Type.use_t; +} (* Equivalently, we could use a Reason.t option, but this is more self-documenting. *) type computed_property_state = @@ -617,8 +620,8 @@ let add_literal_subtypes cx c = cx.ccx.literal_subtypes <- c :: cx.ccx.literal_s let add_voidable_check cx voidable_check = cx.ccx.voidable_checks <- voidable_check :: cx.ccx.voidable_checks -let add_implicit_instantiation_check cx fun_or_class = - let implicit_instantiation_check = { fun_or_class } in +let add_implicit_instantiation_check cx fun_or_class call_or_constructor = + let implicit_instantiation_check = { fun_or_class; call_or_constructor } in if cx.metadata.run_post_inference_implicit_instantiation then cx.ccx.implicit_instantiation_checks <- implicit_instantiation_check :: cx.ccx.implicit_instantiation_checks diff --git a/src/typing/context.mli b/src/typing/context.mli index b2dd04eb2fc..1eb18cf17cd 100644 --- a/src/typing/context.mli +++ b/src/typing/context.mli @@ -96,7 +96,10 @@ type voidable_check = { errors: ALoc.t Property_assignment.errors; } -type implicit_instantiation_check = { fun_or_class: Type.t } +type implicit_instantiation_check = { + fun_or_class: Type.t; + call_or_constructor: Type.use_t; +} type computed_property_state = | ResolvedOnce of Reason.t @@ -328,7 
+331,7 @@ val add_literal_subtypes : t -> Type.t * Type.use_t -> unit val add_voidable_check : t -> voidable_check -> unit -val add_implicit_instantiation_check : t -> Type.t -> unit +val add_implicit_instantiation_check : t -> Type.t -> Type.use_t -> unit val remove_tvar : t -> Constraint.ident -> unit diff --git a/src/typing/flow_js.ml b/src/typing/flow_js.ml index 3d7c011f3ae..9c70575862d 100644 --- a/src/typing/flow_js.ml +++ b/src/typing/flow_js.ml @@ -4037,7 +4037,7 @@ struct begin match calltype.call_targs with | None -> - Context.add_implicit_instantiation_check cx l; + Context.add_implicit_instantiation_check cx l u; let t_ = instantiate_poly cx @@ -4079,7 +4079,7 @@ struct in rec_flow cx trace (t_, ConstructorT (use_op, reason_op, None, args, tout)) | ConstructorT (_, _, None, _, _) -> - Context.add_implicit_instantiation_check cx l; + Context.add_implicit_instantiation_check cx l u; let use_op = match use_op_of_use_t u with | Some use_op -> use_op From e2db85292d1496c45640963a5686dc6b03997fd6 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Thu, 3 Dec 2020 13:20:55 -0800 Subject: [PATCH 17/43] pass AST to autocomplete Summary: autocomplete runs `Scope_builder`, which only works on untyped ASTs right now, but autocomplete also needs the typed AST. previously, `check_contents` only returned the typed AST, so we had to strip the types off with `type_killer` for `Scope_builder`. but `check_contents` now returns the AST as well as the typed AST, so we can just pass it in instead. still, in the future we should make `Scope_builder` work on a typed AST and not need both. 
Reviewed By: vrama628 Differential Revision: D25306913 fbshipit-source-id: 01bfab2159ab9ed517d79c4d40faac0ac57aef93 --- src/server/command_handler/commandHandler.ml | 3 +- .../autocomplete/autocompleteService_js.ml | 36 ++++++++----------- .../autocomplete/autocompleteService_js.mli | 1 + 3 files changed, 17 insertions(+), 23 deletions(-) diff --git a/src/server/command_handler/commandHandler.ml b/src/server/command_handler/commandHandler.ml index 32ee0e22044..7146a2b5f1c 100644 --- a/src/server/command_handler/commandHandler.ml +++ b/src/server/command_handler/commandHandler.ml @@ -134,7 +134,7 @@ let autocomplete ~trigger_character ~reader ~options ~env ~profiling ~filename ~ :: initial_json_props ) in Lwt.return (Error err, Some json_data_to_log) - | Ok (cx, info, file_sig, _, _ast, typed_ast, parse_errors) -> + | Ok (cx, info, file_sig, _, ast, typed_ast, parse_errors) -> Profiling_js.with_timer_lwt profiling ~timer:"GetResults" ~f:(fun () -> try_with_json2 (fun () -> let open AutocompleteService_js in @@ -144,6 +144,7 @@ let autocomplete ~trigger_character ~reader ~options ~env ~profiling ~filename ~ ~reader ~cx ~file_sig + ~ast ~typed_ast trigger_character cursor_loc diff --git a/src/services/autocomplete/autocompleteService_js.ml b/src/services/autocomplete/autocompleteService_js.ml index c0ae555c504..85428445075 100644 --- a/src/services/autocomplete/autocompleteService_js.ml +++ b/src/services/autocomplete/autocompleteService_js.ml @@ -471,21 +471,10 @@ let autocomplete_member let result = { ServerProt.Response.Completion.items; is_incomplete = false } in AcResult { result; errors_to_log } -(* turns typed AST into normal AST so we can run Scope_builder on it *) -(* TODO(vijayramamurthy): make scope builder polymorphic *) -class type_killer (reader : Parsing_heaps.Reader.reader) = - object - inherit [ALoc.t, ALoc.t * Type.t, Loc.t, Loc.t] Flow_polymorphic_ast_mapper.mapper - - method on_loc_annot x = loc_of_aloc ~reader x - - method on_type_annot (x, _) = 
loc_of_aloc ~reader x - end - (* The fact that we need this feels convoluted. - We started with a typed AST, then stripped the types off of it to run Scope_builder on it, - and now we go back to the typed AST to get the types of the locations we got from Scope_api. - We wouldn't need to do this separate pass if Scope_builder/Scope_api were polymorphic. + We run Scope_builder on the untyped AST and now we go back to the typed AST to get the types + of the locations we got from Scope_api. We wouldn't need to do this separate pass if + Scope_builder/Scope_api were polymorphic. *) class type_collector (reader : Parsing_heaps.Reader.reader) (locs : LocSet.t) = object @@ -521,8 +510,8 @@ let documentation_of_loc ~options ~reader ~cx ~file_sig ~typed_ast loc = | Def_error _ -> None -let local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparams = - let scope_info = Scope_builder.program ((new type_killer reader)#program typed_ast) in +let local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~ast ~typed_ast ~tparams = + let scope_info = Scope_builder.program ast in let open Scope_api.With_Loc in (* get the innermost scope enclosing the requested location *) let (ac_scope_id, _) = @@ -578,12 +567,12 @@ let local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~t (* env is all visible bound names at cursor *) let autocomplete_id - ~options ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~include_super ~include_this ~tparams = + ~options ~reader ~cx ~ac_loc ~file_sig ~ast ~typed_ast ~include_super ~include_this ~tparams = let open ServerProt.Response.Completion in let ac_loc = loc_of_aloc ~reader ac_loc |> remove_autocomplete_token_from_loc in let exact_by_default = Context.exact_by_default cx in let (items, errors_to_log) = - local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparams + local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~ast ~tparams |> List.fold_left (fun (items, 
errors_to_log) ((name, documentation), elt_result) -> match elt_result with @@ -803,7 +792,7 @@ let type_exports_of_module_ty ~ac_loc ~exact_by_default ~documentation_of_module |> Base.List.mapi ~f:(fun i r -> { r with sort_text = sort_text_of_rank i }) | _ -> [] -let autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~file_sig ~ac_loc ~typed_ast = +let autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~file_sig ~ac_loc ~ast ~typed_ast = let open ServerProt.Response.Completion in let ac_loc = loc_of_aloc ~reader ac_loc |> remove_autocomplete_token_from_loc in let exact_by_default = Context.exact_by_default cx in @@ -844,7 +833,7 @@ let autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~file_sig ~ac_lo - classes - modules (followed by a dot) *) let (items, errors_to_log) = - local_value_identifiers ~options ~typed_ast ~reader ~ac_loc ~tparams ~cx ~file_sig + local_value_identifiers ~options ~ast ~typed_ast ~reader ~ac_loc ~tparams ~cx ~file_sig |> List.fold_left (fun (items, errors_to_log) ((name, documentation), ty_res) -> match ty_res with @@ -899,7 +888,8 @@ let autocomplete_qualified_type ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparam AcResult { result = { ServerProt.Response.Completion.items; is_incomplete = false }; errors_to_log } -let autocomplete_get_results ~options ~reader ~cx ~file_sig ~typed_ast trigger_character cursor = +let autocomplete_get_results ~options ~reader ~cx ~file_sig ~ast ~typed_ast trigger_character cursor + = let file_sig = File_sig.abstractify_locs file_sig in match Autocomplete_js.process_location ~trigger_character ~cursor ~typed_ast with | Some (_, _, Acbinding) -> ("Empty", AcEmpty "Binding") @@ -922,6 +912,7 @@ let autocomplete_get_results ~options ~reader ~cx ~file_sig ~typed_ast trigger_c ~cx ~ac_loc ~file_sig + ~ast ~typed_ast ~include_super ~include_this @@ -943,7 +934,8 @@ let autocomplete_get_results ~options ~reader ~cx ~file_sig ~typed_ast trigger_c autocomplete_jsx ~reader cx file_sig 
typed_ast cls ac_name ~used_attr_names ac_loc ~tparams ) | Some (tparams, ac_loc, Actype) -> ( "Actype", - autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~ac_loc ~typed_ast ~file_sig ) + autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~ac_loc ~ast ~typed_ast ~file_sig + ) | Some (tparams, ac_loc, Acqualifiedtype qtype) -> ( "Acqualifiedtype", autocomplete_qualified_type ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparams ~qtype ) diff --git a/src/services/autocomplete/autocompleteService_js.mli b/src/services/autocomplete/autocompleteService_js.mli index dd58cce30ca..4363f14c2eb 100644 --- a/src/services/autocomplete/autocompleteService_js.mli +++ b/src/services/autocomplete/autocompleteService_js.mli @@ -18,6 +18,7 @@ val autocomplete_get_results : reader:Parsing_heaps.Reader.reader -> cx:Context.t -> file_sig:File_sig.With_Loc.t -> + ast:(Loc.t, Loc.t) Flow_ast.Program.t -> typed_ast:(ALoc.t, ALoc.t * Type.t) Flow_ast.Program.t -> string option -> Loc.t -> From 4a8dbd3d6cca8ed4a17ccf32d10a8fb062b5e612 Mon Sep 17 00:00:00 2001 From: Panagiotis Vekris Date: Fri, 4 Dec 2020 10:29:20 -0800 Subject: [PATCH 18/43] [normalizer] fix recursion bug in ExpandMembersConverter Summary: ExpandMembersConverter peeks down on the input type constructor until it reaches an object-like structure. It then calls the regular normalizing procedure. The part that did the peeking did not handle some cases of recursive definition properly, causing infinite loops, e.g. in ``` type Foo = Foo & { bar : string }; declare var x : Foo; x.bar; // ^ ``` This diff fixes this, by using the same method for tracking recursion that the main normalizer paths do (`Recursive.with_cache`), and replacing the earlier call to `peek`. This diff also makes ExpandMembersConverter a functor (this helps with formatting) and also renames `loop` to `type__` for compatibility with other parts of the normalizer. 
(thanks to vrama628 for bringing up this example) Reviewed By: vrama628 Differential Revision: D25323934 fbshipit-source-id: 618dfea6ab9faa4d289941fc63b62666d5493866 --- src/typing/ty_normalizer.ml | 368 +++++++++++++++++++----------------- 1 file changed, 196 insertions(+), 172 deletions(-) diff --git a/src/typing/ty_normalizer.ml b/src/typing/ty_normalizer.ml index b8390f13956..93cea145463 100644 --- a/src/typing/ty_normalizer.ml +++ b/src/typing/ty_normalizer.ml @@ -2218,17 +2218,20 @@ end = struct |> extract_schemes typed_ast |> normalize_imports ~options ~genv) + module type EXPAND_MEMBERS_CONVERTER = sig + val include_proto_members : bool + + val idx_hook : unit -> unit + end + (* Expand the toplevel structure of the input type into an object type. This is * useful for services like autocomplete for properties. *) - module ExpandMembersConverter : sig - val convert_t : - include_proto_members:bool -> - idx_hook:(unit -> unit) -> - env:Env.t -> - Type.t -> - (Ty.t, error) t + module ExpandMembersConverter (Conf : EXPAND_MEMBERS_CONVERTER) : sig + val convert_t : env:Env.t -> Type.t -> (Ty.t, error) t end = struct + open Conf + (* Sets how to expand members upon encountering an InstanceT: * - if set to IMStatic then expand the static members * - if set to IMUnset or IMInstance then expand the instance members. 
@@ -2246,176 +2249,197 @@ end = struct | IMStatic | IMInstance - let convert_t ~include_proto_members ~idx_hook = - let rec set_proto_prop = - let open Ty in - function - | NamedProp { name; prop; from_proto = _ } -> NamedProp { name; prop; from_proto = true } - | CallProp _ as p -> p - | SpreadProp t -> SpreadProp (set_proto_t t) - and set_proto_t = - let open Ty in - function - | Obj o -> Obj { o with obj_props = Base.List.map ~f:set_proto_prop o.obj_props } - | t -> t + let rec set_proto_prop = + let open Ty in + function + | NamedProp { name; prop; from_proto = _ } -> NamedProp { name; prop; from_proto = true } + | CallProp _ as p -> p + | SpreadProp t -> SpreadProp (set_proto_t t) + + and set_proto_t = + let open Ty in + function + | Obj o -> Obj { o with obj_props = Base.List.map ~f:set_proto_prop o.obj_props } + | t -> t + + let rec arr_t ~env r a = + let builtin = + match a with + | T.ArrayAT _ -> "Array" + | T.ROArrayAT _ + | T.TupleAT _ -> + "$ReadOnlyArray" in - let rec loop ~env ~proto ~imode t = - match Lookahead.peek ~env t with - | Lookahead.Recursive - | Lookahead.LowerBounds [] -> - terr ~kind:UnsupportedTypeCtor ~msg:"no-lower bounds" None - | Lookahead.LowerBounds (l :: ls) -> - let%bind t = type_ctor ~env ~proto ~imode l in - let%map ts = mapM (type_ctor ~env ~proto ~imode) ls in - Ty.mk_union (t, ts) - and arr_t ~env r a = - let builtin = - match a with - | T.ArrayAT _ -> "Array" - | T.ROArrayAT _ - | T.TupleAT _ -> - "$ReadOnlyArray" - in - loop ~env ~proto:true ~imode:IMInstance (Flow_js.get_builtin (Env.get_cx env) builtin r) - and member_expand_object ~env super inst = - let { T.own_props; proto_props; _ } = inst in - let%bind own_ty_props = TypeConverter.convert_obj_props_t ~env own_props None in - let%bind proto_ty_props = - TypeConverter.convert_obj_props_t ~env ~proto:true proto_props None - in - let%map obj_props = - if include_proto_members then - let%map super_ty = loop ~env ~proto:true ~imode:IMInstance super in - (Ty.SpreadProp 
super_ty :: own_ty_props) @ proto_ty_props - else - return (own_ty_props @ proto_ty_props) - in - Ty.Obj { Ty.obj_kind = Ty.InexactObj; obj_frozen = false; obj_literal = None; obj_props } - and type_app_t ~env ~proto ~imode reason use_op c ts = - let cx = Env.get_cx env in - Context.with_normalizer_mode cx (fun cx -> - let trace = Trace.dummy_trace in - let reason_op = reason in - let reason_tapp = reason in - match Flow_js.mk_typeapp_instance cx ~trace ~use_op ~reason_op ~reason_tapp c ts with - | exception Flow_js.Attempted_operation_on_bound _ -> - terr ~kind:UnsupportedTypeCtor ~msg:"type_app" None - | t -> loop ~env ~proto ~imode t) - and enum_t ~env reason trust enum = - let { T.members; representation_t; _ } = enum in - let enum_ty = T.mk_enum_type ~loc:(def_aloc_of_reason reason) ~trust enum in - let proto_t = - let enum_t = T.mk_enum_type ~loc:(def_aloc_of_reason reason) ~trust enum in - Flow_js.get_builtin_typeapp - Env.(env.genv.cx) - reason - "$EnumProto" - [enum_t; representation_t] - in - let%bind proto_ty = loop ~env ~proto:true ~imode:IMUnset proto_t in - let%map enum_ty = TypeConverter.convert_t ~env enum_ty in - let members_ty = - List.map - (fun name -> - let prop = Ty.Field { t = enum_ty; polarity = Ty.Positive; optional = false } in - Ty.NamedProp { name; prop; from_proto = false }) - (SMap.keys members) - in - Ty.mk_object (Ty.SpreadProp proto_ty :: members_ty) - and obj_t ~env ~proto ~imode reason o = - let%bind obj = TypeConverter.convert_obj_t ~env reason o in - let obj = - if include_proto_members && proto then - { obj with Ty.obj_props = Base.List.map ~f:set_proto_prop obj.Ty.obj_props } - else - obj - in - let%map extra_props = - if include_proto_members then - let%map proto = loop ~env ~proto:true ~imode o.T.proto_t in - [Ty.SpreadProp proto] - else - return [] - in - { obj with Ty.obj_props = obj.Ty.obj_props @ extra_props } - and primitive ~env reason builtin = - let t = Flow_js.get_builtin_type (Env.get_cx env) reason builtin in - 
loop ~env ~proto:true ~imode:IMUnset t - and instance_t ~env ~imode r static super inst = - let { T.inst_kind; _ } = inst in - let desc = desc_of_reason ~unwrap:false r in - match (inst_kind, desc, imode) with - | (_, Reason.RReactComponent, _) -> TypeConverter.convert_instance_t ~env r super inst - | (T.ClassKind, _, IMStatic) -> loop ~env ~proto:false ~imode static - | (T.ClassKind, _, (IMUnset | IMInstance)) - | (T.InterfaceKind _, _, _) -> - member_expand_object ~env super inst - and latent_pred_t ~env ~proto ~imode id t = - let cx = Env.get_cx env in - let evaluated = Context.evaluated cx in - let t' = - match T.Eval.Map.find_opt id evaluated with - | Some evaled_t -> evaled_t - | None -> t - in - loop ~env ~proto ~imode t' - and this_class_t ~env ~proto ~imode t = - match imode with - | IMUnset -> loop ~env ~proto ~imode:IMStatic t - | IMInstance - | IMStatic -> - loop ~env ~proto ~imode t - and type_ctor ~env ~proto ~(imode : instance_mode) = - let open Type in - function - | DefT (_, _, IdxWrapper t) -> - idx_hook (); - loop ~env ~proto ~imode t - | ThisTypeAppT (_, c, _, _) -> loop ~env ~proto ~imode c - | DefT (r, _, (NumT _ | SingletonNumT _)) -> primitive ~env r "Number" - | DefT (r, _, (StrT _ | SingletonStrT _)) -> primitive ~env r "String" - | DefT (r, _, (BoolT _ | SingletonBoolT _)) -> primitive ~env r "Boolean" - | DefT (r, _, SymbolT) -> primitive ~env r "Symbol" - | ObjProtoT r -> primitive ~env r "Object" - | FunProtoT r -> primitive ~env r "Function" - | DefT (r, _, ObjT o) -> - let%map o = obj_t ~env ~proto ~imode r o in - Ty.Obj o - | DefT (_, _, ClassT t) -> loop ~env ~proto ~imode t - | DefT (r, _, ArrT a) -> arr_t ~env r a - | DefT (r, tr, EnumObjectT e) -> enum_t ~env r tr e - | DefT (r, _, InstanceT (static, super, _, inst)) -> - instance_t ~env ~imode r static super inst - | ThisClassT (_, t, _) -> this_class_t ~env ~proto ~imode t - | DefT (_, _, PolyT { t_out; _ }) -> loop ~env ~proto ~imode t_out - | MaybeT (_, t) -> - let%map t = 
loop ~env ~proto ~imode t in - Ty.mk_union (Ty.Void, [Ty.Null; t]) - | IntersectionT (_, rep) -> app_intersection ~f:(loop ~env ~proto ~imode) rep - | UnionT (_, rep) -> app_union ~f:(loop ~env ~proto ~imode) rep - | DefT (_, _, FunT (static, _, _)) -> loop ~env ~proto ~imode static - | TypeAppT (r, use_op, t, ts) -> type_app_t ~env ~proto ~imode r use_op t ts - | DefT (_, _, TypeT (_, t)) -> loop ~env ~proto ~imode t - | OptionalT { type_ = t; _ } -> - let%map t = loop ~env ~proto ~imode t in - Ty.mk_union (Ty.Void, [t]) - | EvalT (t, TypeDestructorT (use_op, r, d), id) -> - let cont = loop ~proto ~imode in - let non_eval = TypeConverter.convert_type_destructor_unevaluated in - let default = TypeConverter.convert_t ~skip_reason:false in - type_destructor_t ~env ~cont ~default ~non_eval (use_op, r, id, t, d) - | EvalT (t, LatentPredT _, id) -> latent_pred_t ~env ~proto ~imode id t - | ExactT (_, t) -> loop ~env ~proto ~imode t - | GenericT { bound; _ } -> loop ~env ~proto ~imode bound - | t -> TypeConverter.convert_t ~env t + type__ ~env ~proto:true ~imode:IMInstance (Flow_js.get_builtin (Env.get_cx env) builtin r) + + and member_expand_object ~env super inst = + let { T.own_props; proto_props; _ } = inst in + let%bind own_ty_props = TypeConverter.convert_obj_props_t ~env own_props None in + let%bind proto_ty_props = + TypeConverter.convert_obj_props_t ~env ~proto:true proto_props None + in + let%map obj_props = + if include_proto_members then + let%map super_ty = type__ ~env ~proto:true ~imode:IMInstance super in + (Ty.SpreadProp super_ty :: own_ty_props) @ proto_ty_props + else + return (own_ty_props @ proto_ty_props) in - loop ~proto:false ~imode:IMUnset + Ty.Obj { Ty.obj_kind = Ty.InexactObj; obj_frozen = false; obj_literal = None; obj_props } + + and type_app_t ~env ~proto ~imode reason use_op c ts = + let cx = Env.get_cx env in + Context.with_normalizer_mode cx (fun cx -> + let trace = Trace.dummy_trace in + let reason_op = reason in + let reason_tapp = 
reason in + match Flow_js.mk_typeapp_instance cx ~trace ~use_op ~reason_op ~reason_tapp c ts with + | exception Flow_js.Attempted_operation_on_bound _ -> + terr ~kind:UnsupportedTypeCtor ~msg:"type_app" None + | t -> type__ ~env ~proto ~imode t) + + and enum_t ~env reason trust enum = + let { T.members; representation_t; _ } = enum in + let enum_ty = T.mk_enum_type ~loc:(def_aloc_of_reason reason) ~trust enum in + let proto_t = + let enum_t = T.mk_enum_type ~loc:(def_aloc_of_reason reason) ~trust enum in + Flow_js.get_builtin_typeapp Env.(env.genv.cx) reason "$EnumProto" [enum_t; representation_t] + in + let%bind proto_ty = type__ ~env ~proto:true ~imode:IMUnset proto_t in + let%map enum_ty = TypeConverter.convert_t ~env enum_ty in + let members_ty = + List.map + (fun name -> + let prop = Ty.Field { t = enum_ty; polarity = Ty.Positive; optional = false } in + Ty.NamedProp { name; prop; from_proto = false }) + (SMap.keys members) + in + Ty.mk_object (Ty.SpreadProp proto_ty :: members_ty) + + and obj_t ~env ~proto ~imode reason o = + let%bind obj = TypeConverter.convert_obj_t ~env reason o in + let obj = + if include_proto_members && proto then + { obj with Ty.obj_props = Base.List.map ~f:set_proto_prop obj.Ty.obj_props } + else + obj + in + let%map extra_props = + if include_proto_members then + let%map proto = type__ ~env ~proto:true ~imode o.T.proto_t in + [Ty.SpreadProp proto] + else + return [] + in + { obj with Ty.obj_props = obj.Ty.obj_props @ extra_props } + + and primitive ~env reason builtin = + let t = Flow_js.get_builtin_type (Env.get_cx env) reason builtin in + type__ ~env ~proto:true ~imode:IMUnset t + + and instance_t ~env ~imode r static super inst = + let { T.inst_kind; _ } = inst in + let desc = desc_of_reason ~unwrap:false r in + match (inst_kind, desc, imode) with + | (_, Reason.RReactComponent, _) -> TypeConverter.convert_instance_t ~env r super inst + | (T.ClassKind, _, IMStatic) -> type__ ~env ~proto:false ~imode static + | (T.ClassKind, _, 
(IMUnset | IMInstance)) + | (T.InterfaceKind _, _, _) -> + member_expand_object ~env super inst + + and latent_pred_t ~env ~proto ~imode id t = + let cx = Env.get_cx env in + let evaluated = Context.evaluated cx in + let t' = + match T.Eval.Map.find_opt id evaluated with + | Some evaled_t -> evaled_t + | None -> t + in + type__ ~env ~proto ~imode t' + + and this_class_t ~env ~proto ~imode t = + match imode with + | IMUnset -> type__ ~env ~proto ~imode:IMStatic t + | IMInstance + | IMStatic -> + type__ ~env ~proto ~imode t + + and type_variable ~env ~proto ~imode id = + let (root_id, constraints) = Context.find_constraints Env.(env.genv.cx) id in + Recursive.with_cache (TVarKey root_id) ~f:(fun () -> + match constraints with + | Constraint.Resolved (_, t) + | Constraint.FullyResolved (_, t) -> + type__ ~env ~proto ~imode t + | Constraint.Unresolved bounds -> + let%map lowers = + mapM + (fun t -> type__ ~env ~proto ~imode t >>| Ty.bk_union >>| Nel.to_list) + (T.TypeMap.keys bounds.Constraint.lower) + in + let lowers = Base.List.(dedup_and_sort ~compare:Stdlib.compare (concat lowers)) in + (match lowers with + | [] -> Ty.Bot Ty.EmptyType + | hd :: tl -> Ty.mk_union ~flattened:true (hd, tl))) + + and type__ ~env ~proto ~(imode : instance_mode) t = + let open Type in + match t with + | OpenT (_, id) -> type_variable ~env ~proto ~imode id + | AnnotT (_, t, _) + | ReposT (_, t) -> + type__ ~env ~proto ~imode t + | DefT (_, _, IdxWrapper t) -> + idx_hook (); + type__ ~env ~proto ~imode t + | ThisTypeAppT (_, c, _, _) -> type__ ~env ~proto ~imode c + | DefT (r, _, (NumT _ | SingletonNumT _)) -> primitive ~env r "Number" + | DefT (r, _, (StrT _ | SingletonStrT _)) -> primitive ~env r "String" + | DefT (r, _, (BoolT _ | SingletonBoolT _)) -> primitive ~env r "Boolean" + | DefT (r, _, SymbolT) -> primitive ~env r "Symbol" + | ObjProtoT r -> primitive ~env r "Object" + | FunProtoT r -> primitive ~env r "Function" + | DefT (r, _, ObjT o) -> + let%map o = obj_t ~env ~proto ~imode 
r o in + Ty.Obj o + | DefT (_, _, ClassT t) -> type__ ~env ~proto ~imode t + | DefT (r, _, ArrT a) -> arr_t ~env r a + | DefT (r, tr, EnumObjectT e) -> enum_t ~env r tr e + | DefT (r, _, InstanceT (static, super, _, inst)) -> + instance_t ~env ~imode r static super inst + | ThisClassT (_, t, _) -> this_class_t ~env ~proto ~imode t + | DefT (_, _, PolyT { t_out; _ }) -> type__ ~env ~proto ~imode t_out + | MaybeT (_, t) -> + let%map t = type__ ~env ~proto ~imode t in + Ty.mk_union (Ty.Void, [Ty.Null; t]) + | IntersectionT (_, rep) -> app_intersection ~f:(type__ ~env ~proto ~imode) rep + | UnionT (_, rep) -> app_union ~f:(type__ ~env ~proto ~imode) rep + | DefT (_, _, FunT (static, _, _)) -> type__ ~env ~proto ~imode static + | TypeAppT (r, use_op, t, ts) -> type_app_t ~env ~proto ~imode r use_op t ts + | DefT (_, _, TypeT (_, t)) -> type__ ~env ~proto ~imode t + | OptionalT { type_ = t; _ } -> + let%map t = type__ ~env ~proto ~imode t in + Ty.mk_union (Ty.Void, [t]) + | EvalT (t, TypeDestructorT (use_op, r, d), id) -> + let cont = type__ ~proto ~imode in + let non_eval = TypeConverter.convert_type_destructor_unevaluated in + let default = TypeConverter.convert_t ~skip_reason:false in + type_destructor_t ~env ~cont ~default ~non_eval (use_op, r, id, t, d) + | EvalT (t, LatentPredT _, id) -> latent_pred_t ~env ~proto ~imode id t + | ExactT (_, t) -> type__ ~env ~proto ~imode t + | GenericT { bound; _ } -> type__ ~env ~proto ~imode bound + | t -> TypeConverter.convert_t ~env t + + let convert_t ~env t = type__ ~env ~proto:false ~imode:IMUnset t end let run_expand_members ~include_proto_members ~idx_hook = - run_type_aux - ~f:(ExpandMembersConverter.convert_t ~include_proto_members ~idx_hook) - ~simpl:Ty_utils.simplify_type + let module Converter = ExpandMembersConverter (struct + let include_proto_members = include_proto_members + + let idx_hook = idx_hook + end) in + run_type_aux ~f:Converter.convert_t ~simpl:Ty_utils.simplify_type end open NormalizerMonad From 
5d2ff431792ebdfd6148b992cac91d750f2d7637 Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Fri, 4 Dec 2020 13:15:53 -0800 Subject: [PATCH 19/43] Simplify idle logging loop Summary: Before this diff, we would sample and sleep for 1s in a loop, subtracting 1s from the idle timeout until we hit zero, then log. Instead of "counting" loops like this, we can just set up an infinite sampling loop and combine with a timeout using Lwt.pick, which will cancel the sampling loop once the timeout hits. While unimportant, the event timing is more regular with this change. In practice, collecting stats and waiting 1s takes longer than 1s, but we only subtracted 1 from the counter. This imprecision adds up over 300s, causing the actual duration to be 301.7 seconds in practice. I also added a call to flush the logger, since otherwise we would buffer the idle heartbeat events until the next recheck. Reviewed By: nmote Differential Revision: D25253975 fbshipit-source-id: 0a2ef2c94d920bdadde197fc525d154c1317c43d --- src/server/server.ml | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/src/server/server.ml b/src/server/server.ml index ffbc14f1111..a69fc88ec7b 100644 --- a/src/server/server.ml +++ b/src/server/server.ml @@ -66,7 +66,7 @@ let init ~profiling ?focus_targets genv = let rec log_on_idle = (* The time in seconds to gather data before logging. Shouldn't be too small or we'll flood the * logs. *) - let idle_period_in_seconds = 300 in + let idle_period_in_seconds = 300. in (* Grab memory stats. Since we're idle, we don't really care much about sharedmemory stats. 
But * our cgroup stats may change depending on the memory pressure *) let sample profiling = @@ -83,23 +83,21 @@ let rec log_on_idle = end; Lwt.return_unit in - (* Sample every second for `seconds_remaining` seconds *) - let rec sample_and_sleep profiling seconds_remaining = - if seconds_remaining > 0 then - let%lwt () = sample profiling in - let%lwt () = Lwt_unix.sleep 1.0 in - sample_and_sleep profiling (seconds_remaining - 1) - else - Lwt.return_unit + (* Sample every second *) + let rec sample_loop profiling = + let%lwt () = Lwt.join [sample profiling; Lwt_unix.sleep 1.0] in + sample_loop profiling in fun ~options start_time -> let should_print_summary = Options.should_profile options in let%lwt (profiling, ()) = Profiling_js.with_profiling_lwt ~label:"Idle" ~should_print_summary (fun profiling -> - let%lwt () = sample_and_sleep profiling idle_period_in_seconds in - sample profiling) + let sampler_thread = sample_loop profiling in + let timeout = Lwt_unix.sleep idle_period_in_seconds in + Lwt.pick [sampler_thread; timeout]) in FlowEventLogger.idle_heartbeat ~idle_time:(Unix.gettimeofday () -. start_time) ~profiling; + Lwt.async EventLoggerLwt.flush; log_on_idle ~options start_time let rec serve ~genv ~env = From 21b43f8b82ac88c48904b08946112cc80717c9d1 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Sat, 5 Dec 2020 07:11:24 -0800 Subject: [PATCH 20/43] pull typing out of Code_action_service, use cache Summary: `Code_action_service.code_actions_at_loc` called `Types_js.type_contents` internally. this is bad encapsulation and prevents reusing an existing typed ast. notably, we cache `type_contents` between LSP requests in commandHandler, but can't use the cache when we call it directly from `Code_action_service`. hitting the cache can make a huge perf difference on big files where `type_contents` dominates. 
Reviewed By: nmote Differential Revision: D25324233 fbshipit-source-id: 98f11acffd5d854b2e3033fa5a972c344e6e655a --- src/server/command_handler/commandHandler.ml | 41 +++++++++--- .../code_action/code_action_service.ml | 67 ++++++++++--------- .../code_action/code_action_service.mli | 14 ++-- 3 files changed, 77 insertions(+), 45 deletions(-) diff --git a/src/server/command_handler/commandHandler.ml b/src/server/command_handler/commandHandler.ml index 7146a2b5f1c..bc10abe11c4 100644 --- a/src/server/command_handler/commandHandler.ml +++ b/src/server/command_handler/commandHandler.ml @@ -911,22 +911,43 @@ let handle_save_state ~saved_state_filename ~genv ~profiling ~env = Lwt.return (env, ServerProt.Response.SAVE_STATE result, None) let find_code_actions ~reader ~options ~env ~profiling ~params ~client = - let CodeActionRequest.{ textDocument; range; _ } = params in + let CodeActionRequest.{ textDocument; range; context = { only = _; diagnostics } } = params in let (file_key, file, loc) = Flow_lsp_conversions.lsp_textDocument_and_range_to_flow textDocument range client in match File_input.content_of_file_input file with | Error msg -> Lwt.return (Error msg) | Ok file_contents -> - Code_action_service.code_actions_at_loc - ~reader - ~options - ~env - ~profiling - ~params - ~file_key - ~file_contents - ~loc + if not (Code_action_service.client_supports_quickfixes params) then + Lwt.return (Ok []) + else + let type_contents_cache = Some (Persistent_connection.type_contents_cache client) in + let uri = TextDocumentIdentifier.(textDocument.uri) in + let%lwt (type_contents_result, _did_hit_cache) = + type_contents_with_cache + ~options + ~env + ~profiling + ~type_contents_cache + file_contents + file_key + in + (match type_contents_result with + | Error _ -> Lwt.return (Ok []) + | Ok (cx, _info, file_sig, tolerable_errors, ast, typed_ast, parse_errors) -> + Code_action_service.code_actions_at_loc + ~reader + ~options + ~file_key + ~cx + ~file_sig + ~tolerable_errors + 
~ast + ~typed_ast + ~parse_errors + ~diagnostics + ~uri + ~loc) type command_handler = (* A command can be handled immediately if it is super duper fast and doesn't require the env. diff --git a/src/services/code_action/code_action_service.ml b/src/services/code_action/code_action_service.ml index eb9c92b6d00..308df1290db 100644 --- a/src/services/code_action/code_action_service.ml +++ b/src/services/code_action/code_action_service.ml @@ -98,39 +98,44 @@ let code_actions_of_parse_errors ~diagnostics ~uri ~loc parse_errors = ~init:[] parse_errors -let client_supports_quickfixes only = +(** currently all of our code actions are quickfixes, so we can short circuit if the client + doesn't support those. *) +let client_supports_quickfixes params = + let Lsp.CodeActionRequest.{ context = { only; _ }; _ } = params in Lsp.CodeActionKind.contains_kind_opt ~default:true Lsp.CodeActionKind.quickfix only -let code_actions_at_loc ~reader ~options ~env ~profiling ~params ~file_key ~file_contents ~loc = - let open Lsp in - let CodeActionRequest.{ textDocument; range = _; context = { only; diagnostics } } = params in - if not (client_supports_quickfixes only) then - (* currently all of our code actions are quickfixes, so we can short circuit *) - Lwt.return (Ok []) - else - let uri = TextDocumentIdentifier.(textDocument.uri) in - match%lwt Types_js.type_contents ~options ~env ~profiling file_contents file_key with - | Ok (full_cx, _info, file_sig, tolerable_errors, ast, typed_ast, parse_errors) -> - let experimental_code_actions = - if Inference_utils.well_formed_exports_enabled options file_key then - autofix_exports_code_actions - ~full_cx - ~ast - ~file_sig - ~tolerable_errors - ~typed_ast - ~diagnostics - uri - loc - else - [] - in - let error_fixes = - code_actions_of_errors ~reader ~diagnostics ~errors:(Context.errors full_cx) uri loc - in - let parse_error_fixes = code_actions_of_parse_errors ~diagnostics ~uri ~loc parse_errors in - Lwt.return (Ok (parse_error_fixes @ 
experimental_code_actions @ error_fixes)) - | Error _ -> Lwt.return (Ok []) +let code_actions_at_loc + ~reader + ~options + ~file_key + ~cx + ~file_sig + ~tolerable_errors + ~ast + ~typed_ast + ~parse_errors + ~diagnostics + ~uri + ~loc = + let experimental_code_actions = + if Inference_utils.well_formed_exports_enabled options file_key then + autofix_exports_code_actions + ~full_cx:cx + ~ast + ~file_sig + ~tolerable_errors + ~typed_ast + ~diagnostics + uri + loc + else + [] + in + let error_fixes = + code_actions_of_errors ~reader ~diagnostics ~errors:(Context.errors cx) uri loc + in + let parse_error_fixes = code_actions_of_parse_errors ~diagnostics ~uri ~loc parse_errors in + Lwt.return (Ok (parse_error_fixes @ experimental_code_actions @ error_fixes)) let autofix_exports ~options ~env ~profiling ~file_key ~file_content = let open Autofix_exports in diff --git a/src/services/code_action/code_action_service.mli b/src/services/code_action/code_action_service.mli index 61ebb16a134..890bdc5a8d9 100644 --- a/src/services/code_action/code_action_service.mli +++ b/src/services/code_action/code_action_service.mli @@ -5,14 +5,20 @@ * LICENSE file in the root directory of this source tree. 
*) +val client_supports_quickfixes : Lsp.CodeActionRequest.params -> bool + val code_actions_at_loc : reader:Parsing_heaps.Reader.reader -> options:Options.t -> - env:ServerEnv.env -> - profiling:Profiling_js.running -> - params:Lsp.CodeActionRequest.params -> file_key:File_key.t -> - file_contents:string -> + cx:Context.t -> + file_sig:File_sig.With_Loc.t -> + tolerable_errors:File_sig.With_Loc.tolerable_error list -> + ast:(Loc.t, Loc.t) Flow_ast.Program.t -> + typed_ast:(ALoc.t, ALoc.t * Type.t) Flow_ast.Program.t -> + parse_errors:(Loc.t * Parse_error.t) Base.List.t -> + diagnostics:Lsp.PublishDiagnostics.diagnostic list -> + uri:Lsp.DocumentUri.t -> loc:Loc.t -> (Lsp.CodeAction.command_or_action list, string) result Lwt.t From b0f2062d1ce9c7839e5ee290d1c52cb90393eee3 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 5 Dec 2020 12:02:32 -0800 Subject: [PATCH 21/43] Add ShadowRoot properties Summary: Implement missing properties in `ShadowRoot` and mark relevant ones as readOnly. Closes https://github.com/facebook/flow/issues/8457 Link to the documentation: https://developer.mozilla.org/en-US/docs/Web/API/ShadowRoot Pull Request resolved: https://github.com/facebook/flow/pull/8473 Reviewed By: gkz Differential Revision: D24952015 Pulled By: mroch fbshipit-source-id: 38c1af86e3e6c925b83ecba297bfcc0cc443e6c0 --- lib/dom.js | 4 +- tests/bom/bom.exp | 4 +- tests/dom/ShadowRoot.js | 29 ++ tests/dom/dom.exp | 652 +++++++++++++++++++++------------------- 4 files changed, 379 insertions(+), 310 deletions(-) create mode 100644 tests/dom/ShadowRoot.js diff --git a/lib/dom.js b/lib/dom.js index da96db363c5..b8c3e7c5085 100644 --- a/lib/dom.js +++ b/lib/dom.js @@ -119,8 +119,10 @@ declare interface CustomElementRegistry { } declare interface ShadowRoot extends DocumentFragment { - host: Element; + +delegatesFocus: boolean; + +host: Element; innerHTML: string; + +mode: ShadowRootMode; } declare type ShadowRootMode = 'open'|'closed'; diff --git a/tests/bom/bom.exp 
b/tests/bom/bom.exp index 909b32b6c09..4445eb78497 100644 --- a/tests/bom/bom.exp +++ b/tests/bom/bom.exp @@ -23,8 +23,8 @@ with `HTMLFormElement` [2]. [incompatible-call] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:783:70 - 783| createElement(tagName: 'input', options?: ElementCreationOptions): HTMLInputElement; + /dom.js:785:70 + 785| createElement(tagName: 'input', options?: ElementCreationOptions): HTMLInputElement; ^^^^^^^^^^^^^^^^ [1] /bom.js:528:24 528| constructor(form?: HTMLFormElement): void; diff --git a/tests/dom/ShadowRoot.js b/tests/dom/ShadowRoot.js new file mode 100644 index 00000000000..9c24fb7e6d8 --- /dev/null +++ b/tests/dom/ShadowRoot.js @@ -0,0 +1,29 @@ +// @flow + +let tests = [ + // delegatesFocus readOnly + function(root: ShadowRoot) { + // fails + root.delegatesFocus = true; + }, + + // host readOnly + function(root: ShadowRoot, element: Element) { + // fails + root.host = element; + }, + + // innerHTML + function(root: ShadowRoot) { + root.innerHTML = 'test'; + + // fails + root.innerHTML = true; + }, + + // mode readOnly + function(root: ShadowRoot) { + // fails + root.mode = 'open'; + }, +]; diff --git a/tests/dom/dom.exp b/tests/dom/dom.exp index bee817e6d53..3dafcfaae78 100644 --- a/tests/dom/dom.exp +++ b/tests/dom/dom.exp @@ -7,8 +7,8 @@ Cannot call `ctx.moveTo` with `'0'` bound to `x` because string [1] is incompati ^^^ [1] References: - /dom.js:2061:13 - 2061| moveTo(x: number, y: number): void; + /dom.js:2063:13 + 2063| moveTo(x: number, y: number): void; ^^^^^^ [2] @@ -21,8 +21,8 @@ Cannot call `ctx.moveTo` with `'1'` bound to `y` because string [1] is incompati ^^^ [1] References: - /dom.js:2061:24 - 2061| moveTo(x: number, y: number): void; + /dom.js:2063:24 + 2063| moveTo(x: number, y: number): void; ^^^^^^ [2] @@ -36,8 +36,8 @@ Cannot call `ClipboardEvent` with `'invalid'` bound to `type` because string [1] ^^^^^^^^^ [1] References: - /dom.js:576:21 - 576| constructor(type: ClipboardEventTypes, eventInit?: 
ClipboardEvent$Init): void; + /dom.js:578:21 + 578| constructor(type: ClipboardEventTypes, eventInit?: ClipboardEvent$Init): void; ^^^^^^^^^^^^^^^^^^^ [2] @@ -51,8 +51,8 @@ object literal [1] but exists in object type [2]. [prop-missing] ^^ [1] References: - /dom.js:573:41 - 573| type ClipboardEvent$Init = Event$Init & { clipboardData: DataTransfer | null, ... }; + /dom.js:575:41 + 575| type ClipboardEvent$Init = Event$Init & { clipboardData: DataTransfer | null, ... }; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] @@ -68,8 +68,8 @@ Cannot call `ClipboardEvent` with object literal bound to `eventInit` because ob ---------------------------------------^ [1] References: - /dom.js:573:58 - 573| type ClipboardEvent$Init = Event$Init & { clipboardData: DataTransfer | null, ... }; + /dom.js:575:58 + 575| type ClipboardEvent$Init = Event$Init & { clipboardData: DataTransfer | null, ... }; ^^^^^^^^^^^^ [2] @@ -82,8 +82,8 @@ Cannot call `e.clipboardData.getData` because property `getData` is missing in n ^^^^^^^ References: - /dom.js:577:19 - 577| +clipboardData: ?DataTransfer; // readonly + /dom.js:579:19 + 579| +clipboardData: ?DataTransfer; // readonly ^^^^^^^^^^^^^ [1] @@ -100,8 +100,8 @@ References: Element.js:14:40 14| element.scrollIntoView({ behavior: 'invalid' }); ^^^^^^^^^ [1] - /dom.js:1517:22 - 1517| behavior?: ('auto' | 'instant' | 'smooth'), + /dom.js:1519:22 + 1519| behavior?: ('auto' | 'instant' | 'smooth'), ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] @@ -118,8 +118,8 @@ References: Element.js:15:37 15| element.scrollIntoView({ block: 'invalid' }); ^^^^^^^^^ [1] - /dom.js:1518:19 - 1518| block?: ('start' | 'center' | 'end' | 'nearest'), + /dom.js:1520:19 + 1520| block?: ('start' | 'center' | 'end' | 'nearest'), ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] @@ -133,8 +133,8 @@ Cannot call `element.scrollIntoView` with `1` bound to `arg` because number [1] ^ [1] References: - /dom.js:1516:25 - 1516| scrollIntoView(arg?: (boolean | { + /dom.js:1518:25 + 1518| 
scrollIntoView(arg?: (boolean | { ^^^^^^^ [2] @@ -147,8 +147,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. [ ^^^^^^^^^ References: - /dom.js:697:56 - 697| item(nameOrIndex?: any, optionalIndex?: any): Elem | null; + /dom.js:699:56 + 699| item(nameOrIndex?: any, optionalIndex?: any): Elem | null; ^^^^ [1] @@ -161,8 +161,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. [ ^^^^^^^^^ References: - /dom.js:698:35 - 698| namedItem(name: string): Elem | null; + /dom.js:700:35 + 700| namedItem(name: string): Elem | null; ^^^^ [1] @@ -175,8 +175,8 @@ Cannot call `element.hasAttributes` because no arguments are expected by functio ^^^^^ References: - /dom.js:1505:3 - 1505| hasAttributes(): boolean; + /dom.js:1507:3 + 1507| hasAttributes(): boolean; ^^^^^^^^^^^^^^^^^^^^^^^^ [1] @@ -193,8 +193,8 @@ References: HTMLElement.js:22:39 22| element.scrollIntoView({behavior: 'invalid'}); ^^^^^^^^^ [1] - /dom.js:1517:22 - 1517| behavior?: ('auto' | 'instant' | 'smooth'), + /dom.js:1519:22 + 1519| behavior?: ('auto' | 'instant' | 'smooth'), ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] @@ -211,8 +211,8 @@ References: HTMLElement.js:23:36 23| element.scrollIntoView({block: 'invalid'}); ^^^^^^^^^ [1] - /dom.js:1518:19 - 1518| block?: ('start' | 'center' | 'end' | 'nearest'), + /dom.js:1520:19 + 1520| block?: ('start' | 'center' | 'end' | 'nearest'), ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] @@ -226,8 +226,8 @@ Cannot call `element.scrollIntoView` with `1` bound to `arg` because number [1] ^ [1] References: - /dom.js:1516:25 - 1516| scrollIntoView(arg?: (boolean | { + /dom.js:1518:25 + 1518| scrollIntoView(arg?: (boolean | { ^^^^^^^ [2] @@ -244,11 +244,11 @@ References: HTMLElement.js:46:56 46| (element.getElementsByTagName(str): HTMLCollection); ^^^^^^^^^^^^^^^^^ [1] - /dom.js:1448:54 - 1448| getElementsByTagName(name: string): HTMLCollection; + /dom.js:1450:54 + 1450| getElementsByTagName(name: string): HTMLCollection; 
^^^^^^^^^^^ [2] - /dom.js:694:31 - 694| declare class HTMLCollection<+Elem: HTMLElement> { + /dom.js:696:31 + 696| declare class HTMLCollection<+Elem: HTMLElement> { ^^^^ [3] @@ -269,11 +269,11 @@ References: HTMLElement.js:50:23 50| ): HTMLCollection); ^^^^^^^^^^^^^^^^^ [1] - /dom.js:1502:90 - 1502| getElementsByTagNameNS(namespaceURI: string | null, localName: string): HTMLCollection; + /dom.js:1504:90 + 1504| getElementsByTagNameNS(namespaceURI: string | null, localName: string): HTMLCollection; ^^^^^^^^^^^ [2] - /dom.js:694:31 - 694| declare class HTMLCollection<+Elem: HTMLElement> { + /dom.js:696:31 + 696| declare class HTMLCollection<+Elem: HTMLElement> { ^^^^ [3] @@ -287,8 +287,8 @@ Cannot cast `element.querySelector(...)` to union type because `HTMLElement` [1] ^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:1607:36 - 1607| querySelector(selector: string): HTMLElement | null; + /dom.js:1609:36 + 1609| querySelector(selector: string): HTMLElement | null; ^^^^^^^^^^^ [1] HTMLElement.js:51:34 51| (element.querySelector(str): HTMLAnchorElement | null); @@ -308,11 +308,11 @@ References: HTMLElement.js:52:46 52| (element.querySelectorAll(str): NodeList); ^^^^^^^^^^^^^^^^^ [1] - /dom.js:1671:48 - 1671| querySelectorAll(selector: string): NodeList; + /dom.js:1673:48 + 1673| querySelectorAll(selector: string): NodeList; ^^^^^^^^^^^ [2] - /dom.js:658:24 - 658| declare class NodeList { + /dom.js:660:24 + 660| declare class NodeList { ^ [3] @@ -329,11 +329,11 @@ References: HTMLElement.js:55:58 55| (element.getElementsByTagName('div'): HTMLCollection); ^^^^^^^^^^^^^^^^^ [1] - /dom.js:1407:53 - 1407| getElementsByTagName(name: 'div'): HTMLCollection; + /dom.js:1409:53 + 1409| getElementsByTagName(name: 'div'): HTMLCollection; ^^^^^^^^^^^^^^ [2] - /dom.js:694:31 - 694| declare class HTMLCollection<+Elem: HTMLElement> { + /dom.js:696:31 + 696| declare class HTMLCollection<+Elem: HTMLElement> { ^^^^ [3] @@ -354,11 +354,11 @@ References: HTMLElement.js:59:23 59| ): 
HTMLCollection); ^^^^^^^^^^^^^^^^^ [1] - /dom.js:1461:89 - 1461| getElementsByTagNameNS(namespaceURI: string | null, localName: 'div'): HTMLCollection; + /dom.js:1463:89 + 1463| getElementsByTagNameNS(namespaceURI: string | null, localName: 'div'): HTMLCollection; ^^^^^^^^^^^^^^ [2] - /dom.js:694:31 - 694| declare class HTMLCollection<+Elem: HTMLElement> { + /dom.js:696:31 + 696| declare class HTMLCollection<+Elem: HTMLElement> { ^^^^ [3] @@ -372,8 +372,8 @@ Cannot cast `element.querySelector(...)` to union type because `HTMLDivElement` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:1560:35 - 1560| querySelector(selector: 'div'): HTMLDivElement | null; + /dom.js:1562:35 + 1562| querySelector(selector: 'div'): HTMLDivElement | null; ^^^^^^^^^^^^^^ [1] HTMLElement.js:60:36 60| (element.querySelector('div'): HTMLAnchorElement | null); @@ -390,14 +390,14 @@ Cannot cast `element.querySelectorAll(...)` to `NodeList` because `HTMLDivElemen ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:1624:47 - 1624| querySelectorAll(selector: 'div'): NodeList; + /dom.js:1626:47 + 1626| querySelectorAll(selector: 'div'): NodeList; ^^^^^^^^^^^^^^ [1] HTMLElement.js:61:48 61| (element.querySelectorAll('div'): NodeList); ^^^^^^^^^^^^^^^^^ [2] - /dom.js:658:24 - 658| declare class NodeList { + /dom.js:660:24 + 660| declare class NodeList { ^ [3] @@ -414,11 +414,11 @@ References: HTMLElement.js:61:48 61| (element.querySelectorAll('div'): NodeList); ^^^^^^^^^^^^^^^^^ [1] - /dom.js:1624:47 - 1624| querySelectorAll(selector: 'div'): NodeList; + /dom.js:1626:47 + 1626| querySelectorAll(selector: 'div'): NodeList; ^^^^^^^^^^^^^^ [2] - /dom.js:658:24 - 658| declare class NodeList { + /dom.js:660:24 + 660| declare class NodeList { ^ [3] @@ -432,8 +432,8 @@ in property `preventScroll`. [incompatible-call] ^^^^^^^^^ [1] References: - /dom.js:1340:39 - 1340| type FocusOptions = { preventScroll?: boolean, ... } + /dom.js:1342:39 + 1342| type FocusOptions = { preventScroll?: boolean, ... 
} ^^^^^^^ [2] @@ -447,8 +447,8 @@ Cannot call `element.focus` with `1` bound to `options` because number [1] is in ^ [1] References: - /dom.js:1683:19 - 1683| focus(options?: FocusOptions): void; + /dom.js:1685:19 + 1685| focus(options?: FocusOptions): void; ^^^^^^^^^^^^ [2] @@ -461,8 +461,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. [ ^^^^^^^^^ References: - /dom.js:2990:43 - 2990| [index: number | string]: HTMLElement | null; + /dom.js:2992:43 + 2992| [index: number | string]: HTMLElement | null; ^^^^ [1] @@ -475,8 +475,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. [ ^^^^^^^^^ References: - /dom.js:2990:43 - 2990| [index: number | string]: HTMLElement | null; + /dom.js:2992:43 + 2992| [index: number | string]: HTMLElement | null; ^^^^ [1] @@ -492,8 +492,8 @@ References: HTMLInputElement.js:7:28 7| el.setRangeText('foo', 123); // end is required ^^^ [1] - /dom.js:3371:45 - 3371| setRangeText(replacement: string, start?: void, end?: void, selectMode?: void): void; + /dom.js:3373:45 + 3373| setRangeText(replacement: string, start?: void, end?: void, selectMode?: void): void; ^^^^ [2] @@ -509,8 +509,8 @@ References: HTMLInputElement.js:10:38 10| el.setRangeText('foo', 123, 234, 'bogus'); // invalid value ^^^^^^^ [1] - /dom.js:3372:78 - 3372| setRangeText(replacement: string, start: number, end: number, selectMode?: SelectionMode): void; + /dom.js:3374:78 + 3374| setRangeText(replacement: string, start: number, end: number, selectMode?: SelectionMode): void; ^^^^^^^^^^^^^ [2] @@ -523,8 +523,8 @@ Cannot get `form.action` because property `action` is missing in null [1]. [inco ^^^^^^ References: - /dom.js:3432:27 - 3432| form: HTMLFormElement | null; + /dom.js:3434:27 + 3434| form: HTMLFormElement | null; ^^^^ [1] @@ -537,8 +537,8 @@ Cannot get `item.value` because property `value` is missing in null [1]. 
[incomp ^^^^^ References: - /dom.js:3450:44 - 3450| item(index: number): HTMLOptionElement | null; + /dom.js:3452:44 + 3452| item(index: number): HTMLOptionElement | null; ^^^^ [1] @@ -551,8 +551,8 @@ Cannot get `item.value` because property `value` is missing in null [1]. [incomp ^^^^^ References: - /dom.js:3451:48 - 3451| namedItem(name: string): HTMLOptionElement | null; + /dom.js:3453:48 + 3453| namedItem(name: string): HTMLOptionElement | null; ^^^^ [1] @@ -567,11 +567,11 @@ Cannot get `attributes[null]` because: [incompatible-type] ^^^^ [1] References: - /dom.js:675:11 - 675| [index: number | string]: Attr; + /dom.js:677:11 + 677| [index: number | string]: Attr; ^^^^^^ [2] - /dom.js:675:20 - 675| [index: number | string]: Attr; + /dom.js:677:20 + 677| [index: number | string]: Attr; ^^^^^^ [3] @@ -586,14 +586,52 @@ Cannot get `attributes[{...}]` because: [incompatible-type] ^^ [1] References: - /dom.js:675:11 - 675| [index: number | string]: Attr; + /dom.js:677:11 + 677| [index: number | string]: Attr; ^^^^^^ [2] - /dom.js:675:20 - 675| [index: number | string]: Attr; + /dom.js:677:20 + 677| [index: number | string]: Attr; ^^^^^^ [3] +Error ----------------------------------------------------------------------------------------------- ShadowRoot.js:7:10 + +Cannot assign `true` to `root.delegatesFocus` because property `delegatesFocus` is not writable. [cannot-write] + + 7| root.delegatesFocus = true; + ^^^^^^^^^^^^^^ + + +Error ---------------------------------------------------------------------------------------------- ShadowRoot.js:13:10 + +Cannot assign `element` to `root.host` because property `host` is not writable. [cannot-write] + + 13| root.host = element; + ^^^^ + + +Error ---------------------------------------------------------------------------------------------- ShadowRoot.js:21:22 + +Cannot assign `true` to `root.innerHTML` because boolean [1] is incompatible with string [2]. 
[incompatible-type] + + ShadowRoot.js:21:22 + 21| root.innerHTML = true; + ^^^^ [1] + +References: + /dom.js:124:14 + 124| innerHTML: string; + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------- ShadowRoot.js:27:10 + +Cannot assign `'open'` to `root.mode` because property `mode` is not writable. [cannot-write] + + 27| root.mode = 'open'; + ^^^^ + + Error ------------------------------------------------------------------------------------------------------ URL.js:8:21 Cannot get `c.path` because property `path` is missing in `URL` [1]. [prop-missing] @@ -617,8 +655,8 @@ Cannot call `target.attachEvent` because undefined [1] is not a function. [not-a ^^^^^^^^^^^ References: - /dom.js:255:17 - 255| attachEvent?: (type: string, listener: EventListener) => void; + /dom.js:257:17 + 257| attachEvent?: (type: string, listener: EventListener) => void; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] @@ -631,8 +669,8 @@ Cannot call `target.detachEvent` because undefined [1] is not a function. 
[not-a ^^^^^^^^^^^ References: - /dom.js:273:17 - 273| detachEvent?: (type: string, listener: EventListener) => void; + /dom.js:275:17 + 275| detachEvent?: (type: string, listener: EventListener) => void; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] @@ -657,8 +695,8 @@ References: path2d.js:16:33 16| (path.arcTo(0, 0, 0, 0, 10, '20', 5): void); // invalid ^^^^ [1] - /dom.js:1926:83 - 1926| arcTo(x1: number, y1: number, x2: number, y2: number, radiusX: number, radiusY: number, rotation: number): void; + /dom.js:1928:83 + 1928| arcTo(x1: number, y1: number, x2: number, y2: number, radiusX: number, radiusY: number, rotation: number): void; ^^^^^^ [2] @@ -672,8 +710,8 @@ null [2] in the second argument of property `prototype.attributeChangedCallback` ^^^^^^ [1] References: - /dom.js:715:36 - 715| oldAttributeValue: null, + /dom.js:717:36 + 717| oldAttributeValue: null, ^^^^ [2] @@ -687,8 +725,8 @@ null [2] in the third argument of property `prototype.attributeChangedCallback`. ^^^^^^ [1] References: - /dom.js:730:36 - 730| newAttributeValue: null, + /dom.js:732:36 + 732| newAttributeValue: null, ^^^^ [2] @@ -745,128 +783,128 @@ References: traversal.js:29:33 29| document.createNodeIterator({}); // invalid ^^ [1] - /dom.js:1158:33 - 1158| createNodeIterator(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1160:33 + 1160| createNodeIterator(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface): NodeIterator; ^^^^ [2] - /dom.js:1166:33 - 1166| createNodeIterator(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1168:33 + 1168| createNodeIterator(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [3] - /dom.js:1167:33 - 1167| createNodeIterator(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1169:33 + 1169| createNodeIterator(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface): NodeIterator; 
^^^^^^^^ [4] - /dom.js:1168:33 - 1168| createNodeIterator(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1170:33 + 1170| createNodeIterator(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [5] - /dom.js:1169:33 - 1169| createNodeIterator(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1171:33 + 1171| createNodeIterator(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [6] - /dom.js:1170:33 - 1170| createNodeIterator(root: RootNodeT, whatToShow: 384, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1172:33 + 1172| createNodeIterator(root: RootNodeT, whatToShow: 384, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [7] - /dom.js:1171:33 - 1171| createNodeIterator(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1173:33 + 1173| createNodeIterator(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [8] - /dom.js:1172:33 - 1172| createNodeIterator(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1174:33 + 1174| createNodeIterator(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [9] - /dom.js:1173:33 - 1173| createNodeIterator(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1175:33 + 1175| createNodeIterator(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [10] - /dom.js:1174:33 - 1174| createNodeIterator(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1176:33 + 1176| createNodeIterator(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [11] - /dom.js:1175:33 - 1175| createNodeIterator(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1177:33 + 1177| 
createNodeIterator(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [12] - /dom.js:1176:33 - 1176| createNodeIterator(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1178:33 + 1178| createNodeIterator(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [13] - /dom.js:1177:33 - 1177| createNodeIterator(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1179:33 + 1179| createNodeIterator(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [14] - /dom.js:1178:33 - 1178| createNodeIterator(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1180:33 + 1180| createNodeIterator(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [15] - /dom.js:1179:33 - 1179| createNodeIterator(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1181:33 + 1181| createNodeIterator(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [16] - /dom.js:1180:33 - 1180| createNodeIterator(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1182:33 + 1182| createNodeIterator(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [17] - /dom.js:1181:33 - 1181| createNodeIterator(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1183:33 + 1183| createNodeIterator(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [18] - /dom.js:1182:33 - 1182| createNodeIterator(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1184:33 + 1184| createNodeIterator(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [19] - /dom.js:1183:33 - 1183| createNodeIterator(root: 
RootNodeT, whatToShow: 769, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1185:33 + 1185| createNodeIterator(root: RootNodeT, whatToShow: 769, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [20] - /dom.js:1184:33 - 1184| createNodeIterator(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1186:33 + 1186| createNodeIterator(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [21] - /dom.js:1185:33 - 1185| createNodeIterator(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1187:33 + 1187| createNodeIterator(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [22] - /dom.js:1186:33 - 1186| createNodeIterator(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1188:33 + 1188| createNodeIterator(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [23] - /dom.js:1187:33 - 1187| createNodeIterator(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1189:33 + 1189| createNodeIterator(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [24] - /dom.js:1188:33 - 1188| createNodeIterator(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1190:33 + 1190| createNodeIterator(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [25] - /dom.js:1189:33 - 1189| createNodeIterator(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1191:33 + 1191| createNodeIterator(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [26] - /dom.js:1217:33 - 1217| createNodeIterator(root: RootNodeT, whatToShow: 1024, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1219:33 + 1219| createNodeIterator(root: RootNodeT, whatToShow: 1024, filter?: 
NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [27] - /dom.js:1218:33 - 1218| createNodeIterator(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1220:33 + 1220| createNodeIterator(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [28] - /dom.js:1219:33 - 1219| createNodeIterator(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1221:33 + 1221| createNodeIterator(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [29] - /dom.js:1220:33 - 1220| createNodeIterator(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1222:33 + 1222| createNodeIterator(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [30] - /dom.js:1221:33 - 1221| createNodeIterator(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1223:33 + 1223| createNodeIterator(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [31] - /dom.js:1222:33 - 1222| createNodeIterator(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1224:33 + 1224| createNodeIterator(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [32] - /dom.js:1223:33 - 1223| createNodeIterator(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1225:33 + 1225| createNodeIterator(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [33] - /dom.js:1224:33 - 1224| createNodeIterator(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1226:33 + 1226| createNodeIterator(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [34] - /dom.js:1237:33 - 1237| 
createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1239:33 + 1239| createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; ^^^^ [35] - /dom.js:1238:33 - 1238| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1240:33 + 1240| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; ^^^^ [36] - /dom.js:1239:33 - 1239| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1241:33 + 1241| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; ^^^^ [37] - /dom.js:1240:33 - 1240| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1242:33 + 1242| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): NodeIterator; ^^^^ [38] - /dom.js:1241:33 - 1241| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1243:33 + 1243| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; ^^^^ [39] - /dom.js:1242:33 - 1242| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1244:33 + 1244| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; ^^^^ [40] - /dom.js:1243:33 - 1243| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1245:33 + 1245| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; ^^^^ [41] - /dom.js:1254:33 - 1254| createNodeIterator(root: RootNodeT, whatToShow?: number, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1256:33 + 1256| createNodeIterator(root: RootNodeT, whatToShow?: number, filter?: 
NodeFilterInterface): NodeIterator; ^^^^ [42] @@ -923,128 +961,128 @@ References: traversal.js:33:31 33| document.createTreeWalker({}); // invalid ^^ [1] - /dom.js:1159:31 - 1159| createTreeWalker(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1161:31 + 1161| createTreeWalker(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [2] - /dom.js:1190:31 - 1190| createTreeWalker(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1192:31 + 1192| createTreeWalker(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [3] - /dom.js:1191:31 - 1191| createTreeWalker(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1193:31 + 1193| createTreeWalker(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [4] - /dom.js:1192:31 - 1192| createTreeWalker(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1194:31 + 1194| createTreeWalker(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [5] - /dom.js:1193:31 - 1193| createTreeWalker(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1195:31 + 1195| createTreeWalker(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [6] - /dom.js:1194:31 - 1194| createTreeWalker(root: RootNodeT, whatToShow: 384, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1196:31 + 1196| createTreeWalker(root: RootNodeT, whatToShow: 
384, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [7] - /dom.js:1195:31 - 1195| createTreeWalker(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1197:31 + 1197| createTreeWalker(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [8] - /dom.js:1196:31 - 1196| createTreeWalker(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1198:31 + 1198| createTreeWalker(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [9] - /dom.js:1197:31 - 1197| createTreeWalker(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1199:31 + 1199| createTreeWalker(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [10] - /dom.js:1198:31 - 1198| createTreeWalker(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1200:31 + 1200| createTreeWalker(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [11] - /dom.js:1199:31 - 1199| createTreeWalker(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1201:31 + 1201| createTreeWalker(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [12] - /dom.js:1200:31 - 1200| createTreeWalker(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1202:31 + 1202| createTreeWalker(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface, 
entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [13] - /dom.js:1201:31 - 1201| createTreeWalker(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1203:31 + 1203| createTreeWalker(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [14] - /dom.js:1202:31 - 1202| createTreeWalker(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1204:31 + 1204| createTreeWalker(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [15] - /dom.js:1203:31 - 1203| createTreeWalker(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1205:31 + 1205| createTreeWalker(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [16] - /dom.js:1204:31 - 1204| createTreeWalker(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1206:31 + 1206| createTreeWalker(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [17] - /dom.js:1205:31 - 1205| createTreeWalker(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1207:31 + 1207| createTreeWalker(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [18] - /dom.js:1206:31 - 1206| createTreeWalker(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1208:31 + 1208| createTreeWalker(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): 
TreeWalker; ^^^^^^^^ [19] - /dom.js:1207:31 - 1207| createTreeWalker(root: RootNodeT, whatToShow: 769, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1209:31 + 1209| createTreeWalker(root: RootNodeT, whatToShow: 769, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [20] - /dom.js:1208:31 - 1208| createTreeWalker(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1210:31 + 1210| createTreeWalker(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [21] - /dom.js:1209:31 - 1209| createTreeWalker(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1211:31 + 1211| createTreeWalker(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [22] - /dom.js:1210:31 - 1210| createTreeWalker(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1212:31 + 1212| createTreeWalker(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [23] - /dom.js:1211:31 - 1211| createTreeWalker(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1213:31 + 1213| createTreeWalker(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [24] - /dom.js:1212:31 - 1212| createTreeWalker(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1214:31 + 1214| createTreeWalker(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [25] - /dom.js:1213:31 
- 1213| createTreeWalker(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1215:31 + 1215| createTreeWalker(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [26] - /dom.js:1225:31 - 1225| createTreeWalker(root: RootNodeT, whatToShow: 1024, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1227:31 + 1227| createTreeWalker(root: RootNodeT, whatToShow: 1024, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [27] - /dom.js:1226:31 - 1226| createTreeWalker(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1228:31 + 1228| createTreeWalker(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [28] - /dom.js:1227:31 - 1227| createTreeWalker(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1229:31 + 1229| createTreeWalker(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [29] - /dom.js:1228:31 - 1228| createTreeWalker(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1230:31 + 1230| createTreeWalker(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [30] - /dom.js:1229:31 - 1229| createTreeWalker(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1231:31 + 1231| createTreeWalker(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [31] - 
/dom.js:1230:31 - 1230| createTreeWalker(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1232:31 + 1232| createTreeWalker(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [32] - /dom.js:1231:31 - 1231| createTreeWalker(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1233:31 + 1233| createTreeWalker(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [33] - /dom.js:1232:31 - 1232| createTreeWalker(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1234:31 + 1234| createTreeWalker(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [34] - /dom.js:1244:31 - 1244| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1246:31 + 1246| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [35] - /dom.js:1245:31 - 1245| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1247:31 + 1247| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [36] - /dom.js:1246:31 - 1246| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1248:31 + 1248| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [37] - /dom.js:1247:31 - 1247| 
createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1249:31 + 1249| createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [38] - /dom.js:1248:31 - 1248| createTreeWalker(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1250:31 + 1250| createTreeWalker(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [39] - /dom.js:1249:31 - 1249| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1251:31 + 1251| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [40] - /dom.js:1250:31 - 1250| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1252:31 + 1252| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [41] - /dom.js:1255:31 - 1255| createTreeWalker(root: RootNodeT, whatToShow?: number, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1257:31 + 1257| createTreeWalker(root: RootNodeT, whatToShow?: number, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [42] @@ -1058,11 +1096,11 @@ literal union [2] in the return value. 
[incompatible-call] ^^^^^^^^ [1] References: - /dom.js:3916:1 + /dom.js:3918:1 v-------------------------------- - 3916| typeof NodeFilter.FILTER_ACCEPT | - 3917| typeof NodeFilter.FILTER_REJECT | - 3918| typeof NodeFilter.FILTER_SKIP; + 3918| typeof NodeFilter.FILTER_ACCEPT | + 3919| typeof NodeFilter.FILTER_REJECT | + 3920| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -1076,11 +1114,11 @@ literal union [2] in the return value of property `acceptNode`. [incompatible-ca ^^^^^^^^ [1] References: - /dom.js:3916:1 + /dom.js:3918:1 v-------------------------------- - 3916| typeof NodeFilter.FILTER_ACCEPT | - 3917| typeof NodeFilter.FILTER_REJECT | - 3918| typeof NodeFilter.FILTER_SKIP; + 3918| typeof NodeFilter.FILTER_ACCEPT | + 3919| typeof NodeFilter.FILTER_REJECT | + 3920| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -1103,26 +1141,26 @@ References: traversal.js:189:48 189| document.createNodeIterator(document_body, -1, {}); // invalid ^^ [1] - /dom.js:1237:68 - 1237| createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1239:68 + 1239| createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; ^ [2] - /dom.js:1238:68 - 1238| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1240:68 + 1240| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; ^ [3] - /dom.js:1239:68 - 1239| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1241:68 + 1241| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; ^ [4] - /dom.js:1240:68 - 1240| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1242:68 + 1242| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): NodeIterator; 
^^^ [5] - /dom.js:1241:68 - 1241| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1243:68 + 1243| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; ^^^ [6] - /dom.js:1242:68 - 1242| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1244:68 + 1244| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; ^^^ [7] - /dom.js:1243:68 - 1243| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1245:68 + 1245| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; ^^^ [8] @@ -1136,11 +1174,11 @@ union [2] in the return value. [incompatible-call] ^^^^^^^^ [1] References: - /dom.js:3916:1 + /dom.js:3918:1 v-------------------------------- - 3916| typeof NodeFilter.FILTER_ACCEPT | - 3917| typeof NodeFilter.FILTER_REJECT | - 3918| typeof NodeFilter.FILTER_SKIP; + 3918| typeof NodeFilter.FILTER_ACCEPT | + 3919| typeof NodeFilter.FILTER_REJECT | + 3920| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -1154,11 +1192,11 @@ literal union [2] in the return value of property `acceptNode`. 
[incompatible-ca ^^^^^^^^ [1] References: - /dom.js:3916:1 + /dom.js:3918:1 v-------------------------------- - 3916| typeof NodeFilter.FILTER_ACCEPT | - 3917| typeof NodeFilter.FILTER_REJECT | - 3918| typeof NodeFilter.FILTER_SKIP; + 3918| typeof NodeFilter.FILTER_ACCEPT | + 3919| typeof NodeFilter.FILTER_REJECT | + 3920| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -1181,31 +1219,31 @@ References: traversal.js:196:46 196| document.createTreeWalker(document_body, -1, {}); // invalid ^^ [1] - /dom.js:1244:66 - 1244| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1246:66 + 1246| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^ [2] - /dom.js:1245:66 - 1245| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1247:66 + 1247| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^ [3] - /dom.js:1246:66 - 1246| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1248:66 + 1248| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^ [4] - /dom.js:1247:66 - 1247| createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1249:66 + 1249| createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [5] - /dom.js:1248:66 - 1248| createTreeWalker(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1250:66 + 1250| createTreeWalker(root: RootNodeT, 
whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [6] - /dom.js:1249:66 - 1249| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1251:66 + 1251| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [7] - /dom.js:1250:66 - 1250| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1252:66 + 1252| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [8] -Found 50 errors +Found 54 errors Only showing the most relevant union/intersection branches. To see all branches, re-run Flow with --show-all-branches From 329ec4f29504d298e1da20d319be5ddc72affa9d Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Mon, 7 Dec 2020 06:57:26 -0800 Subject: [PATCH 22/43] Flow v0.139.0 Reviewed By: jbrown215 Differential Revision: D25355937 fbshipit-source-id: fdfef781b06c92a73fa3b9961f37207bcb75a825 --- Changelog.md | 16 ++++++++++++++++ flow_parser.opam | 2 +- flowtype.opam | 2 +- packages/flow-parser-bin/package.json | 2 +- packages/flow-parser/package.json | 2 +- packages/flow-remove-types/package.json | 4 ++-- src/common/flow_version.ml | 2 +- src/parser/META | 2 +- website/en/docs/_install/setup-npm.md | 2 +- 9 files changed, 25 insertions(+), 9 deletions(-) diff --git a/Changelog.md b/Changelog.md index a56ebf03091..389bfb0a406 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,3 +1,19 @@ +### 0.139.0 + +New Features: +* Support for `this` annotations in functions, like `function f(this: {foo: string}, param1: string): string { return this.foo; }` +* The `experimental.abstract_locations` config option is now `true` by default, as it enables significant performance improvements. 
This option is now deprecated and will be removed in a coming version. + +Notable bug fixes: +* Fixed a false positive when a bounded generic like `K: string` flows into `$Keys<{[K]: ...}>` +* Fixed a false positive when a bounded generic like `K: 'literal'` is checked against itself like `k === 'literal'` +* Fixed autocomplete inside of JSX attribute values +* Fixed autocomplete of properties of interfaces + +Misc: +* Updated `flow-remove-types` to support `this` parameters +* Added SpeechRecognition definitions (thanks @ayshiff) + ### 0.138.0 Likely to cause new Flow errors: diff --git a/flow_parser.opam b/flow_parser.opam index 11b74009508..ce47fbc6122 100644 --- a/flow_parser.opam +++ b/flow_parser.opam @@ -1,6 +1,6 @@ opam-version: "2.0" name: "flow_parser" -version: "0.138.0" +version: "0.139.0" maintainer: "flow@fb.com" authors: ["Flow Team "] homepage: "https://github.com/facebook/flow/tree/master/src/parser" diff --git a/flowtype.opam b/flowtype.opam index f02f0ffa916..9c64e735530 100644 --- a/flowtype.opam +++ b/flowtype.opam @@ -1,6 +1,6 @@ opam-version: "2.0" name: "flowtype" -version: "0.138.0" +version: "0.139.0" maintainer: "flow@fb.com" authors: "Flow Team " license: "MIT" diff --git a/packages/flow-parser-bin/package.json b/packages/flow-parser-bin/package.json index 101aeddde8f..0cc7d176e8c 100644 --- a/packages/flow-parser-bin/package.json +++ b/packages/flow-parser-bin/package.json @@ -1,6 +1,6 @@ { "name": "flow-parser-bin", - "version": "0.138.0", + "version": "0.139.0", "description": "The Flow JavaScript parser, via bindings to the native OCaml implementation", "main": "index.js", "repository": "https://github.com/facebook/flow.git", diff --git a/packages/flow-parser/package.json b/packages/flow-parser/package.json index b5d7bbd9fe6..9320d858383 100644 --- a/packages/flow-parser/package.json +++ b/packages/flow-parser/package.json @@ -1,6 +1,6 @@ { "name": "flow-parser", - "version": "0.138.0", + "version": "0.139.0", "description": 
"JavaScript parser written in OCaml. Produces ESTree AST", "homepage": "https://flow.org", "license": "MIT", diff --git a/packages/flow-remove-types/package.json b/packages/flow-remove-types/package.json index be5da859d0d..2b9f07eee54 100644 --- a/packages/flow-remove-types/package.json +++ b/packages/flow-remove-types/package.json @@ -1,6 +1,6 @@ { "name": "flow-remove-types", - "version": "2.138.0", + "version": "2.139.0", "description": "Removes Flow type annotations from JavaScript files with speed and simplicity.", "author": { "name": "Flow Team", @@ -43,7 +43,7 @@ "es6" ], "dependencies": { - "flow-parser": "^0.138.0", + "flow-parser": "^0.139.0", "pirates": "^3.0.2", "vlq": "^0.2.1" }, diff --git a/src/common/flow_version.ml b/src/common/flow_version.ml index cc33cada7bd..c4ea232f9a8 100644 --- a/src/common/flow_version.ml +++ b/src/common/flow_version.ml @@ -5,4 +5,4 @@ * LICENSE file in the root directory of this source tree. *) -let version = "0.138.0" +let version = "0.139.0" diff --git a/src/parser/META b/src/parser/META index dad8d8672ab..437dd2b7b95 100644 --- a/src/parser/META +++ b/src/parser/META @@ -1,5 +1,5 @@ name="parser_flow" -version="0.138.0" +version="0.139.0" requires = "sedlex wtf8" description="flow parser ocamlfind package" archive(byte)="parser_flow.cma" diff --git a/website/en/docs/_install/setup-npm.md b/website/en/docs/_install/setup-npm.md index 3df4010fea2..3ddc0e04471 100644 --- a/website/en/docs/_install/setup-npm.md +++ b/website/en/docs/_install/setup-npm.md @@ -11,7 +11,7 @@ npm install --save-dev flow-bin "name": "my-flow-project", "version": "1.0.0", "devDependencies": { - "flow-bin": "^0.138.0" + "flow-bin": "^0.139.0" }, "scripts": { "flow": "flow" From 5b3608a09f9469fc4a101a0dc4a4151fa2dd090a Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Mon, 7 Dec 2020 08:48:49 -0800 Subject: [PATCH 23/43] add test for add_autocomplete_token Summary: was investigating why sometimes autocomplete can't find the token in the AST, and 
the `broader_context` doesn't show the token, as if `add_autocomplete_token` is failing to insert it. added some tests, but it appears to be working as intended. Reviewed By: gkz Differential Revision: D25356961 fbshipit-source-id: f43b4ce90e3d389707c4b159d59443395839b72d --- .../__tests__/autocompleteService_js_tests.ml | 23 +++++++++++++++++++ .../__tests__/autocomplete_tests.ml | 12 ++++++++++ 2 files changed, 35 insertions(+) create mode 100644 src/services/autocomplete/__tests__/autocompleteService_js_tests.ml create mode 100644 src/services/autocomplete/__tests__/autocomplete_tests.ml diff --git a/src/services/autocomplete/__tests__/autocompleteService_js_tests.ml b/src/services/autocomplete/__tests__/autocompleteService_js_tests.ml new file mode 100644 index 00000000000..c10fb78a64b --- /dev/null +++ b/src/services/autocomplete/__tests__/autocompleteService_js_tests.ml @@ -0,0 +1,23 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 +open AutocompleteService_js + +let tests = + [ + "add_autocomplete_token" + >::: [ + ( "empty_line" >:: fun ctxt -> + let contents = "// @flow\n\nfoo\n\nbar" in + let expected = "// @flow\n\nfoo\nAUTO332\nbar" in + let (actual, broader_context) = add_autocomplete_token contents 4 0 in + assert_equal ~ctxt ~printer:(fun x -> x) expected actual; + let expected = "foo\nAUTO332\nbar" in + assert_equal ~ctxt ~printer:(fun x -> x) expected broader_context ); + ]; + ] diff --git a/src/services/autocomplete/__tests__/autocomplete_tests.ml b/src/services/autocomplete/__tests__/autocomplete_tests.ml new file mode 100644 index 00000000000..eea9b07852a --- /dev/null +++ b/src/services/autocomplete/__tests__/autocomplete_tests.ml @@ -0,0 +1,12 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "autocomplete" >::: ["autocompleteService_js" >::: AutocompleteService_js_tests.tests] + +let () = run_test_tt_main tests From d98548ef1a80d4caadee8a8efe8121d03446c351 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Mon, 7 Dec 2020 13:39:12 -0800 Subject: [PATCH 24/43] fix autocomplete suggesting types in value positions Summary: added a flag to `Hoister` and `Scope_builder` to not add types to the scope, for cases that are only interested in values. this fixes a bug with autocomplete in which we would suggest types in value positions (see new test; previously, it suggested all of those imported types!). it also fixes a bug where autocomplete would list `import type`'d classes twice when completing types, because it was finding it once as a type (correctly) and again as a value (incorrectly). Reviewed By: vrama628 Differential Revision: D25273237 fbshipit-source-id: 5f07002a739518c7edd830153ba0945610d09db6 --- newtests/lsp/completion/test.js | 42 ------------ .../__tests__/scope_builder_test.ml | 8 +-- src/parser_utils/file_sig.ml | 2 +- src/parser_utils/hoister.ml | 42 +++++++++--- src/parser_utils/scope_builder.ml | 68 +++++++++++++++---- src/parser_utils/scope_builder_sig.ml | 29 ++++---- src/parser_utils/ssa_builder.ml | 6 +- .../autocomplete/autocompleteService_js.ml | 2 +- src/services/get_def/getDef_js.ml | 2 +- src/services/references/variableFindRefs.ml | 2 +- src/typing/strict_es6_import_export.ml | 2 +- tests/autocomplete/autocomplete.exp | 20 +++--- tests/autocomplete/identifier_import_type.js | 15 ++++ tests/autocomplete/identifier_type.js | 8 +++ tests/autocomplete/test.sh | 2 + 15 files changed, 147 insertions(+), 103 deletions(-) create mode 100644 tests/autocomplete/identifier_import_type.js create mode 100644 tests/autocomplete/identifier_type.js diff --git 
a/newtests/lsp/completion/test.js b/newtests/lsp/completion/test.js index cbea8f95ef8..c729ac78a2f 100644 --- a/newtests/lsp/completion/test.js +++ b/newtests/lsp/completion/test.js @@ -162,34 +162,6 @@ export default (suite( newText: 'foo', }, }, - { - label: 'anInterface', - kind: 8, - detail: 'interface anInterface', - sortText: '00000000000000000000', - insertTextFormat: 1, - textEdit: { - range: { - start: {line: 13, character: 15}, - end: {line: 13, character: 15}, - }, - newText: 'anInterface', - }, - }, - { - label: 'aUnion', - kind: 13, - detail: 'type aUnion = "a" | "b"', - sortText: '00000000000000000000', - insertTextFormat: 1, - textEdit: { - range: { - start: {line: 13, character: 15}, - end: {line: 13, character: 15}, - }, - newText: 'aUnion', - }, - }, { label: 'aNumber', kind: 6, @@ -412,20 +384,6 @@ export default (suite( newText: 'React', }, }, - { - label: 'Props', - kind: 13, - detail: 'type Props = {a: number, ...}', - sortText: '00000000000000000000', - insertTextFormat: 1, - textEdit: { - range: { - start: {line: 11, character: 1}, - end: {line: 11, character: 1}, - }, - newText: 'Props', - }, - }, { label: 'D', kind: 3, diff --git a/src/parser_utils/__tests__/scope_builder_test.ml b/src/parser_utils/__tests__/scope_builder_test.ml index 2840a8fc576..76fa86f1a47 100644 --- a/src/parser_utils/__tests__/scope_builder_test.ml +++ b/src/parser_utils/__tests__/scope_builder_test.ml @@ -10,7 +10,7 @@ open Test_utils module Scope_api = Scope_api.With_Loc let mk_scope_builder_all_uses_test contents expected_all_uses ctxt = - let info = Scope_builder.program (parse contents) in + let info = Scope_builder.program ~with_types:true (parse contents) in let all_uses = Loc_collections.LocSet.elements @@ Scope_api.all_uses info in let printer = print_list Loc.debug_to_string in assert_equal @@ -22,7 +22,7 @@ let mk_scope_builder_all_uses_test contents expected_all_uses ctxt = all_uses let mk_scope_builder_locs_of_defs_of_all_uses_test contents 
expected_locs_of_defs ctxt = - let info = Scope_builder.program (parse contents) in + let info = Scope_builder.program ~with_types:true (parse contents) in let all_uses = Loc_collections.LocSet.elements @@ Scope_api.all_uses info in let defs = Base.List.map ~f:(Scope_api.def_of_use info) all_uses in let locs_of_defs = Base.List.map ~f:(fun { Scope_api.Def.locs; _ } -> Nel.to_list locs) defs in @@ -36,7 +36,7 @@ let mk_scope_builder_locs_of_defs_of_all_uses_test contents expected_locs_of_def locs_of_defs let mk_scope_builder_uses_of_all_uses_test contents expected_uses ctxt = - let info = Scope_builder.program (parse contents) in + let info = Scope_builder.program ~with_types:true (parse contents) in let all_uses = Loc_collections.LocSet.elements @@ Scope_api.all_uses info in let uses = Base.List.map @@ -56,7 +56,7 @@ let mk_scope_builder_uses_of_all_uses_test contents expected_uses ctxt = uses let mk_scope_builder_scope_loc_test contents expected_scope_locs ctxt = - let info = Scope_builder.program (parse contents) in + let info = Scope_builder.program ~with_types:true (parse contents) in let scope_locs = IMap.elements (IMap.map (fun scope -> scope.Scope_api.Scope.loc) info.Scope_api.scopes) in diff --git a/src/parser_utils/file_sig.ml b/src/parser_utils/file_sig.ml index 0b9ee72b3ea..b2ead9488ac 100644 --- a/src/parser_utils/file_sig.ml +++ b/src/parser_utils/file_sig.ml @@ -418,7 +418,7 @@ struct [(exports_info t' * tolerable_error list, error) result, L.t] visitor ~init:(Ok (mk_file_sig init_exports_info, [])) as super - val scope_info = Scope_builder.program ast + val scope_info : Scope_api.info = Scope_builder.program ~with_types:true ast val mutable curr_declare_module : exports_info module_sig' option = None diff --git a/src/parser_utils/hoister.ml b/src/parser_utils/hoister.ml index c452ac47f40..6aaf02ba532 100644 --- a/src/parser_utils/hoister.ml +++ b/src/parser_utils/hoister.ml @@ -82,7 +82,7 @@ end are known to introduce bindings. 
The logic here is sufficiently tricky that we probably should not change it without extensive testing. *) -class ['loc] hoister = +class ['loc] hoister ~with_types = object (this) inherit ['loc Bindings.t, 'loc] visitor ~init:Bindings.empty as super @@ -154,21 +154,21 @@ class ['loc] hoister = method! type_alias loc (alias : ('loc, 'loc) Ast.Statement.TypeAlias.t) = let open Ast.Statement.TypeAlias in - this#add_binding alias.id; + if with_types then this#add_binding alias.id; super#type_alias loc alias method! opaque_type loc (alias : ('loc, 'loc) Ast.Statement.OpaqueType.t) = let open Ast.Statement.OpaqueType in - this#add_binding alias.id; + if with_types then this#add_binding alias.id; super#opaque_type loc alias method! interface loc (interface : ('loc, 'loc) Ast.Statement.Interface.t) = let open Ast.Statement.Interface in - this#add_binding interface.id; + if with_types then this#add_binding interface.id; super#interface loc interface end -class ['loc] lexical_hoister = +class ['loc] lexical_hoister ~with_types = object (this) inherit ['loc Bindings.t, 'loc] visitor ~init:Bindings.empty as super @@ -238,16 +238,36 @@ class ['loc] lexical_hoister = this#add_binding id; enum + method! import_declaration loc decl = + let open Ast.Statement.ImportDeclaration in + let { import_kind; _ } = decl in + (* when `with_types` is false, don't visit `import type ...` or `import typeof ...` *) + match (with_types, import_kind) with + | (false, ImportType) + | (false, ImportTypeof) -> + decl + | _ -> super#import_declaration loc decl + method! import_named_specifier (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.named_specifier) = let open Ast.Statement.ImportDeclaration in - let binding = - match specifier with - | { local = Some binding; remote = _; kind = _ } - | { local = None; remote = binding; kind = _ } -> - binding + (* when `with_types` is false, only add bindings for values, not types. 
+ `import_declaration` avoids visiting specifiers for `import type` and + `import typeof`, so `kind = None` must mean a value here. *) + let allowed_kind = function + | None + | Some ImportValue -> + true + | Some ImportType + | Some ImportTypeof -> + with_types in - this#add_binding binding; + (match specifier with + | { local = Some binding; remote = _; kind } + | { local = None; remote = binding; kind } + when allowed_kind kind -> + this#add_binding binding + | _ -> ()); specifier method! import_default_specifier (id : ('loc, 'loc) Ast.Identifier.t) = diff --git a/src/parser_utils/scope_builder.ml b/src/parser_utils/scope_builder.ml index aa8fd1ac91a..f0e8ca82663 100644 --- a/src/parser_utils/scope_builder.ml +++ b/src/parser_utils/scope_builder.ml @@ -109,7 +109,7 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : env :: parent_env end - class scope_builder = + class scope_builder ~with_types = object (this) inherit [Acc.t, L.t] visitor ~init:Acc.init as super @@ -183,6 +183,24 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : uses <- Flow_ast_utils.ident_of_source (loc, name) :: uses; id + method! type_alias loc alias = + if not with_types then + alias + else + super#type_alias loc alias + + method! opaque_type loc alias = + if not with_types then + alias + else + super#opaque_type loc alias + + method! interface loc interface = + if not with_types then + interface + else + super#interface loc interface + (* don't rename the `foo` in `x.foo` *) method! member_property_identifier (id : (L.t, L.t) Ast.Identifier.t) = id @@ -194,13 +212,37 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : (* don't rename the `foo` in `{ foo: ... }` *) method! object_key_identifier (id : (L.t, L.t) Ast.Identifier.t) = id + method! 
import_declaration loc decl = + let open Ast.Statement.ImportDeclaration in + let { import_kind; _ } = decl in + (* when `with_types` is false, don't visit `import type ...` or `import typeof ...` *) + match (with_types, import_kind) with + | (false, ImportType) + | (false, ImportTypeof) -> + decl + | _ -> super#import_declaration loc decl + (* don't rename the `foo` in `import {foo as bar} from ...;` *) method! import_named_specifier (specifier : (L.t, L.t) Ast.Statement.ImportDeclaration.named_specifier) = let open Ast.Statement.ImportDeclaration in + (* when `with_types` is false, only visit values, not types. `import_declaration` + avoids visiting specifiers for `import type` and `import typeof`, so + `kind = None` must mean a value here. *) + let allowed_kind = function + | None + | Some ImportValue -> + true + | Some ImportType + | Some ImportTypeof -> + with_types + in (match specifier with - | { local = Some ident; _ } -> ignore (this#identifier ident) - | { local = None; remote = ident; _ } -> ignore (this#identifier ident)); + | { local = Some ident; remote = _; kind } + | { local = None; remote = ident; kind } + when allowed_kind kind -> + ignore (this#identifier ident) + | _ -> ()); specifier (* don't rename the `bar` in `export {foo as bar}` *) @@ -212,14 +254,14 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : spec method! block loc (stmt : (L.t, L.t) Ast.Statement.Block.t) = - let lexical_hoist = new lexical_hoister in + let lexical_hoist = new lexical_hoister ~with_types in let lexical_bindings = lexical_hoist#eval (lexical_hoist#block loc) stmt in this#with_bindings ~lexical:true loc lexical_bindings (super#block loc) stmt (* like block *) method! 
program (program : (L.t, L.t) Ast.Program.t) = let (loc, _) = program in - let lexical_hoist = new lexical_hoister in + let lexical_hoist = new lexical_hoister ~with_types in let lexical_bindings = lexical_hoist#eval lexical_hoist#program program in this#with_bindings ~lexical:true loc lexical_bindings super#program program @@ -229,7 +271,7 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : method! for_in_statement loc (stmt : (L.t, L.t) Ast.Statement.ForIn.t) = let open Ast.Statement.ForIn in let { left; right = _; body = _; each = _; comments = _ } = stmt in - let lexical_hoist = new lexical_hoister in + let lexical_hoist = new lexical_hoister ~with_types in let lexical_bindings = match left with | LeftDeclaration (loc, decl) -> @@ -249,7 +291,7 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : method! for_of_statement loc (stmt : (L.t, L.t) Ast.Statement.ForOf.t) = let open Ast.Statement.ForOf in let { left; right = _; body = _; await = _; comments = _ } = stmt in - let lexical_hoist = new lexical_hoister in + let lexical_hoist = new lexical_hoister ~with_types in let lexical_bindings = match left with | LeftDeclaration (loc, decl) -> @@ -269,7 +311,7 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : method! 
for_statement loc (stmt : (L.t, L.t) Ast.Statement.For.t) = let open Ast.Statement.For in let { init; test = _; update = _; body = _; comments = _ } = stmt in - let lexical_hoist = new lexical_hoister in + let lexical_hoist = new lexical_hoister ~with_types in let lexical_bindings = match init with | Some (InitDeclaration (loc, decl)) -> @@ -285,7 +327,7 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : let lexical_bindings = match param with | Some p -> - let lexical_hoist = new lexical_hoister in + let lexical_hoist = new lexical_hoister ~with_types in lexical_hoist#eval lexical_hoist#catch_clause_pattern p | None -> Bindings.empty in @@ -295,7 +337,7 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : method private lambda loc params body = (* function params and bindings within the function body share the same scope *) let bindings = - let hoist = new hoister in + let hoist = new hoister ~with_types in run hoist#function_params params; run hoist#function_body_any body; hoist#acc @@ -376,14 +418,14 @@ module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : expr end - let program ?(ignore_toplevel = false) program = + let program ?(ignore_toplevel = false) ~with_types program = let (loc, _) = program in - let walk = new scope_builder in + let walk = new scope_builder ~with_types in let bindings = if ignore_toplevel then Bindings.empty else - let hoist = new hoister in + let hoist = new hoister ~with_types in hoist#eval hoist#program program in walk#eval (walk#with_bindings loc bindings walk#program) program diff --git a/src/parser_utils/scope_builder_sig.ml b/src/parser_utils/scope_builder_sig.ml index b2f03b4a6eb..a5e62ca052b 100644 --- a/src/parser_utils/scope_builder_sig.ml +++ b/src/parser_utils/scope_builder_sig.ml @@ -14,25 +14,26 @@ module type S = sig type t = Api.info end - val program : ?ignore_toplevel:bool -> (L.t, L.t) Flow_ast.Program.t -> Acc.t + val program : ?ignore_toplevel:bool -> 
with_types:bool -> (L.t, L.t) Flow_ast.Program.t -> Acc.t class scope_builder : - object - inherit [Acc.t, L.t] Flow_ast_visitor.visitor + with_types:bool + -> object + inherit [Acc.t, L.t] Flow_ast_visitor.visitor - method with_bindings : - 'a. ?lexical:bool -> L.t -> L.t Hoister.Bindings.t -> ('a -> 'a) -> 'a -> 'a + method with_bindings : + 'a. ?lexical:bool -> L.t -> L.t Hoister.Bindings.t -> ('a -> 'a) -> 'a -> 'a - method private scoped_for_statement : - L.t -> (L.t, L.t) Flow_ast.Statement.For.t -> (L.t, L.t) Flow_ast.Statement.For.t + method private scoped_for_statement : + L.t -> (L.t, L.t) Flow_ast.Statement.For.t -> (L.t, L.t) Flow_ast.Statement.For.t - method private scoped_for_in_statement : - L.t -> (L.t, L.t) Flow_ast.Statement.ForIn.t -> (L.t, L.t) Flow_ast.Statement.ForIn.t + method private scoped_for_in_statement : + L.t -> (L.t, L.t) Flow_ast.Statement.ForIn.t -> (L.t, L.t) Flow_ast.Statement.ForIn.t - method private scoped_for_of_statement : - L.t -> (L.t, L.t) Flow_ast.Statement.ForOf.t -> (L.t, L.t) Flow_ast.Statement.ForOf.t + method private scoped_for_of_statement : + L.t -> (L.t, L.t) Flow_ast.Statement.ForOf.t -> (L.t, L.t) Flow_ast.Statement.ForOf.t - method private lambda : - L.t -> (L.t, L.t) Flow_ast.Function.Params.t -> (L.t, L.t) Flow_ast.Function.body -> unit - end + method private lambda : + L.t -> (L.t, L.t) Flow_ast.Function.Params.t -> (L.t, L.t) Flow_ast.Function.body -> unit + end end diff --git a/src/parser_utils/ssa_builder.ml b/src/parser_utils/ssa_builder.ml index 9aa444df7a4..e98fde38ce6 100644 --- a/src/parser_utils/ssa_builder.ml +++ b/src/parser_utils/ssa_builder.ml @@ -223,7 +223,8 @@ struct class ssa_builder = object (this) - inherit scope_builder as super + (* TODO: with_types should probably be false, but this maintains previous behavior *) + inherit scope_builder ~with_types:true as super (* We maintain a map of read locations to raw Val.t terms, which are simplified to lists of write locations once the analysis 
is done. *) @@ -1107,7 +1108,8 @@ struct if ignore_toplevel then Bindings.empty else - let hoist = new hoister in + (* TODO: with_types should probably be false, but this maintains previous behavior *) + let hoist = new hoister ~with_types:true in hoist#eval hoist#program program in ignore @@ ssa_walk#with_bindings loc bindings ssa_walk#program program; diff --git a/src/services/autocomplete/autocompleteService_js.ml b/src/services/autocomplete/autocompleteService_js.ml index 85428445075..a9dd4638b0a 100644 --- a/src/services/autocomplete/autocompleteService_js.ml +++ b/src/services/autocomplete/autocompleteService_js.ml @@ -511,7 +511,7 @@ let documentation_of_loc ~options ~reader ~cx ~file_sig ~typed_ast loc = None let local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~ast ~typed_ast ~tparams = - let scope_info = Scope_builder.program ast in + let scope_info = Scope_builder.program ~with_types:false ast in let open Scope_api.With_Loc in (* get the innermost scope enclosing the requested location *) let (ac_scope_id, _) = diff --git a/src/services/get_def/getDef_js.ml b/src/services/get_def/getDef_js.ml index a13fa670619..38dbfc1c5f0 100644 --- a/src/services/get_def/getDef_js.ml +++ b/src/services/get_def/getDef_js.ml @@ -48,7 +48,7 @@ let rec process_request ~options ~reader ~cx ~is_legit_require ~typed_ast : | Get_def_request.Identifier { name = _; loc = aloc; type_ } -> let loc = loc_of_aloc ~reader aloc in let ast = (new type_killer reader)#program typed_ast in - let scope_info = Scope_builder.program ast in + let scope_info = Scope_builder.program ~with_types:true ast in let all_uses = Scope_api.With_Loc.all_uses scope_info in Loc_collections.( let matching_uses = LocSet.filter (fun use -> Loc.contains use loc) all_uses in diff --git a/src/services/references/variableFindRefs.ml b/src/services/references/variableFindRefs.ml index b6b97377e58..060e8d044fa 100644 --- a/src/services/references/variableFindRefs.ml +++ 
b/src/services/references/variableFindRefs.ml @@ -10,7 +10,7 @@ module Scope_api = Scope_api.With_Loc let local_find_refs ast loc = Scope_api.( - let scope_info = Scope_builder.program ast in + let scope_info = Scope_builder.program ~with_types:true ast in let all_uses = all_uses scope_info in let matching_uses = LocSet.filter (fun use -> Loc.contains use loc) all_uses in let num_matching_uses = LocSet.cardinal matching_uses in diff --git a/src/typing/strict_es6_import_export.ml b/src/typing/strict_es6_import_export.ml index ac5133b2007..d52613fb96a 100644 --- a/src/typing/strict_es6_import_export.ml +++ b/src/typing/strict_es6_import_export.ml @@ -457,7 +457,7 @@ let detect_mixed_import_and_require_error cx declarations = | _ -> () let detect_errors_from_ast cx ast = - let scope_info = Scope_builder.With_ALoc.program ast in + let scope_info = Scope_builder.With_ALoc.program ~with_types:true ast in let declarations = gather_declarations ast in detect_mixed_import_and_require_error cx declarations; let visitor = new import_export_visitor ~cx ~scope_info ~declarations in diff --git a/tests/autocomplete/autocomplete.exp b/tests/autocomplete/autocomplete.exp index de320ee18df..c6eb55c4033 100644 --- a/tests/autocomplete/autocomplete.exp +++ b/tests/autocomplete/autocomplete.exp @@ -1040,6 +1040,14 @@ Flags: --pretty "result":[{"name":"foobaz","type":"number"},{"name":"foobarbaz","type":"number"}] } +identifier_import_type.js:14:4 +Flags: --pretty +{"result":[{"name":"ThisIsAValue","type":"string"}]} + +identifier_type.js:7:4 +Flags: --pretty +{"result":[{"name":"baz","type":"bar"}]} + super.js:4:8 Flags: --pretty { @@ -3367,18 +3375,6 @@ Flags: --lsp "newText":"myClass" } } -{ - "label":"ExportClass", - "kind":7, - "detail":"class ExportClass", - "documentation":{"kind":"markdown","value":"this is ExportClass"}, - "sortText":"00000000000000000000", - "insertTextFormat":1, - "textEdit":{ - "range":{"start":{"line":15,"character":5},"end":{"line":15,"character":5}}, - 
"newText":"ExportClass" - } -} { "label":"myFoo", "kind":13, diff --git a/tests/autocomplete/identifier_import_type.js b/tests/autocomplete/identifier_import_type.js new file mode 100644 index 00000000000..122a4f7b873 --- /dev/null +++ b/tests/autocomplete/identifier_import_type.js @@ -0,0 +1,15 @@ +// @flow + +import type { Typical, Tyrannosaurus } from './type-exports'; + +import { type Typesafety } from './type-exports'; + +import typeof TypeofDefault from './type-exports'; + +import { typeof Typhoon } from './type-exports'; + +declare var ThisIsAValue: string; + +// should not suggest types + T +// ^ diff --git a/tests/autocomplete/identifier_type.js b/tests/autocomplete/identifier_type.js new file mode 100644 index 00000000000..88b56f3df4b --- /dev/null +++ b/tests/autocomplete/identifier_type.js @@ -0,0 +1,8 @@ +// @flow + +type bar = string +const baz : bar = "baz" + +// should only suggest values, not types + b +// ^ diff --git a/tests/autocomplete/test.sh b/tests/autocomplete/test.sh index 520c0d6c310..64fdac12386 100755 --- a/tests/autocomplete/test.sh +++ b/tests/autocomplete/test.sh @@ -56,6 +56,8 @@ queries_in_file autocomplete "poly.js" --pretty queries_in_file autocomplete "poly_no_args.js" --pretty queries_in_file autocomplete "identifier.js" --pretty queries_in_file autocomplete "identifier_middle.js" --pretty +queries_in_file autocomplete "identifier_import_type.js" --pretty +queries_in_file autocomplete "identifier_type.js" --pretty queries_in_file autocomplete "super.js" --pretty queries_in_file autocomplete "this-2.js" --pretty queries_in_file autocomplete "pattern.js" --pretty From 5d92f4b6ba8170e6d82679bbfa9019e22cf5ef6a Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Mon, 7 Dec 2020 13:39:12 -0800 Subject: [PATCH 25/43] fix Ty_normalizer for imported enum types Summary: normalizes `TypeT (ImportEnumKind, DefT (_, _, EnumT _))` into a `Ty.Decl (Ty.EnumDecl)` instead of a `Ty.Type (Ty.Generic (... 
EnumKind ...)` this fixes a bug with autocomplete where `import type`'d enums weren't included in autocomplete for types. it also fixes bugs with type-at-pos that caused no results when hovering over the `E` in `import type {E} from ...`, `import { type E } from ...`, and any use of that `E`. -- `TypeT (ImportEnumKind, _)` comes from `import type { E }` or `import { type E }`, which is the type of the enum object, not an enum instance, so I normalize it the same way as a literal `EnumObjectT`. this also mirrors what we do for classes. see the very similar ` | DefT (_, _, TypeT (ImportClassKind, DefT (r, _, InstanceT (static, super, _, inst)))) ->` line just above these changes in ty_normalizer.ml, which we normalize into a `ClassDecl` even though it's an `InstanceT` Reviewed By: panagosg7 Differential Revision: D25273372 fbshipit-source-id: 4028f73259df4af8acedbbcc5f200749a0ee2bab --- src/typing/ty_normalizer.ml | 17 +++-- tests/autocomplete/autocomplete.exp | 12 ++++ tests/type_at_pos_enums/export.js | 2 + tests/type_at_pos_enums/test.js | 22 ++++++ tests/type_at_pos_enums/type_at_pos_enums.exp | 67 +++++++++++++++++-- 5 files changed, 108 insertions(+), 12 deletions(-) diff --git a/src/typing/ty_normalizer.ml b/src/typing/ty_normalizer.ml index 93cea145463..0a08ac3eaf1 100644 --- a/src/typing/ty_normalizer.ml +++ b/src/typing/ty_normalizer.ml @@ -1952,6 +1952,11 @@ end = struct let%map symbol = Reason_utils.instance_symbol env r in Ty.Decl (Ty.ClassDecl (symbol, ps)) in + let enum_decl ~env reason enum = + let { T.enum_name; _ } = enum in + let symbol = symbol_from_reason env reason enum_name in + return (Ty.Decl Ty.(EnumDecl symbol)) + in let singleton_poly ~env ~orig_t tparams = function (* Imported interfaces *) | DefT (_, _, TypeT (ImportClassKind, DefT (r, _, InstanceT (static, super, _, inst)))) -> @@ -1991,6 +1996,10 @@ end = struct | DefT (_, _, TypeT (InstanceKind, DefT (r, _, InstanceT (static, super, _, inst)))) | DefT (_, _, TypeT (ImportClassKind, 
DefT (r, _, InstanceT (static, super, _, inst)))) -> class_or_interface_decl ~env r None static super inst + (* Enums *) + | DefT (reason, _, EnumObjectT enum) + | DefT (_, _, TypeT (ImportEnumKind, DefT (reason, _, EnumT enum))) -> + enum_decl ~env reason enum (* Monomorphic Type Aliases *) | DefT (r, _, TypeT (kind, t)) -> let r = @@ -1999,11 +2008,6 @@ end = struct | _ -> TypeUtil.reason_of_t t in type_t ~env r kind t None - (* Enums *) - | DefT (reason, _, EnumObjectT enum) -> - let { T.enum_name; _ } = enum in - let symbol = symbol_from_reason env reason enum_name in - return (Ty.Decl Ty.(EnumDecl symbol)) (* Types *) | _ -> let%map t = TypeConverter.convert_t ~env orig_t in @@ -2174,7 +2178,8 @@ end = struct let def_loc_of_decl = function | TypeAliasDecl { import = false; name = { sym_def_loc; _ }; _ } | ClassDecl ({ sym_def_loc; _ }, _) - | InterfaceDecl ({ sym_def_loc; _ }, _) -> + | InterfaceDecl ({ sym_def_loc; _ }, _) + | EnumDecl { sym_def_loc; _ } -> Some sym_def_loc | TypeAliasDecl { import = true; type_ = Some t; _ } -> def_loc_of_ty t | _ -> None diff --git a/tests/autocomplete/autocomplete.exp b/tests/autocomplete/autocomplete.exp index c6eb55c4033..54de5534b5c 100644 --- a/tests/autocomplete/autocomplete.exp +++ b/tests/autocomplete/autocomplete.exp @@ -3459,6 +3459,18 @@ Flags: --lsp "newText":"ExportInterface" } } +{ + "label":"ExportEnum", + "kind":13, + "detail":"enum ExportEnum", + "documentation":{"kind":"markdown","value":"this is ExportEnum"}, + "sortText":"00000000000000000000", + "insertTextFormat":1, + "textEdit":{ + "range":{"start":{"line":15,"character":5},"end":{"line":15,"character":5}}, + "newText":"ExportEnum" + } +} { "label":"exportValue", "kind":13, diff --git a/tests/type_at_pos_enums/export.js b/tests/type_at_pos_enums/export.js index 74fd4131d63..8c38d4b5e5c 100644 --- a/tests/type_at_pos_enums/export.js +++ b/tests/type_at_pos_enums/export.js @@ -9,3 +9,5 @@ export default enum G { A, B, } + +export type FType = F; diff 
--git a/tests/type_at_pos_enums/test.js b/tests/type_at_pos_enums/test.js index 4e6610b0f47..39654b0d061 100644 --- a/tests/type_at_pos_enums/test.js +++ b/tests/type_at_pos_enums/test.js @@ -9,14 +9,36 @@ const x = E.A; const y = E.A; // ^ +const z: E = E.A; +// ^ + type T = Class; // ^ import type {F} from './export.js'; +// ^ declare var f: F; // ^ +declare var f2: F; +// ^ import type G from './export.js'; +// ^ declare var g: G; // ^ +declare var g2: G; +// ^ +import H from './export.js'; +// ^ +declare var h: H; +// ^ +declare var h2: H; +// ^ + +import type {FType} from './export.js'; +// ^ +declare var i: FType; +// ^ +declare var i2: FType; +// ^ diff --git a/tests/type_at_pos_enums/type_at_pos_enums.exp b/tests/type_at_pos_enums/type_at_pos_enums.exp index 20fb5710732..9f122a4b62e 100644 --- a/tests/type_at_pos_enums/type_at_pos_enums.exp +++ b/tests/type_at_pos_enums/type_at_pos_enums.exp @@ -13,18 +13,73 @@ Flags: E test.js:9:13,9:13 -test.js:12:6 +test.js:12:10 +Flags: +enum E +test.js:12:10,12:10 + +test.js:15:6 Flags: type T = Class -test.js:12:6,12:6 +test.js:15:6,15:6 -test.js:16:13 +test.js:18:14 Flags: -F -test.js:16:13,16:13 +enum F +test.js:18:14,18:14 test.js:20:13 Flags: -G +F test.js:20:13,20:13 +test.js:22:17 +Flags: +enum F +test.js:22:17,22:17 + +test.js:25:13 +Flags: +enum G +test.js:25:13,25:13 + +test.js:27:13 +Flags: +G +test.js:27:13,27:13 + +test.js:29:17 +Flags: +enum G +test.js:29:17,29:17 + +test.js:32:8 +Flags: +enum G +test.js:32:8,32:8 + +test.js:34:13 +Flags: +G +test.js:34:13,34:13 + +test.js:36:17 +Flags: +enum G +test.js:36:17,36:17 + +test.js:39:14 +Flags: +type FType = F +test.js:39:14,39:18 + +test.js:41:13 +Flags: +FType +test.js:41:13,41:13 + +test.js:43:17 +Flags: +type FType = F +test.js:43:17,43:21 + From bd1cda37e8c58732a095f7501f720fbb26d5f829 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Tue, 8 Dec 2020 09:05:49 -0800 Subject: [PATCH 26/43] more info when can't extract flowlibs Summary: we've received 
reports of "Could not locate flowlib files" errors, so add some extra details to figure out why that's happening. Reviewed By: Hans-Halverson Differential Revision: D25398001 fbshipit-source-id: 3314605e6bf9d05b9141edaea0b67c9a2bb018fe --- src/commands/commandUtils.ml | 10 ++++++---- src/common/exit_status/flowExitStatus.ml | 5 +++++ src/common/exit_status/flowExitStatus.mli | 1 + src/monitor/flowServerMonitorServer.ml | 1 + 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/commands/commandUtils.ml b/src/commands/commandUtils.ml index 0ca81bb13f7..8d91a2d7018 100644 --- a/src/commands/commandUtils.ml +++ b/src/commands/commandUtils.ml @@ -539,13 +539,15 @@ let remove_exclusion pattern = let file_options = let default_lib_dir ~no_flowlib tmp_dir = - let lib_dir = Flowlib.mkdir ~no_flowlib tmp_dir in try + let lib_dir = Flowlib.mkdir ~no_flowlib tmp_dir in Flowlib.extract ~no_flowlib lib_dir; lib_dir - with _ -> - let msg = "Could not locate flowlib files" in - FlowExitStatus.(exit ~msg Could_not_find_flowconfig) + with e -> + let e = Exception.wrap e in + let err = Exception.get_ctor_string e in + let msg = Printf.sprintf "Could not locate flowlib files: %s" err in + FlowExitStatus.(exit ~msg Could_not_extract_flowlibs) in let ignores_of_arg root patterns extras = let expand_project_root_token = Files.expand_project_root_token ~root in diff --git a/src/common/exit_status/flowExitStatus.ml b/src/common/exit_status/flowExitStatus.ml index 365b730a535..0a747aaac87 100644 --- a/src/common/exit_status/flowExitStatus.ml +++ b/src/common/exit_status/flowExitStatus.ml @@ -36,6 +36,8 @@ type t = | Lock_stolen (* Specific error for not being able to find a .flowconfig *) | Could_not_find_flowconfig + (* Failed to extract flowlibs into temp dir *) + | Could_not_extract_flowlibs (* Generic out-of-date error. 
This could be a version thing or maybe * something changed and Flow can't handle it incrementally yet *) | Server_out_of_date @@ -113,6 +115,7 @@ let error_code = function | Killed_by_monitor -> 19 | Invalid_saved_state -> 20 | Restart -> 21 + | Could_not_extract_flowlibs -> 22 | Commandline_usage_error -> 64 | No_input -> 66 | Server_start_failed _ -> 78 @@ -150,6 +153,7 @@ let error_type = function | 19 -> Killed_by_monitor | 20 -> Invalid_saved_state | 21 -> Restart + | 22 -> Could_not_extract_flowlibs | 64 -> Commandline_usage_error | 66 -> No_input (* The process status is made up *) @@ -176,6 +180,7 @@ let to_string = function | No_error -> "Ok" | Input_error -> "Input_error" | Could_not_find_flowconfig -> "Could_not_find_flowconfig" + | Could_not_extract_flowlibs -> "Could_not_extract_flowlibs" | Server_out_of_date -> "Server_out_of_date" | Server_client_directory_mismatch -> "Server_client_directory_mismatch" | Out_of_shared_memory -> "Out_of_shared_memory" diff --git a/src/common/exit_status/flowExitStatus.mli b/src/common/exit_status/flowExitStatus.mli index 031ce106d71..ee82bf54cba 100644 --- a/src/common/exit_status/flowExitStatus.mli +++ b/src/common/exit_status/flowExitStatus.mli @@ -21,6 +21,7 @@ type t = | Input_error | Lock_stolen | Could_not_find_flowconfig + | Could_not_extract_flowlibs | Server_out_of_date | Out_of_shared_memory | Flowconfig_changed diff --git a/src/monitor/flowServerMonitorServer.ml b/src/monitor/flowServerMonitorServer.ml index 49b801a8279..bbfcf4cf6bc 100644 --- a/src/monitor/flowServerMonitorServer.ml +++ b/src/monitor/flowServerMonitorServer.ml @@ -520,6 +520,7 @@ module KeepAliveLoop = LwtLoop.Make (struct (* The heap is full. Restarting might help clear out cruft, but it could also just be too small, leading to a crash loop. We should limit how often we try restarting before recovering from this. 
*) + | Could_not_extract_flowlibs (**** Things that the server shouldn't use, but would imply that the monitor should exit ****) | Interrupted | Build_id_mismatch From 0fd9538ffac3c7f24678ed8a833a42b0b7b6f4eb Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Tue, 8 Dec 2020 11:35:01 -0800 Subject: [PATCH 27/43] extract entire token being autocompleted Summary: autocomplete is requested at a cursor location. we walk the AST and find the token under the cursor. this diff returns the text of that token. it's currently not hooked up to anything, but is useful for filtering results based on what the user has typed so far. for example, if you complete `fo|`, we now get back `Acid { name = "fo"; ... }`, so we can filter to only identifiers that (fuzzy) match `fo`, say `foo` and not `bar`. today, we return all identifiers that are in the cursor's scope, regardless of what you've typed. Reviewed By: vrama628 Differential Revision: D25377140 fbshipit-source-id: d9823b2cec86326caaab90958d9173bf8fffd1b8 --- .../autocomplete/autocompleteService_js.ml | 36 +++++++++++++++---- src/services/autocomplete/autocomplete_js.ml | 18 +++++----- 2 files changed, 39 insertions(+), 15 deletions(-) diff --git a/src/services/autocomplete/autocompleteService_js.ml b/src/services/autocomplete/autocompleteService_js.ml index a9dd4638b0a..567261f9f7e 100644 --- a/src/services/autocomplete/autocompleteService_js.ml +++ b/src/services/autocomplete/autocompleteService_js.ml @@ -567,7 +567,18 @@ let local_value_identifiers ~options ~reader ~cx ~ac_loc ~file_sig ~ast ~typed_a (* env is all visible bound names at cursor *) let autocomplete_id - ~options ~reader ~cx ~ac_loc ~file_sig ~ast ~typed_ast ~include_super ~include_this ~tparams = + ~options + ~reader + ~cx + ~ac_loc + ~file_sig + ~ast + ~typed_ast + ~include_super + ~include_this + ~tparams + ~token:_ = + (* TODO: filter to results that match `token` *) let open ServerProt.Response.Completion in let ac_loc = loc_of_aloc ~reader ac_loc |> 
remove_autocomplete_token_from_loc in let exact_by_default = Context.exact_by_default cx in @@ -792,7 +803,9 @@ let type_exports_of_module_ty ~ac_loc ~exact_by_default ~documentation_of_module |> Base.List.mapi ~f:(fun i r -> { r with sort_text = sort_text_of_rank i }) | _ -> [] -let autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~file_sig ~ac_loc ~ast ~typed_ast = +let autocomplete_unqualified_type + ~options ~reader ~cx ~tparams ~file_sig ~ac_loc ~ast ~typed_ast ~token:_ = + (* TODO: filter to results that match `token` *) let open ServerProt.Response.Completion in let ac_loc = loc_of_aloc ~reader ac_loc |> remove_autocomplete_token_from_loc in let exact_by_default = Context.exact_by_default cx in @@ -904,7 +917,7 @@ let autocomplete_get_results ~options ~reader ~cx ~file_sig ~ast ~typed_ast trig (* TODO: complete object keys based on their upper bounds *) let result = { ServerProt.Response.Completion.items = []; is_incomplete = false } in ("Ackey", AcResult { result; errors_to_log = [] }) - | Some (tparams, ac_loc, Acid { include_super; include_this }) -> + | Some (tparams, ac_loc, Acid { token; include_super; include_this }) -> ( "Acid", autocomplete_id ~options @@ -916,7 +929,8 @@ let autocomplete_get_results ~options ~reader ~cx ~file_sig ~ast ~typed_ast trig ~typed_ast ~include_super ~include_this - ~tparams ) + ~tparams + ~token ) | Some (tparams, ac_loc, Acmem { obj_type; in_optional_chain }) -> ( "Acmem", autocomplete_member @@ -932,10 +946,18 @@ let autocomplete_get_results ~options ~reader ~cx ~file_sig ~ast ~typed_ast trig | Some (tparams, ac_loc, Acjsx (ac_name, used_attr_names, cls)) -> ( "Acjsx", autocomplete_jsx ~reader cx file_sig typed_ast cls ac_name ~used_attr_names ac_loc ~tparams ) - | Some (tparams, ac_loc, Actype) -> + | Some (tparams, ac_loc, Actype { token }) -> ( "Actype", - autocomplete_unqualified_type ~options ~reader ~cx ~tparams ~ac_loc ~ast ~typed_ast ~file_sig - ) + autocomplete_unqualified_type + ~options + ~reader + 
~cx + ~tparams + ~ac_loc + ~ast + ~typed_ast + ~file_sig + ~token ) | Some (tparams, ac_loc, Acqualifiedtype qtype) -> ( "Acqualifiedtype", autocomplete_qualified_type ~reader ~cx ~ac_loc ~file_sig ~typed_ast ~tparams ~qtype ) diff --git a/src/services/autocomplete/autocomplete_js.ml b/src/services/autocomplete/autocomplete_js.ml index d1f7b930de8..97b62e8f2c0 100644 --- a/src/services/autocomplete/autocomplete_js.ml +++ b/src/services/autocomplete/autocomplete_js.ml @@ -14,6 +14,7 @@ type autocomplete_type = | Accomment (* identifier references *) | Acid of { + token: string; include_super: bool; include_this: bool; } @@ -24,7 +25,7 @@ type autocomplete_type = (* a module name *) | Acmodule (* type identifiers *) - | Actype + | Actype of { token: string } (* qualified type identifiers *) | Acqualifiedtype of Type.t (* member expressions *) @@ -81,15 +82,15 @@ class process_request_searcher (from_trigger_character : bool) (cursor : Loc.t) else super#comment c - method! t_identifier (((loc, _), _) as ident) = + method! t_identifier (((loc, _), { Flow_ast.Identifier.name; _ }) as ident) = if this#covers_target loc then - this#find loc (Acid { include_super = false; include_this = false }) + this#find loc (Acid { token = name; include_super = false; include_this = false }) else super#t_identifier ident - method! jsx_identifier (((ac_loc, _), _) as ident) = + method! jsx_identifier (((ac_loc, _), { Flow_ast.JSX.Identifier.name; _ }) as ident) = if this#covers_target ac_loc then - this#find ac_loc (Acid { include_super = false; include_this = false }); + this#find ac_loc (Acid { token = name; include_super = false; include_this = false }); ident method! member expr = @@ -217,8 +218,8 @@ class process_request_searcher (from_trigger_character : bool) (cursor : Loc.t) method! 
class_body x = try super#class_body x - with Found (tparams, loc, Acid _) -> - raise (Found (tparams, loc, Acid { include_super = true; include_this = true })) + with Found (tparams, loc, Acid id) -> + raise (Found (tparams, loc, Acid { id with include_super = true; include_this = true })) method! function_expression x = try super#function_expression x @@ -234,7 +235,8 @@ class process_request_searcher (from_trigger_character : bool) (cursor : Loc.t) let open Flow_ast.Type.Generic.Identifier in begin match id with - | Unqualified ((loc, _), _) when this#covers_target loc -> this#find loc Actype + | Unqualified ((loc, _), { Flow_ast.Identifier.name; _ }) when this#covers_target loc -> + this#find loc (Actype { token = name }) | Qualified (_, { qualification; id = ((loc, _), _) }) when this#covers_target loc -> let qualification_type = type_of_qualification qualification in this#find loc (Acqualifiedtype qualification_type) From 0e4894b2109d47b86823986c10e01ce5425704f4 Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Tue, 8 Dec 2020 11:46:07 -0800 Subject: [PATCH 28/43] Fix issue when trying to remove non-existent heap entry Summary: hh_remove has an unenforced precondition that the removed item must exist. Before D24594657 (https://github.com/facebook/flow/commit/6a35c176f16ecffb8a42a71ab8d4a7b508c7af88), this was satisfied because we would always check hh_mem before calling hh_remove, but that diff removed the mem check. If we oldify an entry, the slot remains occupied (the hash field is set) but the addr is cleared to NULL. If the addr field is null, hh_remove would try to dereference the NULL address, leading to bad behavior. The NULL address is 0, which points to the hash slot of the first hash table element. We read this 64 bit hash as a header and extract a size from it. However, this size is just some meaningless number, and if the slot is occupied, likely to be very large. 
The net effect of this bug is that we added a large number to the "free heap size" counter, which triggered GC too aggressively. I observed this behavior by looking at the logs, observing that we triggered a compaction with only 0.6% wasted space instead of the expected 20%. Presumably, this also causes the hcounter_filled counter to be bogus. This is scary as well, since the counter is unsigned, and would wrap around to MAX_LONG if we decremented it past zero. One question that remains is, why do we try to remove something after first oldifying it? I traced the example I found which led to the "commit modules" phase in the Module_js module. If a change includes removing a file, we will first open a transaction including that deleted file, which has the effect of oldifying the NameHeap entry for that file's Haste module name. During commit modules, we also call `remove_and_replace` which tries to directly remove the same NameHeap entry. I suspect that the attempt to remove the entry is left over from an earlier time, before cancelable rechecks and transactions, but I'm leaving further investigation on this point for follow-up work. Reviewed By: nmote Differential Revision: D25387033 fbshipit-source-id: dc9684a840a0d41082c6cf1a7ca87c237acaf5c2 --- src/heap/hh_shared.c | 1 + src/heap/sharedMem.ml | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/heap/hh_shared.c b/src/heap/hh_shared.c index 61ce0b51d6b..af2741e2917 100644 --- a/src/heap/hh_shared.c +++ b/src/heap/hh_shared.c @@ -1540,6 +1540,7 @@ CAMLprim value hh_remove(value key) { assert_master(); assert(hashtbl[slot].hash == get_hash(key)); + assert(hashtbl[slot].addr != NULL_ADDR); // see hh_alloc for the source of this size heap_entry_t *entry = Entry_of_addr(hashtbl[slot].addr); // TODO: Wasted heap size calculation is under-counted. 
This will prevent GC diff --git a/src/heap/sharedMem.ml b/src/heap/sharedMem.ml index b86250505ee..d69ab410710 100644 --- a/src/heap/sharedMem.ml +++ b/src/heap/sharedMem.ml @@ -209,7 +209,9 @@ module HashtblSegment (Key : Key) = struct let get_old k = get_hash (old_hash_of_key k) - let remove k = hh_remove (new_hash_of_key k) + let remove k = + let new_hash = new_hash_of_key k in + if hh_mem new_hash then hh_remove new_hash (* We oldify entries that might be changed by an operation, which involves * moving the address of the current heap value from the "new" key to the From 35b1787691d95ae9cbb678e33853dd943a7be347 Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Tue, 8 Dec 2020 12:06:01 -0800 Subject: [PATCH 29/43] unique flowlib dir per user (fixes "Could not locate flowlib files") Summary: the files in `/tmp/flow` have default permissions of `644` (rw-r--r--). in v0.138.0, we started using a consistent directory for flowlibs, `/tmp/flow/flowlib_[hash]`, which ends up writable only by the user that first creates it. if multiple users run flow on the same machine, the second user gets a permission error. to fix, we now include the effective user ID in the path: `/tmp/flow/flowlib_[hash]_[euid]`. the euid is what is used for file permission checks, so the directory is unique per user. on Windows, the euid is always 1, but we write to `TEMP` which is per-user on Windows. NOTE: technically, we still have the same problem with all the other files (logs, pid, sockets, etc) but we have things relying on those paths right now so I'm not changing them. it's much less likely for 2 users to run flow on the same directory. the workaround is to use `FLOW_TEMP_DIR` to write everything into a per-user temp directory, say `~/.cache/flow`. 
Fixes https://github.com/facebook/flow/issues/8539 Reviewed By: samwgoldman Differential Revision: D25400628 fbshipit-source-id: 45f0c4f174ace707dd0565409051ad71c7b5f69e --- src/flowlib/flowlib.ml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/flowlib/flowlib.ml b/src/flowlib/flowlib.ml index 0ed33cc5027..e28c752e99f 100644 --- a/src/flowlib/flowlib.ml +++ b/src/flowlib/flowlib.ml @@ -17,8 +17,14 @@ let contents no_flowlib : (string * string) array = else [%flowlib_contents] +(** [mkdir ~no_flowlib parent_dir] creates a directory under [parent_dir] + within which the flowlib files will be extracted. This directory is + named uniquely based on the flowlib contents, as well as the effective + user ID (euid) of the current process. The euid is used to ensure that + the directory is writable by the current user. *) let mkdir ~no_flowlib parent_dir = - let libdir = Path.concat parent_dir (Printf.sprintf "flowlib_%s" (hash no_flowlib)) in + let euid = Unix.geteuid () in + let libdir = Path.concat parent_dir (Printf.sprintf "flowlib_%s_%d" (hash no_flowlib) euid) in Sys_utils.mkdir_no_fail (Path.to_string parent_dir); Sys_utils.mkdir_no_fail (Path.to_string libdir); libdir From 2951e3681fbb66f6552c282610953f2b41dd46ac Mon Sep 17 00:00:00 2001 From: Marshall Roch Date: Tue, 8 Dec 2020 12:16:01 -0800 Subject: [PATCH 30/43] remove unused Sys_utils.logname Reviewed By: gkz Differential Revision: D25356988 fbshipit-source-id: 2e81467a1a26dd4406f0e94ebc4e8f59b63d5830 --- src/hack_forked/utils/sys/sys_utils.ml | 36 -------------------------- 1 file changed, 36 deletions(-) diff --git a/src/hack_forked/utils/sys/sys_utils.ml b/src/hack_forked/utils/sys/sys_utils.ml index fefb2df0719..5e7439d4f8a 100644 --- a/src/hack_forked/utils/sys/sys_utils.ml +++ b/src/hack_forked/utils/sys/sys_utils.ml @@ -16,18 +16,6 @@ external is_apple_os : unit -> bool = "hh_sysinfo_is_apple_os" (** Option type intead of exception throwing. 
*) let get_env name = (try Some (Sys.getenv name) with Not_found -> None) -let getenv_user () = - let user_var = - if Sys.win32 then - "USERNAME" - else - "USER" - in - let logname_var = "LOGNAME" in - let user = get_env user_var in - let logname = get_env logname_var in - Base.Option.first_some user logname - let getenv_home () = let home_var = if Sys.win32 then @@ -199,30 +187,6 @@ let restart () = let argv = Sys.argv in Unix.execv cmd argv -let logname_impl () = - match getenv_user () with - | Some user -> user - | None -> - (* If this function is generally useful, it can be lifted to toplevel - in this file, but this is the only place we need it for now. *) - let exec_try_read cmd = - let ic = Unix.open_process_in cmd in - let out = (try Some (input_line ic) with End_of_file -> None) in - let status = Unix.close_process_in ic in - match (out, status) with - | (Some _, Unix.WEXITED 0) -> out - | _ -> None - in - (try Utils.unsafe_opt (exec_try_read "logname") - with Invalid_argument _ -> - (try Utils.unsafe_opt (exec_try_read "id -un") with Invalid_argument _ -> "[unknown]")) - -let logname_ref = ref None - -let logname () = - if !logname_ref = None then logname_ref := Some (logname_impl ()); - Utils.unsafe_opt !logname_ref - let with_umask umask f = let old_umask = ref 0 in Utils.with_context From 3b9cc521d29e22fea91272c7b31eea68eb2be3dc Mon Sep 17 00:00:00 2001 From: Vijay Ramamurthy Date: Wed, 9 Dec 2020 09:43:32 -0800 Subject: [PATCH 31/43] write documentation coordinates to glean index Reviewed By: zsol Differential Revision: D24396332 fbshipit-source-id: 1c5490cc6721ebf49840507406215d3b52f24c06 --- src/services/jsdoc/find_documentation.ml | 29 ++++++++++++++++------- src/services/jsdoc/find_documentation.mli | 2 ++ 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/src/services/jsdoc/find_documentation.ml b/src/services/jsdoc/find_documentation.ml index 7da970303e1..fa5c41ce704 100644 --- a/src/services/jsdoc/find_documentation.ml +++ 
b/src/services/jsdoc/find_documentation.ml @@ -5,10 +5,10 @@ * LICENSE file in the root directory of this source tree. *) -exception Found of Jsdoc.t +exception FoundJsdoc of Jsdoc.t -let find comments = - Base.Option.iter (Jsdoc.of_comments comments) ~f:(fun jsdoc -> raise (Found jsdoc)) +let find_jsdoc comments = + Base.Option.iter (Jsdoc.of_comments comments) ~f:(fun jsdoc -> raise (FoundJsdoc jsdoc)) let loc_of_object_key = let open Flow_ast.Expression.Object.Property in @@ -54,7 +54,7 @@ let replace_comments_of_statement ~comments = | EnumDeclaration x -> EnumDeclaration EnumDeclaration.{ x with comments } | other -> other) -class documentation_searcher (def_loc : Loc.t) = +class documentation_searcher find (def_loc : Loc.t) = object (this) inherit [unit, Loc.t] Flow_ast_visitor.visitor ~init:() as super @@ -224,12 +224,12 @@ class documentation_searcher (def_loc : Loc.t) = super#interface loc interface end -let search def_loc ast = - let searcher = new documentation_searcher def_loc in +let search_jsdoc def_loc ast = + let searcher = new documentation_searcher find_jsdoc def_loc in try ignore (searcher#program ast); None - with Found documentation -> Some documentation + with FoundJsdoc documentation -> Some documentation module Remove_types = struct open Parsing_heaps_utils @@ -264,7 +264,7 @@ let jsdoc_of_getdef_loc ?current_ast ~reader def_loc = | Some _ as some_ast -> some_ast | None -> Parsing_heaps.Reader.get_ast ~reader source in - search def_loc ast + search_jsdoc def_loc ast let documentation_of_jsdoc jsdoc = let documentation_of_unrecognized_tag (tag_name, tag_description) = @@ -283,3 +283,16 @@ let documentation_of_jsdoc jsdoc = match documentation_strings with | [] -> None | _ -> Some (String.concat "\n\n" documentation_strings) + +exception FoundCommentLoc of Loc.t + +let find_comment_loc = + Base.Option.iter ~f:(fun Flow_ast.Syntax.{ leading; _ } -> + Base.Option.iter (Base.List.last leading) ~f:(fun (loc, _) -> raise (FoundCommentLoc loc))) + 
+let comment_loc_of_getdef_loc ast def_loc = + let searcher = new documentation_searcher find_comment_loc def_loc in + try + ignore (searcher#program ast); + None + with FoundCommentLoc comment_loc -> Some comment_loc diff --git a/src/services/jsdoc/find_documentation.mli b/src/services/jsdoc/find_documentation.mli index 19d4f40e2a1..4e9f6442a0b 100644 --- a/src/services/jsdoc/find_documentation.mli +++ b/src/services/jsdoc/find_documentation.mli @@ -12,3 +12,5 @@ val jsdoc_of_getdef_loc : Jsdoc.t option val documentation_of_jsdoc : Jsdoc.t -> string option + +val comment_loc_of_getdef_loc : (Loc.t, Loc.t) Flow_ast.Program.t -> Loc.t -> Loc.t option From f850ebc80e6a0f6132923441dad5af87c04dcfc6 Mon Sep 17 00:00:00 2001 From: Vijay Ramamurthy Date: Wed, 9 Dec 2020 09:43:32 -0800 Subject: [PATCH 32/43] make type-at-pos find documentation on opaque types Summary: Looks like I forgot to make the JSDoc documentation searcher find documentation on opaque type declarations. This fixes that. Reviewed By: gkz Differential Revision: D24929709 fbshipit-source-id: c83eb206827176836de811b04835ac79673af1bf --- src/services/jsdoc/find_documentation.ml | 6 + tests/type_at_pos_jsdoc/type-exports.js | 2 + tests/type_at_pos_jsdoc/type_at_pos_jsdoc.exp | 128 ++++++++++++------ tests/type_at_pos_jsdoc/types.js | 8 +- 4 files changed, 98 insertions(+), 46 deletions(-) diff --git a/src/services/jsdoc/find_documentation.ml b/src/services/jsdoc/find_documentation.ml index fa5c41ce704..90cb3ea48ea 100644 --- a/src/services/jsdoc/find_documentation.ml +++ b/src/services/jsdoc/find_documentation.ml @@ -217,6 +217,12 @@ class documentation_searcher find (def_loc : Loc.t) = if this#is_target id_loc then find comments; super#type_alias loc type_alias + method! opaque_type loc opaque_type = + let open Flow_ast.Statement.OpaqueType in + let { id = (id_loc, _); comments; _ } = opaque_type in + if this#is_target id_loc then find comments; + super#opaque_type loc opaque_type + method! 
interface loc interface = let open Flow_ast.Statement.Interface in let { id = (id_loc, _); comments; _ } = interface in diff --git a/tests/type_at_pos_jsdoc/type-exports.js b/tests/type_at_pos_jsdoc/type-exports.js index 3e727ff50f9..92822eac08a 100644 --- a/tests/type_at_pos_jsdoc/type-exports.js +++ b/tests/type_at_pos_jsdoc/type-exports.js @@ -4,6 +4,8 @@ export type ExportFoo = number; /** this is ExportBar */ export type ExportBar = ?T; +/** this is ExportOpaque */ +export opaque type ExportOpaque = [T,T]; /** this is ExportClass */ export class ExportClass {}; /** this is ExportInterface */ diff --git a/tests/type_at_pos_jsdoc/type_at_pos_jsdoc.exp b/tests/type_at_pos_jsdoc/type_at_pos_jsdoc.exp index 125743b844e..dd774ca40fb 100644 --- a/tests/type_at_pos_jsdoc/type_at_pos_jsdoc.exp +++ b/tests/type_at_pos_jsdoc/type_at_pos_jsdoc.exp @@ -1214,7 +1214,7 @@ Flags: --strip-root --pretty "end":6 } -types.js:14:7 +types.js:16:7 Flags: --strip-root --pretty { "documentation":"this is myFoo", @@ -1223,17 +1223,17 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":14,"column":6,"offset":345}, - "end":{"line":14,"column":10,"offset":350} + "start":{"line":16,"column":6,"offset":416}, + "end":{"line":16,"column":10,"offset":421} }, "path":"types.js", - "line":14, - "endline":14, + "line":16, + "endline":16, "start":6, "end":10 } -types.js:16:7 +types.js:18:7 Flags: --strip-root --pretty { "documentation":"this is myBar", @@ -1242,17 +1242,36 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":16,"column":6,"offset":372}, - "end":{"line":16,"column":10,"offset":377} + "start":{"line":18,"column":6,"offset":443}, + "end":{"line":18,"column":10,"offset":448} }, "path":"types.js", - "line":16, - "endline":16, + "line":18, + "endline":18, "start":6, "end":10 } -types.js:18:7 +types.js:20:7 +Flags: --strip-root --pretty +{ + "documentation":"this is myOpaque", + "type":"type 
myOpaque = [T, T]", + "reasons":[], + "loc":{ + "source":"types.js", + "type":"SourceFile", + "start":{"line":20,"column":6,"offset":478}, + "end":{"line":20,"column":13,"offset":486} + }, + "path":"types.js", + "line":20, + "endline":20, + "start":6, + "end":13 +} + +types.js:22:7 Flags: --strip-root --pretty { "documentation":"this is myClass", @@ -1261,17 +1280,17 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":18,"column":6,"offset":407}, - "end":{"line":18,"column":12,"offset":414} + "start":{"line":22,"column":6,"offset":516}, + "end":{"line":22,"column":12,"offset":523} }, "path":"types.js", - "line":18, - "endline":18, + "line":22, + "endline":22, "start":6, "end":12 } -types.js:20:7 +types.js:24:7 Flags: --strip-root --pretty { "documentation":"this is myInterface", @@ -1280,17 +1299,17 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":20,"column":6,"offset":436}, - "end":{"line":20,"column":16,"offset":447} + "start":{"line":24,"column":6,"offset":545}, + "end":{"line":24,"column":16,"offset":556} }, "path":"types.js", - "line":20, - "endline":20, + "line":24, + "endline":24, "start":6, "end":16 } -types.js:22:7 +types.js:26:7 Flags: --strip-root --pretty { "documentation":"this is ExportFoo", @@ -1299,17 +1318,17 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":22,"column":6,"offset":469}, - "end":{"line":22,"column":14,"offset":478} + "start":{"line":26,"column":6,"offset":578}, + "end":{"line":26,"column":14,"offset":587} }, "path":"types.js", - "line":22, - "endline":22, + "line":26, + "endline":26, "start":6, "end":14 } -types.js:24:7 +types.js:28:7 Flags: --strip-root --pretty { "documentation":"this is ExportBar", @@ -1318,17 +1337,36 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":24,"column":6,"offset":500}, - 
"end":{"line":24,"column":14,"offset":509} + "start":{"line":28,"column":6,"offset":609}, + "end":{"line":28,"column":14,"offset":618} }, "path":"types.js", - "line":24, - "endline":24, + "line":28, + "endline":28, "start":6, "end":14 } -types.js:26:7 +types.js:30:7 +Flags: --strip-root --pretty +{ + "documentation":"this is ExportOpaque", + "type":"type ExportOpaque", + "reasons":[], + "loc":{ + "source":"types.js", + "type":"SourceFile", + "start":{"line":30,"column":6,"offset":648}, + "end":{"line":30,"column":17,"offset":660} + }, + "path":"types.js", + "line":30, + "endline":30, + "start":6, + "end":17 +} + +types.js:32:7 Flags: --strip-root --pretty { "documentation":"this is ExportClass", @@ -1337,17 +1375,17 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":26,"column":6,"offset":539}, - "end":{"line":26,"column":16,"offset":550} + "start":{"line":32,"column":6,"offset":690}, + "end":{"line":32,"column":16,"offset":701} }, "path":"types.js", - "line":26, - "endline":26, + "line":32, + "endline":32, "start":6, "end":16 } -types.js:28:7 +types.js:34:7 Flags: --strip-root --pretty { "documentation":"this is ExportInterface", @@ -1356,17 +1394,17 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":28,"column":6,"offset":572}, - "end":{"line":28,"column":20,"offset":587} + "start":{"line":34,"column":6,"offset":723}, + "end":{"line":34,"column":20,"offset":738} }, "path":"types.js", - "line":28, - "endline":28, + "line":34, + "endline":34, "start":6, "end":20 } -types.js:32:7 +types.js:38:7 Flags: --strip-root --pretty { "documentation":"this is ExportValue", @@ -1375,12 +1413,12 @@ Flags: --strip-root --pretty "loc":{ "source":"types.js", "type":"SourceFile", - "start":{"line":32,"column":6,"offset":681}, - "end":{"line":32,"column":16,"offset":692} + "start":{"line":38,"column":6,"offset":832}, + "end":{"line":38,"column":16,"offset":843} }, "path":"types.js", - 
"line":32, - "endline":32, + "line":38, + "endline":38, "start":6, "end":16 } diff --git a/tests/type_at_pos_jsdoc/types.js b/tests/type_at_pos_jsdoc/types.js index 26baf1bb6ae..db9f08c73be 100644 --- a/tests/type_at_pos_jsdoc/types.js +++ b/tests/type_at_pos_jsdoc/types.js @@ -4,17 +4,21 @@ type myFoo = number; /** this is myBar */ type myBar = ?T; +/** this is myOpaque */ +opaque type myOpaque = [T,T]; /** this is myClass */ class myClass {}; /** this is myInterface */ interface myInterface {}; -import type {ExportFoo, ExportBar, ExportClass, ExportInterface, ExportEnum} from './type-exports'; +import type {ExportFoo, ExportBar, ExportOpaque, ExportClass, ExportInterface, ExportEnum} from './type-exports'; import { typeof exportValue } from './type-exports'; (x : myFoo) => {}; // ^ (x : myBar) => {}; // ^ +(x : myOpaque) => {}; +// ^ (x : myClass) => {}; // ^ (x : myInterface) => {}; @@ -23,6 +27,8 @@ import { typeof exportValue } from './type-exports'; // ^ (x : ExportBar) => {}; // ^ +(x : ExportOpaque) => {}; +// ^ (x : ExportClass) => {}; // ^ (x : ExportInterface) => {}; From c39ef3c334cfdb3f1f3b76faf70e47a7a6d5916b Mon Sep 17 00:00:00 2001 From: Vijay Ramamurthy Date: Wed, 9 Dec 2020 09:43:32 -0800 Subject: [PATCH 33/43] fix loc attached to namespace imports in file_sig Summary: `File_sig` provides information about a module's imports, in which it includes information about the identifiers in the import statements. The location stored for `import * as foo from 'bar'` used to be the location corresponding to `* as foo`; this diff changes it to instead store the location corresponding to `foo`. The `File_sig` API provides this loc in a `type Flow_ast_utils.ident = Loc.t * string` where it'd be paired with the string `"foo"`, so the new behavior is more in line with expectations. 
Reviewed By: nmote Differential Revision: D24868725 fbshipit-source-id: 8d285f0add8dfe67463984d17fe099810dec86ea --- src/parser_utils/__tests__/file_sig_test.ml | 4 ++-- src/parser_utils/file_sig.ml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/parser_utils/__tests__/file_sig_test.ml b/src/parser_utils/__tests__/file_sig_test.ml index 8b14de39466..afa1d6f110b 100644 --- a/src/parser_utils/__tests__/file_sig_test.ml +++ b/src/parser_utils/__tests__/file_sig_test.ml @@ -309,7 +309,7 @@ let tests = let { module_sig = { requires; _ }; _ } = visit source in match requires with | [Import { source = (_, "foo"); ns = Some (loc, "Foo"); _ }] -> - assert_substring_equal ~ctxt "* as Foo" source loc + assert_substring_equal ~ctxt "Foo" source loc | _ -> assert_failure "Unexpected requires" ); ( "es_import_type" >:: fun ctxt -> let source = "import type A from 'foo'" in @@ -382,7 +382,7 @@ let tests = let { module_sig = { requires; _ }; _ } = visit source in match requires with | [Import { source = (_, "foo"); typesof_ns = Some (loc, "Foo"); _ }] -> - assert_substring_equal ~ctxt "* as Foo" source loc + assert_substring_equal ~ctxt "Foo" source loc | _ -> assert_failure "Unexpected requires" ); ( "cjs_default" >:: fun ctxt -> let source = "" in diff --git a/src/parser_utils/file_sig.ml b/src/parser_utils/file_sig.ml index b2ead9488ac..0b11061eb8b 100644 --- a/src/parser_utils/file_sig.ml +++ b/src/parser_utils/file_sig.ml @@ -626,7 +626,7 @@ struct default; Base.Option.iter ~f:(function - | ImportNamespaceSpecifier (loc, (_, { Ast.Identifier.name = local; comments = _ })) + | ImportNamespaceSpecifier (_, (loc, { Ast.Identifier.name = local; comments = _ })) -> (match import_kind with | ImportType -> failwith "import type * is a parse error" From 74a877730182ffdd860d4147f045fefdf17237dc Mon Sep 17 00:00:00 2001 From: Nat Mote Date: Wed, 9 Dec 2020 11:02:04 -0800 Subject: [PATCH 34/43] Recompute the resolved requires hash when denormalizing saved state 
Summary: This hash has absolute file paths mixed in, so it needs to be recomputed instead of loaded from saved state. This issue leads to Flow incorrectly believing that the resolved requires changed upon a recheck, since it makes that determination based on the hash alone. Currently, this doesn't cause any problems in practice, because this knowledge is only used: * To decide whether to clear the direct dependents file cache, which would not be populated on the initial recheck for a file anyway. * To decide whether we can consider directly-dependent files for skipping in the check phase, which we currently cannot do on the initial lazy recheck for a file anyway, because we don't have the sighashes. However, my upcoming work will allow us to store sighashes in saved state and use them to skip work on initial lazy rechecks. This incorrect hash causes Flow to believe, incorrectly, that the resolved requires have changed, which stymies that work by preventing Flow from skipping direct dependent files in the check phase. We could reduce the size of the saved state by removing this hash from it, now that we regenerate it anyway. I'll do that another time. 
Reviewed By: samwgoldman Differential Revision: D25414311 fbshipit-source-id: 1c08f357123a92c75ad4839d478f776c65becc32 --- src/services/saved_state/saved_state.ml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/services/saved_state/saved_state.ml b/src/services/saved_state/saved_state.ml index 42455a2789c..bb4cec87bdc 100644 --- a/src/services/saved_state/saved_state.ml +++ b/src/services/saved_state/saved_state.ml @@ -514,13 +514,13 @@ end = struct denormalize_info_generic ~denormalize:(denormalize_file_key_nocache ~root) info let denormalize_resolved_requires - ~root { Module_heaps.resolved_modules; phantom_dependents; hash } = + ~root { Module_heaps.resolved_modules; phantom_dependents; hash = _ } = (* We do our best to avoid reading the file system (which Path.make will do) *) let phantom_dependents = SSet.map (Files.absolute_path root) phantom_dependents in let resolved_modules = SMap.map (modulename_map_fn ~f:(denormalize_file_key_nocache ~root)) resolved_modules in - { Module_heaps.resolved_modules; phantom_dependents; hash } + Module_heaps.mk_resolved_requires ~resolved_modules ~phantom_dependents (** Turns all the relative paths in a file's data back into absolute paths. 
*) let denormalize_file_data ~root { resolved_requires; hash } = From 3c1e901d7d275567311243a7a53b932d7ce7f973 Mon Sep 17 00:00:00 2001 From: Nat Mote Date: Wed, 9 Dec 2020 11:42:36 -0800 Subject: [PATCH 35/43] Remove unused export_file state in ContextOptimizer Reviewed By: samwgoldman Differential Revision: D25430331 fbshipit-source-id: 04bb366d51d17b4faaad1eea6dfcffae5d91afc8 --- src/typing/merge_js.ml | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/typing/merge_js.ml b/src/typing/merge_js.ml index a22f1c3c5f0..c2a0447ea43 100644 --- a/src/typing/merge_js.ml +++ b/src/typing/merge_js.ml @@ -658,11 +658,8 @@ module ContextOptimizer = struct val mutable export_reason = None - val mutable export_file = None - method reduce cx module_ref = let export = Context.find_module cx module_ref in - export_file <- reason_of_t export |> Reason.aloc_of_reason |> ALoc.source; let export' = self#type_ cx Polarity.Neutral export in reduced_module_map <- SMap.add module_ref export' reduced_module_map From 1a31d6095932241e52139fa1d5da14f95fecb6ec Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Wed, 9 Dec 2020 13:56:00 -0800 Subject: [PATCH 36/43] Consolidate sharedmem collections Summary: To my knowledge, Flow has never been particularly disciplined with respect to where sharedmem collections were attempted. We would sometimes attempt a "gentle" collection during a recheck after removing something, and eventually an "aggressive" collection during idle time. With the introduction of cancelable rechecks, attempting collection during a recheck became mostly useless. Existing entries are oldified at the beginning of a recheck, but are not removed until the entire transaction commits. I propose that there are exactly two places where it's necessary to attempt a collection: 1. Between rechecks, during idle time, is the best time to try to collect. We will have either committed or rolled back the transaction, in either case potentially leaving garbage in the shared heap. 2. 
After a canceled recheck, before the coalesced re-attempt. This is really a special-case of (1), since it's "between rechecks," but it does not hit the same code paths. This is also not idle time, since the user is presumably waiting on the follow-up recheck to complete. This diff consolidates all calls to SharedMem_js.collect to those two places. Reviewed By: nmote Differential Revision: D25280252 fbshipit-source-id: 68d2d87bcaa513744576ea97abc6b5cc3377e9e3 --- src/parsing/parsing_service_js.ml | 1 - src/server/rechecker/rechecker.ml | 5 +++++ src/server/shmem/sharedMem_js.ml | 6 ++---- src/services/inference/types_js.ml | 2 +- src/state/heaps/context/context_heaps.ml | 3 +-- src/state/heaps/parsing/parsing_heaps.ml | 3 +-- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/parsing/parsing_service_js.ml b/src/parsing/parsing_service_js.ml index b854adb981e..988578c82b8 100644 --- a/src/parsing/parsing_service_js.ml +++ b/src/parsing/parsing_service_js.ml @@ -813,7 +813,6 @@ let reparse in let modified = FilenameSet.union modified results.parse_not_found_skips in let modified = FilenameSet.union modified results.parse_hash_mismatch_skips in - SharedMem_js.collect `gentle; let unchanged = FilenameSet.diff files modified in (* restore old parsing info for unchanged files *) Parsing_heaps.Reparse_mutator.revive_files master_mutator unchanged; diff --git a/src/server/rechecker/rechecker.ml b/src/server/rechecker/rechecker.ml index 5e79180d0b0..a1a93aabd93 100644 --- a/src/server/rechecker/rechecker.ml +++ b/src/server/rechecker/rechecker.ml @@ -207,6 +207,11 @@ let rec recheck_single let post_cancel () = Hh_logger.info "Recheck successfully canceled. Restarting the recheck to include new file changes"; + (* The canceled recheck, or a preceding sequence of canceled rechecks where none completed, + * may have introduced garbage into shared memory. Since we immediately start another + * recheck, we should first check whether we need to compact. 
Otherwise, sharedmem could + * potentially grow unbounded. *) + SharedMem_js.collect `aggressive; recheck_single ~files_to_recheck ~files_to_force diff --git a/src/server/shmem/sharedMem_js.ml b/src/server/shmem/sharedMem_js.ml index 62228a1cedf..e6f36524cce 100644 --- a/src/server/shmem/sharedMem_js.ml +++ b/src/server/shmem/sharedMem_js.ml @@ -12,8 +12,7 @@ module Ident = Ident module Collect : sig val collect : [ `gentle | `aggressive | `always_TEST ] -> unit - val with_memory_profiling_lwt : - profiling:Profiling_js.running -> collect_at_end:bool -> (unit -> 'a Lwt.t) -> 'a Lwt.t + val with_memory_profiling_lwt : profiling:Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t val with_memory_timer_lwt : ?options:Options.t -> string -> Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t @@ -38,12 +37,11 @@ end = struct Profiling_js.sample_memory profiling ~metric:"hash_slots" ~value:(float_of_int slots); Profiling_js.sample_memory profiling ~metric:"hash_used_slots" ~value:(float_of_int used_slots) - let with_memory_profiling_lwt ~profiling ~collect_at_end f = + let with_memory_profiling_lwt ~profiling f = sample_memory profiling; (profile_before_collect_callback := (fun () -> sample_memory profiling)); let%lwt ret = f () in - if collect_at_end then collect `aggressive; sample_memory profiling; (profile_before_collect_callback := (fun () -> ())); diff --git a/src/services/inference/types_js.ml b/src/services/inference/types_js.ml index 46254f232ea..4ecfbd79137 100644 --- a/src/services/inference/types_js.ml +++ b/src/services/inference/types_js.ml @@ -2322,7 +2322,7 @@ let recheck ~recheck_reasons ~will_be_checked_files = let%lwt (env, stats, first_internal_error) = - SharedMem_js.with_memory_profiling_lwt ~profiling ~collect_at_end:true (fun () -> + SharedMem_js.with_memory_profiling_lwt ~profiling (fun () -> with_transaction (fun transaction reader -> Recheck.full ~profiling diff --git a/src/state/heaps/context/context_heaps.ml 
b/src/state/heaps/context/context_heaps.ml index 16ad3d4b38a..babfc0b6f2c 100644 --- a/src/state/heaps/context/context_heaps.ml +++ b/src/state/heaps/context/context_heaps.ml @@ -56,8 +56,7 @@ let remove_old_merge_batch files = WorkerCancel.with_no_cancellations (fun () -> LeaderHeap.remove_old_batch files; SigContextHeap.remove_old_batch files; - SigHashHeap.remove_old_batch files; - SharedMem_js.collect `gentle) + SigHashHeap.remove_old_batch files) let revive_merge_batch files = WorkerCancel.with_no_cancellations (fun () -> diff --git a/src/state/heaps/parsing/parsing_heaps.ml b/src/state/heaps/parsing/parsing_heaps.ml index b2f899d7cc5..356b8822393 100644 --- a/src/state/heaps/parsing/parsing_heaps.ml +++ b/src/state/heaps/parsing/parsing_heaps.ml @@ -201,8 +201,7 @@ module ParsingHeaps = struct DocblockHeap.remove_old_batch files; FileSigHeap.remove_old_batch files; SigFileSigHeap.remove_old_batch files; - FileHashHeap.remove_old_batch files; - SharedMem_js.collect `gentle) + FileHashHeap.remove_old_batch files) let revive_batch files = WorkerCancel.with_no_cancellations (fun () -> From 26e2cec17058a43c85435e40dc2465dbd5544e1b Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Wed, 9 Dec 2020 13:56:00 -0800 Subject: [PATCH 37/43] Drop profile_before_collect_callback, which never runs Summary: With the changes from the previous diff, we will no longer ever attempt to collect during merge, and therefore not attempt to collect within the callback argument to `with_memory_profiling_lwt`. I don't imagine this callback has fired since cancelable rechecks were enabled, since merging will not create garbage until the transaction is committed. 
Reviewed By: nmote Differential Revision: D25280251 fbshipit-source-id: 70d606ce5400128e044426b8a6fec4d5151a05b0 --- src/server/shmem/sharedMem_js.ml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/server/shmem/sharedMem_js.ml b/src/server/shmem/sharedMem_js.ml index e6f36524cce..7174c09af37 100644 --- a/src/server/shmem/sharedMem_js.ml +++ b/src/server/shmem/sharedMem_js.ml @@ -17,11 +17,8 @@ module Collect : sig val with_memory_timer_lwt : ?options:Options.t -> string -> Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t end = struct - let profile_before_collect_callback = ref (fun () -> ()) - let collect effort = if SharedMem.should_collect effort then ( - !profile_before_collect_callback (); MonitorRPC.status_update ~event:ServerStatus.GC_start; SharedMem.collect effort ) @@ -39,12 +36,10 @@ end = struct let with_memory_profiling_lwt ~profiling f = sample_memory profiling; - (profile_before_collect_callback := (fun () -> sample_memory profiling)); let%lwt ret = f () in sample_memory profiling; - (profile_before_collect_callback := (fun () -> ())); Lwt.return ret From 60e023e5f32d1aac45e6f14baed254fa1ae40684 Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Wed, 9 Dec 2020 13:56:00 -0800 Subject: [PATCH 38/43] Add `on_compact` callback to sharedmem API Summary: The sharedmem API's `collect` function does not always compact the heap. Instead, it checks the amount of wasted space and compacts conditionally. Additionally, there are a few different hooks we'd like to trigger when the heap is compacted: 1. Log the compaction event (Hh_logger and EventLogger) 2. Update the server status, because compaction can take a few seconds Before this diff, these two hooks were triggered using two different methods: 1. Logging code was added directly to the implementation of `collect`, causing the SharedMem module to be dependent on logging modules, with logging behaviors hardcoded in. 2. 
The SharedMem_js module also wraps the SharedMem module, duplicates the conditional logic (calls shared function). We need to be careful to ensure we call the wrapped module's `collect` function if we want the server status to be updated. Internally we have a pattern lint for this. This diff takes a different approach. We allow the application to provide an explicit hook, by setting the `on_compact` ref on the SharedMem module. If calling `collect` does ultimately trigger a compaction, the callback function will be called. The callback function has a slightly unusual signature, which allows it to bracket the compaction. That is, the callback is applied with a single argument before compaction, returning a continuation which is called afterward. I have a few motivations for this change, but the primary motivation is to make it easier to land the incremental GC. Similar to today, calling `collect` on the incremental GC might not actually compact the heap, but the condition for when is more complicated -- we need to have completed the marking and sweeping phases, and have the appropriate amount of wasted space. Otherwise, this change allows us to: * Remove the `should_collect` function from the public API (done here) * Remove the SharedMem_js wrapper module (next diff) I'm not thrilled about the module-level ref for the callback. I considered some alternatives, but ultimately decided they were not significant improvements. That said, I'm certainly open to different approaches. For example, I experimented with passing the callback to the `collect` method directly. This avoids the state in the module, but we call `collect` from two places, and it's fragile to ensure they are both providing the callback. I also experimented with initializing the `on_compact` callback as part of `SharedMem.init` -- i.e., passing the callback to init, using it in subsequent collect calls, but not exposing the ref itself. This still seems reasonable to me, but it made testing code a bit more awkward. 
Ultimately, the SharedMem module is awkwardly written, and is just a stateful module by design, so I think the module-level ref is a good fit, but I would welcome changes to make this module less stateful overall. Reviewed By: nmote Differential Revision: D25280250 fbshipit-source-id: 1f530e501c0e52d8f5510750bcd3ce160a64f570 --- .../test/unit/heap/test_hashtbl.ml | 41 ++++++++----------- src/heap/sharedMem.ml | 31 +++++++------- src/heap/sharedMem.mli | 11 +++-- src/server/server.ml | 17 ++++++++ src/server/shmem/sharedMem_js.ml | 8 ---- 5 files changed, 58 insertions(+), 50 deletions(-) diff --git a/src/hack_forked/test/unit/heap/test_hashtbl.ml b/src/hack_forked/test/unit/heap/test_hashtbl.ml index 5097945d08b..4526f43fc1f 100644 --- a/src/hack_forked/test/unit/heap/test_hashtbl.ml +++ b/src/hack_forked/test/unit/heap/test_hashtbl.ml @@ -84,27 +84,25 @@ let expect_get key expected = ~msg:(Printf.sprintf "Expected key '%s' to have value '%s', got '%s" key expected value) (value = expected) -let expect_gentle_collect expected = +let expect_compact effort expected = + let old_cb = !SharedMem.on_compact in + let actual = ref false in + (SharedMem.on_compact := (fun _ _ -> actual := true)); + SharedMem.collect effort; + SharedMem.on_compact := old_cb; expect ~msg: (Printf.sprintf - "Expected gentle collection to be %sneeded" + "Expected %s collection to be %sneeded" + (match effort with + | `gentle -> "gentle" + | `aggressive -> "aggressive" + | `always_TEST -> "always") ( if expected then "" else "not " )) - (SharedMem.should_collect `gentle = expected) - -let expect_aggressive_collect expected = - expect - ~msg: - (Printf.sprintf - "Expected aggressive collection to be %sneeded" - ( if expected then - "" - else - "not " )) - (SharedMem.should_collect `aggressive = expected) + (!actual = expected) let test_ops () = expect_stats ~nonempty:0 ~used:0; @@ -226,8 +224,8 @@ let test_gc_collect () = add "1" "1"; (* no memory is wasted *) - expect_gentle_collect false; - 
expect_aggressive_collect false; + expect_compact `gentle false; + expect_compact `aggressive false; expect_heap_size 2; expect_mem "0"; expect_mem "1"; @@ -235,7 +233,7 @@ let test_gc_collect () = expect_heap_size 2; (* Garbage collection should remove the space taken by the removed element *) - SharedMem.collect `gentle; + expect_compact `gentle true; expect_heap_size 1; expect_mem "0" @@ -249,21 +247,18 @@ let test_gc_aggressive () = (* Since latest heap size is zero, now it should gc, but theres nothing to gc, so the heap will stay the same *) - expect_gentle_collect false; - SharedMem.collect `gentle; + expect_compact `gentle false; expect_heap_size 2; remove "1"; add "2" "2"; expect_heap_size 3; (* Gentle garbage collection shouldn't catch this *) - expect_gentle_collect false; - SharedMem.collect `gentle; + expect_compact `gentle false; expect_heap_size 3; (* Aggressive garbage collection should run *) - expect_aggressive_collect true; - SharedMem.collect `aggressive; + expect_compact `aggressive true; expect_heap_size 2 let test_heapsize_decrease () = diff --git a/src/heap/sharedMem.ml b/src/heap/sharedMem.ml index d69ab410710..ac54c2d5427 100644 --- a/src/heap/sharedMem.ml +++ b/src/heap/sharedMem.ml @@ -29,6 +29,12 @@ type heap = (nativeint, Bigarray.nativeint_elt, Bigarray.c_layout) Bigarray.Arra * Internally, these are all just ints, so be careful! 
*) type _ addr = int +type effort = + [ `aggressive + | `always_TEST + | `gentle + ] + let heap_ref : heap option ref = ref None exception Out_of_shared_memory @@ -105,7 +111,9 @@ external hash_stats : unit -> table_stats = "hh_hash_stats" (*****************************************************************************) let init_done () = EventLogger.sharedmem_init_done (heap_size ()) -let should_collect (effort : [ `gentle | `aggressive | `always_TEST ]) = +let on_compact = ref (fun _ _ -> ()) + +let should_collect effort = let overhead = match effort with | `always_TEST -> 1.0 @@ -117,21 +125,12 @@ let should_collect (effort : [ `gentle | `aggressive | `always_TEST ]) = let reachable = used - wasted in used >= truncate (float reachable *. overhead) -let collect (effort : [ `gentle | `aggressive | `always_TEST ]) = - let old_size = heap_size () in - let start_t = Unix.gettimeofday () in - (* The wrapper is used to run the function in a worker instead of master. *) - if should_collect effort then hh_collect (); - let new_size = heap_size () in - let time_taken = Unix.gettimeofday () -. start_t in - if old_size <> new_size then ( - Hh_logger.log - "Sharedmem GC: %d bytes before; %d bytes after; in %f seconds" - old_size - new_size - time_taken; - EventLogger.sharedmem_gc_ran effort old_size new_size time_taken - ) +let collect effort = + if should_collect effort then begin + let k = !on_compact effort in + hh_collect (); + k () + end (* Compute size of values in the garbage-collected heap *) let value_size r = diff --git a/src/heap/sharedMem.mli b/src/heap/sharedMem.mli index 0c5ae957191..dc7fe1bf33d 100644 --- a/src/heap/sharedMem.mli +++ b/src/heap/sharedMem.mli @@ -18,6 +18,12 @@ type handle = Unix.file_descr * passed where a `bar addr` is expected. 
*) type 'k addr [@@immediate] +type effort = + [ `aggressive + | `always_TEST + | `gentle + ] + exception Out_of_shared_memory exception Hash_table_full @@ -26,10 +32,9 @@ exception Heap_full val connect : handle -> worker_id:int -> unit -(* TODO - can we hide after inlining SharedMem_js? *) -val should_collect : [ `aggressive | `always_TEST | `gentle ] -> bool +val on_compact : (effort -> unit -> unit) ref -val collect : [ `aggressive | `always_TEST | `gentle ] -> unit +val collect : effort -> unit type table_stats = { nonempty_slots: int; diff --git a/src/server/server.ml b/src/server/server.ml index a69fc88ec7b..35c4924d09b 100644 --- a/src/server/server.ml +++ b/src/server/server.ml @@ -135,6 +135,22 @@ let rec serve ~genv ~env = serve ~genv ~env +let on_compact effort = + MonitorRPC.status_update ~event:ServerStatus.GC_start; + let old_size = SharedMem_js.heap_size () in + let start_t = Unix.gettimeofday () in + fun () -> + let new_size = SharedMem_js.heap_size () in + let time_taken = Unix.gettimeofday () -. start_t in + if old_size <> new_size then ( + Hh_logger.log + "Sharedmem GC: %d bytes before; %d bytes after; in %f seconds" + old_size + new_size + time_taken; + EventLogger.sharedmem_gc_ran effort old_size new_size time_taken + ) + (* The main entry point of the daemon * the only trick to understand here, is that env.modified is the set * of files that changed, it is only set back to SSet.empty when the @@ -142,6 +158,7 @@ let rec serve ~genv ~env = * we look if env.modified changed. 
*) let create_program_init ~shared_mem_config ~init_id ?focus_targets options = + SharedMem_js.on_compact := on_compact; let num_workers = Options.max_workers options in let handle = SharedMem_js.init ~num_workers shared_mem_config in let genv = ServerEnvBuild.make_genv ~options ~init_id handle in diff --git a/src/server/shmem/sharedMem_js.ml b/src/server/shmem/sharedMem_js.ml index 7174c09af37..ddcd368437f 100644 --- a/src/server/shmem/sharedMem_js.ml +++ b/src/server/shmem/sharedMem_js.ml @@ -10,19 +10,11 @@ module Prefix = Prefix module Ident = Ident module Collect : sig - val collect : [ `gentle | `aggressive | `always_TEST ] -> unit - val with_memory_profiling_lwt : profiling:Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t val with_memory_timer_lwt : ?options:Options.t -> string -> Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t end = struct - let collect effort = - if SharedMem.should_collect effort then ( - MonitorRPC.status_update ~event:ServerStatus.GC_start; - SharedMem.collect effort - ) - let sample_memory profiling = let heap = heap_size () in let { nonempty_slots; used_slots; slots } = hash_stats () in From 2ac682ed843dbfd3ff56aa8b1ecc15223b45ad42 Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Wed, 9 Dec 2020 13:56:00 -0800 Subject: [PATCH 39/43] Drop wrapping behavior from SharedMem_js module Summary: In the previous diff, I changed the SharedMem_js module. It used to trigger a server status update, but that now happens in the newly introduced `on_compact` hook. After that change, it is no longer useful for SharedMem_js to "wrap" the SharedMem module. Instead, we can export just the memory sampling utilities. In a follow-up, I will move and rename this module to reflect that it contains only these utilities. A pattern lint ensured that we always used SharedMem_js instead of SharedMem, to ensure we always had the wrapped behavior. This lint is no longer needed, so I removed it here. 
Reviewed By: nmote Differential Revision: D25280253 fbshipit-source-id: 98cebc417ec504d1e73f1b51c372e9800ab3acd9 --- src/codemods/utils/codemod_utils.ml | 2 +- src/commands/commandUtils.ml | 2 +- src/flow.ml | 2 +- src/monitor/flowServerMonitorOptions.ml | 2 +- src/server/rechecker/rechecker.ml | 2 +- src/server/server.ml | 22 +- src/server/server.mli | 4 +- src/server/serverWorker.mli | 2 +- src/server/server_daemon.ml | 4 +- src/server/server_daemon.mli | 4 +- src/server/shmem/sharedMem_js.ml | 231 +++++++++--------- src/server/shmem/sharedMem_js.mli | 11 + .../inference/__tests__/types_js_test.ml | 4 +- src/services/inference/merge_service.ml | 7 +- src/state/heaps/context/context_heaps.ml | 6 +- src/state/heaps/diffing/diff_heaps.ml | 2 +- src/state/heaps/module/module_heaps.ml | 6 +- src/state/heaps/package/package_heaps.ml | 4 +- src/state/heaps/parsing/parsing_heaps.ml | 16 +- src/typing/__tests__/typing_tests.ml | 2 +- 20 files changed, 166 insertions(+), 169 deletions(-) create mode 100644 src/server/shmem/sharedMem_js.mli diff --git a/src/codemods/utils/codemod_utils.ml b/src/codemods/utils/codemod_utils.ml index d74f9575772..06a29718249 100644 --- a/src/codemods/utils/codemod_utils.ml +++ b/src/codemods/utils/codemod_utils.ml @@ -75,7 +75,7 @@ module MakeMain (Runner : Codemod_runner.RUNNABLE) = struct let initial_lwt_thread () = let genv = let num_workers = Options.max_workers options in - let handle = SharedMem_js.init ~num_workers shared_mem_config in + let handle = SharedMem.init ~num_workers shared_mem_config in ServerEnvBuild.make_genv ~init_id ~options handle in Runner.run ~genv ~write ~repeat roots diff --git a/src/commands/commandUtils.ml b/src/commands/commandUtils.ml index 8d91a2d7018..a715e9496a8 100644 --- a/src/commands/commandUtils.ml +++ b/src/commands/commandUtils.ml @@ -274,7 +274,7 @@ let shm_config shm_flags flowconfig = let log_level = Base.Option.value shm_flags.shm_log_level ~default:(FlowConfig.shm_log_level flowconfig) in - { 
SharedMem_js.heap_size = FlowConfig.shm_heap_size flowconfig; hash_table_pow; log_level } + { SharedMem.heap_size = FlowConfig.shm_heap_size flowconfig; hash_table_pow; log_level } let from_flag = let collector main from = diff --git a/src/flow.ml b/src/flow.ml index 0e7902a5c35..bb8b97e5ad2 100644 --- a/src/flow.ml +++ b/src/flow.ml @@ -108,7 +108,7 @@ let _ = (* this call might not return *) FlowShell.main () with - | SharedMem_js.Out_of_shared_memory as e -> + | SharedMem.Out_of_shared_memory as e -> let e = Exception.wrap e in let bt = Exception.get_backtrace_string e in let msg = diff --git a/src/monitor/flowServerMonitorOptions.ml b/src/monitor/flowServerMonitorOptions.ml index 98f5980443b..3037c03074f 100644 --- a/src/monitor/flowServerMonitorOptions.ml +++ b/src/monitor/flowServerMonitorOptions.ml @@ -34,7 +34,7 @@ type t = { (* The server's options *) server_options: Options.t; (* The shared memory config *) - shared_mem_config: SharedMem_js.config; + shared_mem_config: SharedMem.config; (* The argv of the process which created the server monitor *) argv: string array; (* What to use for file watching *) diff --git a/src/server/rechecker/rechecker.ml b/src/server/rechecker/rechecker.ml index a1a93aabd93..0f27a43889f 100644 --- a/src/server/rechecker/rechecker.ml +++ b/src/server/rechecker/rechecker.ml @@ -211,7 +211,7 @@ let rec recheck_single * may have introduced garbage into shared memory. Since we immediately start another * recheck, we should first check whether we need to compact. Otherwise, sharedmem could * potentially grow unbounded. 
*) - SharedMem_js.collect `aggressive; + SharedMem.collect `aggressive; recheck_single ~files_to_recheck ~files_to_force diff --git a/src/server/server.ml b/src/server/server.ml index 35c4924d09b..925773e7f5d 100644 --- a/src/server/server.ml +++ b/src/server/server.ml @@ -6,7 +6,7 @@ *) let sample_init_memory profiling = - SharedMem_js.( + SharedMem.( let hash_stats = hash_stats () in let heap_size = heap_size () in let memory_metrics = @@ -54,7 +54,7 @@ let init ~profiling ?focus_targets genv = in sample_init_memory profiling; - SharedMem_js.init_done (); + SharedMem.init_done (); (* Return an env that initializes invariants required and maintained by recheck, namely that `files` contains files that parsed successfully, and @@ -102,7 +102,7 @@ let rec log_on_idle = let rec serve ~genv ~env = Hh_logger.debug "Starting aggressive shared mem GC"; - SharedMem_js.collect `aggressive; + SharedMem.collect `aggressive; Hh_logger.debug "Finished aggressive shared mem GC"; MonitorRPC.status_update ~event:ServerStatus.Ready; @@ -137,10 +137,10 @@ let rec serve ~genv ~env = let on_compact effort = MonitorRPC.status_update ~event:ServerStatus.GC_start; - let old_size = SharedMem_js.heap_size () in + let old_size = SharedMem.heap_size () in let start_t = Unix.gettimeofday () in fun () -> - let new_size = SharedMem_js.heap_size () in + let new_size = SharedMem.heap_size () in let time_taken = Unix.gettimeofday () -. start_t in if old_size <> new_size then ( Hh_logger.log @@ -158,13 +158,13 @@ let on_compact effort = * we look if env.modified changed. 
*) let create_program_init ~shared_mem_config ~init_id ?focus_targets options = - SharedMem_js.on_compact := on_compact; + SharedMem.on_compact := on_compact; let num_workers = Options.max_workers options in - let handle = SharedMem_js.init ~num_workers shared_mem_config in + let handle = SharedMem.init ~num_workers shared_mem_config in let genv = ServerEnvBuild.make_genv ~options ~init_id handle in let program_init profiling = let%lwt ret = init ~profiling ?focus_targets genv in - if shared_mem_config.SharedMem_js.log_level > 0 then Measure.print_stats (); + if shared_mem_config.SharedMem.log_level > 0 then Measure.print_stats (); Lwt.return ret in (genv, program_init) @@ -238,15 +238,15 @@ let exit_msg_of_exception exn msg = let run_from_daemonize ~init_id ~monitor_channels ~shared_mem_config options = try run ~monitor_channels ~shared_mem_config ~init_id options with - | SharedMem_js.Out_of_shared_memory as exn -> + | SharedMem.Out_of_shared_memory as exn -> let exn = Exception.wrap exn in let msg = exit_msg_of_exception exn "Out of shared memory" in FlowExitStatus.(exit ~msg Out_of_shared_memory) - | SharedMem_js.Hash_table_full as exn -> + | SharedMem.Hash_table_full as exn -> let exn = Exception.wrap exn in let msg = exit_msg_of_exception exn "Hash table is full" in FlowExitStatus.(exit ~msg Hash_table_full) - | SharedMem_js.Heap_full as exn -> + | SharedMem.Heap_full as exn -> let exn = Exception.wrap exn in let msg = exit_msg_of_exception exn "Heap is full" in FlowExitStatus.(exit ~msg Heap_full) diff --git a/src/server/server.mli b/src/server/server.mli index b115c76d841..bbe263e349f 100644 --- a/src/server/server.mli +++ b/src/server/server.mli @@ -7,7 +7,7 @@ val check_once : init_id:string -> - shared_mem_config:SharedMem_js.config -> + shared_mem_config:SharedMem.config -> format_errors: (Errors.ConcreteLocPrintableErrorSet.t * (* errors *) @@ -25,7 +25,7 @@ val check_once : val daemonize : init_id:string -> log_file:string -> - 
shared_mem_config:SharedMem_js.config -> + shared_mem_config:SharedMem.config -> argv:string array -> file_watcher_pid:int option -> Options.t -> diff --git a/src/server/serverWorker.mli b/src/server/serverWorker.mli index 273c8420a18..84237fe9eb2 100644 --- a/src/server/serverWorker.mli +++ b/src/server/serverWorker.mli @@ -5,4 +5,4 @@ * LICENSE file in the root directory of this source tree. *) -val make : n:int -> init_id:string -> SharedMem_js.handle -> MultiWorkerLwt.worker list +val make : n:int -> init_id:string -> SharedMem.handle -> MultiWorkerLwt.worker list diff --git a/src/server/server_daemon.ml b/src/server/server_daemon.ml index b492714d239..6e0015ecb27 100644 --- a/src/server/server_daemon.ml +++ b/src/server/server_daemon.ml @@ -9,7 +9,7 @@ open Utils_js module Server_files = Server_files_js type args = { - shared_mem_config: SharedMem_js.config; + shared_mem_config: SharedMem.config; options: Options.t; init_id: string; logging_context: FlowEventLogger.logging_context; @@ -51,7 +51,7 @@ let register_entry_point (main : init_id:string -> monitor_channels:MonitorRPC.channels -> - shared_mem_config:SharedMem_js.config -> + shared_mem_config:SharedMem.config -> Options.t -> unit) : entry_point = Daemon.register_entry_point (new_entry_point ()) (fun args monitor_channels -> diff --git a/src/server/server_daemon.mli b/src/server/server_daemon.mli index f705973c181..fabe08278a1 100644 --- a/src/server/server_daemon.mli +++ b/src/server/server_daemon.mli @@ -10,7 +10,7 @@ type entry_point val register_entry_point : (init_id:string -> monitor_channels:MonitorRPC.channels -> - shared_mem_config:SharedMem_js.config -> + shared_mem_config:SharedMem.config -> Options.t -> unit) -> entry_point @@ -20,7 +20,7 @@ val open_log_file : string -> Unix.file_descr val daemonize : init_id:string -> log_file:string -> - shared_mem_config:SharedMem_js.config -> + shared_mem_config:SharedMem.config -> argv:string array -> options:Options.t -> file_watcher_pid:int option -> 
diff --git a/src/server/shmem/sharedMem_js.ml b/src/server/shmem/sharedMem_js.ml index ddcd368437f..2131443495a 100644 --- a/src/server/shmem/sharedMem_js.ml +++ b/src/server/shmem/sharedMem_js.ml @@ -5,127 +5,114 @@ * LICENSE file in the root directory of this source tree. *) -include SharedMem -module Prefix = Prefix -module Ident = Ident - -module Collect : sig - val with_memory_profiling_lwt : profiling:Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t - - val with_memory_timer_lwt : - ?options:Options.t -> string -> Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t -end = struct - let sample_memory profiling = - let heap = heap_size () in - let { nonempty_slots; used_slots; slots } = hash_stats () in - Profiling_js.sample_memory profiling ~metric:"heap" ~value:(float_of_int heap); - Profiling_js.sample_memory - profiling - ~metric:"hash_nonempty_slots" - ~value:(float_of_int nonempty_slots); - Profiling_js.sample_memory profiling ~metric:"hash_slots" ~value:(float_of_int slots); - Profiling_js.sample_memory profiling ~metric:"hash_used_slots" ~value:(float_of_int used_slots) - - let with_memory_profiling_lwt ~profiling f = - sample_memory profiling; - - let%lwt ret = f () in - - sample_memory profiling; - - Lwt.return ret - - let with_memory_info callback = - let%lwt cgroup_stats = CGroup.get_stats () in - (* Reading hash_stats while workers are writing can cause assertion errors *) - let hash_stats = (try Some (hash_stats ()) with _ -> None) in - let heap_size = heap_size () in - callback ~cgroup_stats ~hash_stats ~heap_size; +let sample_memory profiling = + let heap = SharedMem.heap_size () in + let { SharedMem.nonempty_slots; used_slots; slots } = SharedMem.hash_stats () in + Profiling_js.sample_memory profiling ~metric:"heap" ~value:(float_of_int heap); + Profiling_js.sample_memory + profiling + ~metric:"hash_nonempty_slots" + ~value:(float_of_int nonempty_slots); + Profiling_js.sample_memory profiling ~metric:"hash_slots" ~value:(float_of_int 
slots); + Profiling_js.sample_memory profiling ~metric:"hash_used_slots" ~value:(float_of_int used_slots) + +let with_memory_profiling_lwt ~profiling f = + sample_memory profiling; + + let%lwt ret = f () in + + sample_memory profiling; + + Lwt.return ret + +let with_memory_info callback = + let%lwt cgroup_stats = CGroup.get_stats () in + (* Reading hash_stats while workers are writing can cause assertion errors *) + let hash_stats = (try Some (SharedMem.hash_stats ()) with _ -> None) in + let heap_size = SharedMem.heap_size () in + callback ~cgroup_stats ~hash_stats ~heap_size; + Lwt.return_unit + +module MemorySamplingLoop = LwtLoop.Make (struct + type acc = + cgroup_stats:(CGroup.stats, string) result -> + hash_stats:SharedMem.table_stats option -> + heap_size:int -> + unit + + let main callback = + let%lwt () = with_memory_info callback in + let%lwt () = Lwt_unix.sleep 1.0 in + Lwt.return callback + + let catch _ exn = + Hh_logger.error "Exception in MemorySamplingLoop: %s" (Exception.to_string exn); Lwt.return_unit - - module MemorySamplingLoop = LwtLoop.Make (struct - type acc = - cgroup_stats:(CGroup.stats, string) result -> - hash_stats:table_stats option -> - heap_size:int -> - unit - - let main callback = - let%lwt () = with_memory_info callback in - let%lwt () = Lwt_unix.sleep 1.0 in - Lwt.return callback - - let catch _ exn = - Hh_logger.error "Exception in MemorySamplingLoop: %s" (Exception.to_string exn); - Lwt.return_unit - end) - - let with_memory_timer_lwt = - let module P = Profiling_js in - let clear_worker_memory () = - ["worker_rss_start"; "worker_rss_delta"; "worker_rss_hwm_delta"] |> List.iter Measure.delete +end) + +let with_memory_timer_lwt = + let module P = Profiling_js in + let clear_worker_memory () = + ["worker_rss_start"; "worker_rss_delta"; "worker_rss_hwm_delta"] |> List.iter Measure.delete + in + let profile_add_memory profiling getter group metric = + getter "worker_rss_start" + |> Base.Option.iter ~f:(fun start -> + getter 
"worker_rss_delta" + |> Base.Option.iter ~f:(fun delta -> + getter "worker_rss_hwm_delta" + |> Base.Option.iter ~f:(fun hwm_delta -> + P.add_memory ~group ~metric ~start ~delta ~hwm_delta profiling))) + in + let sample_memory timer profiling ~cgroup_stats ~hash_stats ~heap_size = + P.sample_memory profiling ~group:timer ~metric:"heap" ~value:(float heap_size); + + Base.Option.iter hash_stats ~f:(fun { SharedMem.nonempty_slots; used_slots; slots } -> + P.sample_memory + profiling + ~group:timer + ~metric:"hash_nonempty_slots" + ~value:(float nonempty_slots); + + P.sample_memory profiling ~group:timer ~metric:"hash_used_slots" ~value:(float used_slots); + + P.sample_memory profiling ~group:timer ~metric:"hash_slots" ~value:(float slots)); + + match cgroup_stats with + | Error _ -> () + | Ok { CGroup.total; total_swap; anon; file; shmem } -> + P.sample_memory profiling ~group:timer ~metric:"cgroup_total" ~value:(float total); + + P.sample_memory profiling ~group:timer ~metric:"cgroup_swap" ~value:(float total_swap); + + P.sample_memory profiling ~group:timer ~metric:"cgroup_anon" ~value:(float anon); + + P.sample_memory profiling ~group:timer ~metric:"cgroup_shmem" ~value:(float shmem); + + P.sample_memory profiling ~group:timer ~metric:"cgroup_file" ~value:(float file) + in + fun ?options timer profiling f -> + let should_print = Base.Option.value_map options ~default:false ~f:Options.should_profile in + let sample_memory = sample_memory timer profiling in + clear_worker_memory (); + + (* Record the cgroup info at the start *) + let%lwt () = with_memory_info sample_memory in + (* Asynchronously run a thread that periodically grabs the cgroup stats *) + let sampling_loop = MemorySamplingLoop.run sample_memory in + let%lwt ret = + try%lwt + let%lwt ret = P.with_timer_lwt ~should_print ~timer ~f profiling in + Lwt.cancel sampling_loop; + Lwt.return ret + with exn -> + let exn = Exception.wrap exn in + Lwt.cancel sampling_loop; + Exception.reraise exn in - let 
profile_add_memory profiling getter group metric = - getter "worker_rss_start" - |> Base.Option.iter ~f:(fun start -> - getter "worker_rss_delta" - |> Base.Option.iter ~f:(fun delta -> - getter "worker_rss_hwm_delta" - |> Base.Option.iter ~f:(fun hwm_delta -> - P.add_memory ~group ~metric ~start ~delta ~hwm_delta profiling))) - in - let sample_memory timer profiling ~cgroup_stats ~hash_stats ~heap_size = - P.sample_memory profiling ~group:timer ~metric:"heap" ~value:(float heap_size); - - Base.Option.iter hash_stats ~f:(fun { nonempty_slots; used_slots; slots } -> - P.sample_memory - profiling - ~group:timer - ~metric:"hash_nonempty_slots" - ~value:(float nonempty_slots); - - P.sample_memory profiling ~group:timer ~metric:"hash_used_slots" ~value:(float used_slots); - - P.sample_memory profiling ~group:timer ~metric:"hash_slots" ~value:(float slots)); - - match cgroup_stats with - | Error _ -> () - | Ok { CGroup.total; total_swap; anon; file; shmem } -> - P.sample_memory profiling ~group:timer ~metric:"cgroup_total" ~value:(float total); - - P.sample_memory profiling ~group:timer ~metric:"cgroup_swap" ~value:(float total_swap); - - P.sample_memory profiling ~group:timer ~metric:"cgroup_anon" ~value:(float anon); - - P.sample_memory profiling ~group:timer ~metric:"cgroup_shmem" ~value:(float shmem); - - P.sample_memory profiling ~group:timer ~metric:"cgroup_file" ~value:(float file) - in - fun ?options timer profiling f -> - let should_print = Base.Option.value_map options ~default:false ~f:Options.should_profile in - let sample_memory = sample_memory timer profiling in - clear_worker_memory (); - - (* Record the cgroup info at the start *) - let%lwt () = with_memory_info sample_memory in - (* Asynchronously run a thread that periodically grabs the cgroup stats *) - let sampling_loop = MemorySamplingLoop.run sample_memory in - let%lwt ret = - try%lwt - let%lwt ret = P.with_timer_lwt ~should_print ~timer ~f profiling in - Lwt.cancel sampling_loop; - Lwt.return ret - 
with exn -> - let exn = Exception.wrap exn in - Lwt.cancel sampling_loop; - Exception.reraise exn - in - (* Record the cgroup info at the end *) - let%lwt () = with_memory_info sample_memory in - profile_add_memory profiling Measure.get_mean timer "worker_rss_avg"; - profile_add_memory profiling Measure.get_max timer "worker_rss_max"; - clear_worker_memory (); - Lwt.return ret -end - -include Collect + (* Record the cgroup info at the end *) + let%lwt () = with_memory_info sample_memory in + profile_add_memory profiling Measure.get_mean timer "worker_rss_avg"; + profile_add_memory profiling Measure.get_max timer "worker_rss_max"; + clear_worker_memory (); + Lwt.return ret diff --git a/src/server/shmem/sharedMem_js.mli b/src/server/shmem/sharedMem_js.mli new file mode 100644 index 00000000000..97dce33fa43 --- /dev/null +++ b/src/server/shmem/sharedMem_js.mli @@ -0,0 +1,11 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +val with_memory_profiling_lwt : profiling:Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t + +val with_memory_timer_lwt : + ?options:Options.t -> string -> Profiling_js.running -> (unit -> 'a Lwt.t) -> 'a Lwt.t diff --git a/src/services/inference/__tests__/types_js_test.ml b/src/services/inference/__tests__/types_js_test.ml index 37382d32ca4..c18d5994e7e 100644 --- a/src/services/inference/__tests__/types_js_test.ml +++ b/src/services/inference/__tests__/types_js_test.ml @@ -209,9 +209,9 @@ let include_dependencies_and_dependents (* There is memory sampling embedded throughout the code under test. It polls the shared memory * system to get information about its usage. If the shared memory system is not initialized, we get * crashes, so we have to initialize it before running tests. 
*) -let sharedmem_config = { SharedMem_js.heap_size = 1024 * 1024; hash_table_pow = 19; log_level = 0 } +let sharedmem_config = { SharedMem.heap_size = 1024 * 1024; hash_table_pow = 19; log_level = 0 } -let _ = SharedMem_js.init sharedmem_config ~num_workers:1 +let _ = SharedMem.init sharedmem_config ~num_workers:1 let tests = "types_js" diff --git a/src/services/inference/merge_service.ml b/src/services/inference/merge_service.ml index 09cf10a0fb3..64fda779da5 100644 --- a/src/services/inference/merge_service.ml +++ b/src/services/inference/merge_service.ml @@ -605,8 +605,8 @@ let merge_job ~worker_mutator ~reader ~job ~options merged elements = ); (Nel.hd component, ret) :: merged) with - | (SharedMem_js.Out_of_shared_memory | SharedMem_js.Heap_full | SharedMem_js.Hash_table_full) - as exc -> + | (SharedMem.Out_of_shared_memory | SharedMem.Heap_full | SharedMem.Hash_table_full) as exc + -> raise exc (* A catch all suppression is probably a bad idea... *) | unwrapped_exc -> @@ -734,8 +734,7 @@ let check options ~reader file = raise (Error_message.ECheckTimeout (run_time, file_str)))) ~f:(fun () -> Ok (check_file options ~reader file)) with - | (SharedMem_js.Out_of_shared_memory | SharedMem_js.Heap_full | SharedMem_js.Hash_table_full) as - exc -> + | (SharedMem.Out_of_shared_memory | SharedMem.Heap_full | SharedMem.Hash_table_full) as exc -> raise exc (* A catch all suppression is probably a bad idea... 
*) | unwrapped_exc -> diff --git a/src/state/heaps/context/context_heaps.ml b/src/state/heaps/context/context_heaps.ml index babfc0b6f2c..ffbda696076 100644 --- a/src/state/heaps/context/context_heaps.ml +++ b/src/state/heaps/context/context_heaps.ml @@ -10,7 +10,7 @@ open Utils_js (****************** shared context heap *********************) module SigContextHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = Context.sig_t @@ -29,7 +29,7 @@ let add_sig ~audit cx = add_sig_context ~audit cx_file (Context.sig_cx cx)) module SigHashHeap = - SharedMem_js.NoCache + SharedMem.NoCache (File_key) (struct type t = Xx.hash @@ -38,7 +38,7 @@ module SigHashHeap = end) module LeaderHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = File_key.t diff --git a/src/state/heaps/diffing/diff_heaps.ml b/src/state/heaps/diffing/diff_heaps.ml index 8593baf2350..1d64ab7b6e1 100644 --- a/src/state/heaps/diffing/diff_heaps.ml +++ b/src/state/heaps/diffing/diff_heaps.ml @@ -14,7 +14,7 @@ type patch = (int * int * string) list type key = File_key.t module DiffPatchHeap = - SharedMem_js.NoCache + SharedMem.NoCache (File_key) (struct type t = patch diff --git a/src/state/heaps/module/module_heaps.ml b/src/state/heaps/module/module_heaps.ml index 59797c8bdfa..6d34153f44a 100644 --- a/src/state/heaps/module/module_heaps.ml +++ b/src/state/heaps/module/module_heaps.ml @@ -9,7 +9,7 @@ (* Maps module names to the filenames which provide those modules *) module NameHeap = - SharedMem_js.WithCache + SharedMem.WithCache (Modulename.Key) (struct type t = File_key.t @@ -53,7 +53,7 @@ let mk_resolved_requires ~resolved_modules ~phantom_dependents = { resolved_modules; phantom_dependents; hash = Xx.digest state } module ResolvedRequiresHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = resolved_requires @@ -74,7 +74,7 @@ type info = { } module InfoHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) 
(struct type t = info diff --git a/src/state/heaps/package/package_heaps.ml b/src/state/heaps/package/package_heaps.ml index 77c7f9ae6be..ed278dead98 100644 --- a/src/state/heaps/package/package_heaps.ml +++ b/src/state/heaps/package/package_heaps.ml @@ -11,7 +11,7 @@ (* shared heap for package.json tokens by filename *) module PackageHeap = - SharedMem_js.WithCache + SharedMem.WithCache (StringKey) (struct type t = (Package_json.t, unit) result @@ -21,7 +21,7 @@ module PackageHeap = (* shared heap for package.json directories by package name *) module ReversePackageHeap = - SharedMem_js.WithCache + SharedMem.WithCache (StringKey) (struct type t = string diff --git a/src/state/heaps/parsing/parsing_heaps.ml b/src/state/heaps/parsing/parsing_heaps.ml index 356b8822393..6897d2e6658 100644 --- a/src/state/heaps/parsing/parsing_heaps.ml +++ b/src/state/heaps/parsing/parsing_heaps.ml @@ -10,7 +10,7 @@ open Parsing_heaps_exceptions (* shared heap for parsed ASTs by filename *) module ASTHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = (RelativeLoc.t, RelativeLoc.t) Flow_ast.Program.t @@ -19,7 +19,7 @@ module ASTHeap = end) module SigASTHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = (ALoc.t, ALoc.t) Flow_ast.Program.t @@ -28,7 +28,7 @@ module SigASTHeap = end) module SigASTALocTableHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = ALoc.table @@ -39,7 +39,7 @@ module SigASTALocTableHeap = type type_sig = Type_sig_collections.Locs.index Packed_type_sig.t module TypeSigHeap = - SharedMem_js.NoCache + SharedMem.NoCache (File_key) (struct type t = type_sig @@ -102,7 +102,7 @@ let decompactify_loc file ast = (loc_decompactifier (Some file))#program ast let add_source_aloc file ast = (source_adder_aloc (Some file))#program ast module DocblockHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = Docblock.t @@ -111,7 +111,7 @@ module DocblockHeap = end) 
module FileSigHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = File_sig.With_Loc.t @@ -120,7 +120,7 @@ module FileSigHeap = end) module SigFileSigHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct type t = File_sig.With_ALoc.t @@ -130,7 +130,7 @@ module SigFileSigHeap = (* Contains the hash for every file we even consider parsing *) module FileHashHeap = - SharedMem_js.WithCache + SharedMem.WithCache (File_key) (struct (* In the future I imagine a system like this: diff --git a/src/typing/__tests__/typing_tests.ml b/src/typing/__tests__/typing_tests.ml index dadd77b5588..3456557fbe9 100644 --- a/src/typing/__tests__/typing_tests.ml +++ b/src/typing/__tests__/typing_tests.ml @@ -13,6 +13,6 @@ let tests = let _handle = let one_gig = 1024 * 1024 * 1024 in - SharedMem_js.(init ~num_workers:0 { heap_size = 5 * one_gig; hash_table_pow = 19; log_level = 0 }) + SharedMem.(init ~num_workers:0 { heap_size = 5 * one_gig; hash_table_pow = 19; log_level = 0 }) let () = run_test_tt_main tests From b19631593b1c7a7ad8f2f58e4661d29ca9a6b0b6 Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Wed, 9 Dec 2020 13:56:00 -0800 Subject: [PATCH 40/43] Drop unnecessary exception handler Summary: The exception mentioned in the deleted comment is no longer possible after D16939688 (https://github.com/facebook/flow/commit/9c04b09a5f4729da86a7b2546fca711c26eba538), so we can remove the error handler. As is, the hash_stats function can not possibly raise an exception. 
Reviewed By: nmote Differential Revision: D25280254 fbshipit-source-id: cda353a3046a319c0167205a482fb9a8aa8023c9 --- src/server/shmem/sharedMem_js.ml | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/src/server/shmem/sharedMem_js.ml b/src/server/shmem/sharedMem_js.ml index 2131443495a..355f004cd70 100644 --- a/src/server/shmem/sharedMem_js.ml +++ b/src/server/shmem/sharedMem_js.ml @@ -27,8 +27,7 @@ let with_memory_profiling_lwt ~profiling f = let with_memory_info callback = let%lwt cgroup_stats = CGroup.get_stats () in - (* Reading hash_stats while workers are writing can cause assertion errors *) - let hash_stats = (try Some (SharedMem.hash_stats ()) with _ -> None) in + let hash_stats = SharedMem.hash_stats () in let heap_size = SharedMem.heap_size () in callback ~cgroup_stats ~hash_stats ~heap_size; Lwt.return_unit @@ -36,7 +35,7 @@ let with_memory_info callback = module MemorySamplingLoop = LwtLoop.Make (struct type acc = cgroup_stats:(CGroup.stats, string) result -> - hash_stats:SharedMem.table_stats option -> + hash_stats:SharedMem.table_stats -> heap_size:int -> unit @@ -67,16 +66,16 @@ let with_memory_timer_lwt = let sample_memory timer profiling ~cgroup_stats ~hash_stats ~heap_size = P.sample_memory profiling ~group:timer ~metric:"heap" ~value:(float heap_size); - Base.Option.iter hash_stats ~f:(fun { SharedMem.nonempty_slots; used_slots; slots } -> - P.sample_memory - profiling - ~group:timer - ~metric:"hash_nonempty_slots" - ~value:(float nonempty_slots); + let { SharedMem.nonempty_slots; used_slots; slots } = hash_stats in + P.sample_memory + profiling + ~group:timer + ~metric:"hash_nonempty_slots" + ~value:(float nonempty_slots); - P.sample_memory profiling ~group:timer ~metric:"hash_used_slots" ~value:(float used_slots); + P.sample_memory profiling ~group:timer ~metric:"hash_used_slots" ~value:(float used_slots); - P.sample_memory profiling ~group:timer ~metric:"hash_slots" ~value:(float slots)); + 
P.sample_memory profiling ~group:timer ~metric:"hash_slots" ~value:(float slots); match cgroup_stats with | Error _ -> () From ea5eb02f29898db0c96601ef9c04ead282acf93b Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Wed, 9 Dec 2020 13:56:00 -0800 Subject: [PATCH 41/43] Move and rename SharedMem_js module Summary: In a previous diff, I removed the wrapping behavior of this module. After this change, the module only exports a couple of useful memory logging utilities. This diff simply moves the file to a new Memory_utils module in the inference target, where the functions are most commonly used. Faceook: I suppose feel free to bikeshed on this diff. Reviewed By: nmote Differential Revision: D25280249 fbshipit-source-id: de1b9502bbf8a5cd0d6c5361070c2373e6263b09 --- src/codemods/utils/codemod_runner.ml | 2 +- src/server/shmem/dune | 9 --- .../inference/memory_utils.ml} | 0 .../inference/memory_utils.mli} | 0 src/services/inference/types_js.ml | 56 +++++++++---------- 5 files changed, 29 insertions(+), 38 deletions(-) delete mode 100644 src/server/shmem/dune rename src/{server/shmem/sharedMem_js.ml => services/inference/memory_utils.ml} (100%) rename src/{server/shmem/sharedMem_js.mli => services/inference/memory_utils.mli} (100%) diff --git a/src/codemods/utils/codemod_runner.ml b/src/codemods/utils/codemod_runner.ml index e5874f6691e..9d7967291e4 100644 --- a/src/codemods/utils/codemod_runner.ml +++ b/src/codemods/utils/codemod_runner.ml @@ -389,7 +389,7 @@ module TypedRunnerWithPrepass (C : TYPED_RUNNER_WITH_PREPASS_CONFIG) : TYPED_RUN (* Calculate dependencies that need to be merged *) let%lwt (sig_dependency_graph, component_map, files_to_merge, files_to_check) = let get_dependent_files sig_dependency_graph implementation_dependency_graph roots = - SharedMem_js.with_memory_timer_lwt ~options "AllDependentFiles" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "AllDependentFiles" profiling (fun () -> Lwt.return 
(Pure_dep_graph_operations.calc_all_dependents ~sig_dependency_graph diff --git a/src/server/shmem/dune b/src/server/shmem/dune deleted file mode 100644 index 818aaf42766..00000000000 --- a/src/server/shmem/dune +++ /dev/null @@ -1,9 +0,0 @@ -(library - (name flow_shared_mem) - (wrapped false) - (libraries - flow_monitor_rpc - heap_shared_mem ; hack - ) - (preprocess (pps lwt_ppx)) -) diff --git a/src/server/shmem/sharedMem_js.ml b/src/services/inference/memory_utils.ml similarity index 100% rename from src/server/shmem/sharedMem_js.ml rename to src/services/inference/memory_utils.ml diff --git a/src/server/shmem/sharedMem_js.mli b/src/services/inference/memory_utils.mli similarity index 100% rename from src/server/shmem/sharedMem_js.mli rename to src/services/inference/memory_utils.mli diff --git a/src/services/inference/types_js.ml b/src/services/inference/types_js.ml index 4ecfbd79137..ad524d438b4 100644 --- a/src/services/inference/types_js.ml +++ b/src/services/inference/types_js.ml @@ -116,12 +116,12 @@ let collate_parse_results ~options parse_results = (parse_ok, unparsed, parse_unchanged, local_errors, parse_package_json) let parse ~options ~profiling ~workers ~reader parse_next = - SharedMem_js.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> let%lwt results = Parsing_service_js.parse_with_defaults ~reader options workers parse_next in Lwt.return (collate_parse_results ~options results)) let reparse ~options ~profiling ~transaction ~reader ~workers ~modified ~deleted = - SharedMem_js.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> let%lwt (new_or_changed, results) = Parsing_service_js.reparse_with_defaults ~transaction @@ -213,7 +213,7 @@ let (commit_modules, commit_modules_from_saved_state) = ~new_or_changed = (* conservatively approximate set of modules whose providers will 
change *) (* register providers for modules, warn on dupes etc. *) - SharedMem_js.with_memory_timer_lwt ~options "CommitModules" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "CommitModules" profiling (fun () -> let all_files_set = FilenameSet.union (FilenameSet.union parsed_set unparsed_set) deleted in let mutator = Module_heaps.Introduce_files_mutator.create transaction all_files_set in let%lwt new_modules = @@ -303,7 +303,7 @@ let resolve_requires ~transaction ~reader ~options ~profiling ~workers ~parsed ~ (changed1 || changed2, FilenameMap.union errors1 errors2) in let%lwt (resolved_requires_changed, errors) = - SharedMem_js.with_memory_timer_lwt ~options "ResolveRequires" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "ResolveRequires" profiling (fun () -> MultiWorkerLwt.call workers ~job: @@ -381,7 +381,7 @@ let error_set_of_internal_error file (loc, internal_error) = |> Flow_error.ErrorSet.singleton let calc_deps ~options ~profiling ~sig_dependency_graph ~components to_merge = - SharedMem_js.with_memory_timer_lwt ~options "CalcDeps" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "CalcDeps" profiling (fun () -> let sig_dependency_graph = Pure_dep_graph_operations.filter_dependency_graph sig_dependency_graph to_merge in @@ -416,7 +416,7 @@ let include_dependencies_and_dependents ~sig_dependency_graph ~sig_dependent_files ~all_dependent_files = - SharedMem_js.with_memory_timer_lwt ~options "PruneDeps" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "PruneDeps" profiling (fun () -> (* We need to run the check phase on the entire input set as well as all_dependent_files. * We'll calculate the set of files we need to merge based on this. 
*) let to_check = CheckedSet.add ~dependents:all_dependent_files input in @@ -528,7 +528,7 @@ let run_merge_service ~component_map ~recheck_set acc = - SharedMem_js.with_memory_timer_lwt ~options "Merge" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "Merge" profiling (fun () -> let%lwt (merged, { Merge_service.skipped_count; sig_new_or_changed }) = Merge_service.merge ~master_mutator @@ -568,7 +568,7 @@ let mk_intermediate_result_callback match persistent_connections with | None -> Lwt.return (fun _ -> ()) | Some clients -> - SharedMem_js.with_memory_timer_lwt ~options "MakeSendErrors" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "MakeSendErrors" profiling (fun () -> (* In classic, each merge step uncovers new errors, warnings, suppressions. While more suppressions may come in later steps, the suppressions we've seen so far are sufficient to filter the errors and warnings we've seen so far. @@ -771,7 +771,7 @@ let merge in let%lwt () = if Options.should_profile options then - SharedMem_js.with_memory_timer_lwt ~options "PrintGCStats" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "PrintGCStats" profiling (fun () -> Lwt.return (Gc.print_stat stderr)) else Lwt.return_unit @@ -938,7 +938,7 @@ end = struct match Options.arch options with | Options.Classic -> Lwt.return (updated_errors, coverage, 0., 0, None, None, None) | Options.TypesFirst _ -> - SharedMem_js.with_memory_timer_lwt ~options "Check" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "Check" profiling (fun () -> Hh_logger.info "Check prep"; Hh_logger.info "new or changed signatures: %d" (FilenameSet.cardinal sig_new_or_changed); let focused_to_check = CheckedSet.focused to_check in @@ -1038,7 +1038,7 @@ let handle_unexpected_file_changes changed_files = raise Lwt.Canceled let ensure_parsed ~options ~profiling ~workers ~reader files = - SharedMem_js.with_memory_timer_lwt ~options "EnsureParsed" profiling (fun () -> + 
Memory_utils.with_memory_timer_lwt ~options "EnsureParsed" profiling (fun () -> (* The set of files that we expected to parse, but were skipped, either because they had * changed since the last recheck or no longer exist on disk. This is in contrast to files * that were skipped intentionally because they are not @flow, or because they are resource @@ -1190,7 +1190,7 @@ let typecheck_contents ~options ~env ~profiling contents filename = let reader = State_reader.create () in let lazy_table_of_aloc = Parsing_heaps.Reader.get_sig_ast_aloc_table_unsafe_lazy ~reader in let%lwt (parse_result, info) = - SharedMem_js.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> Lwt.return (parse_contents ~options ~check_syntax:true filename contents)) in (* override docblock info *) @@ -1198,7 +1198,7 @@ let typecheck_contents ~options ~env ~profiling contents filename = match parse_result with | Ok (ast, file_sig, tolerable_errors, _parse_errors) -> let%lwt (cx, typed_ast) = - SharedMem_js.with_memory_timer_lwt ~options "MergeContents" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "MergeContents" profiling (fun () -> merge_contents ~options ~env ~reader filename info (ast, file_sig)) in let (errors, warnings) = @@ -1215,7 +1215,7 @@ let type_contents ~options ~env ~profiling contents filename = try%lwt let reader = State_reader.create () in let%lwt (parse_result, info) = - SharedMem_js.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "Parsing" profiling (fun () -> Lwt.return (parse_contents ~options ~check_syntax:false filename contents)) in (* override docblock info *) @@ -1223,7 +1223,7 @@ let type_contents ~options ~env ~profiling contents filename = match parse_result with | Ok (ast, file_sig, tolerable_errors, parse_errors) -> let%lwt (cx, typed_ast) = - SharedMem_js.with_memory_timer_lwt ~options "MergeContents" 
profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "MergeContents" profiling (fun () -> merge_contents ~options ~env ~reader filename info (ast, file_sig)) in Lwt.return (Ok (cx, info, file_sig, tolerable_errors, ast, typed_ast, parse_errors)) @@ -1237,7 +1237,7 @@ let type_contents ~options ~env ~profiling contents filename = Lwt.return (Error e) let init_libs ~options ~profiling ~local_errors ~warnings ~suppressions ~reader ordered_libs = - SharedMem_js.with_memory_timer_lwt ~options "InitLibs" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "InitLibs" profiling (fun () -> let%lwt lib_files = let options = match Options.verbose options with @@ -1363,7 +1363,7 @@ let unfocused_files_and_dependents_to_infer In either case, we can consider the result to be "closed" in terms of expected invariants. *) let files_to_infer ~options ~profiling ~reader ~dependency_info ?focus_targets ~parsed = - SharedMem_js.with_memory_timer_lwt ~options "FilesToInfer" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "FilesToInfer" profiling (fun () -> match focus_targets with | None -> unfocused_files_and_dependents_to_infer @@ -1799,7 +1799,7 @@ end = struct let all_providers_mutator = Module_hashtables.All_providers_mutator.create transaction in (* clear out records of files, and names of modules provided by those files *) let%lwt old_modules = - SharedMem_js.with_memory_timer_lwt ~options "ModuleClearFiles" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "ModuleClearFiles" profiling (fun () -> Module_js.calc_old_modules ~reader workers @@ -1840,7 +1840,7 @@ end = struct (* Figure out which modules the unchanged forced files provide. 
We need these to figure out * which dependents need to be added to the checked set *) let%lwt unchanged_modules = - SharedMem_js.with_memory_timer_lwt ~options "CalcUnchangedModules" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "CalcUnchangedModules" profiling (fun () -> Module_js.calc_unchanged_modules ~reader workers unchanged_files_with_dependents) in let parsed = FilenameSet.union freshparsed unchanged in @@ -1848,7 +1848,7 @@ end = struct or are new / changed files that are phantom dependents. all_dependent_files are direct_dependent_files plus their dependents (transitive closure) *) let%lwt direct_dependent_files = - SharedMem_js.with_memory_timer_lwt ~options "DirectDependentFiles" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "DirectDependentFiles" profiling (fun () -> let root_files = FilenameSet.union new_or_changed unchanged_files_with_dependents in DirectDependentFilesCache.with_cache ~root_files @@ -1870,7 +1870,7 @@ end = struct Module_heaps.Resolved_requires_mutator.create transaction direct_dependent_files in let%lwt resolved_requires_changed_in_reresolve_direct_dependents = - SharedMem_js.with_memory_timer_lwt ~options "ReresolveDirectDependents" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "ReresolveDirectDependents" profiling (fun () -> let%lwt resolved_requires_changed = MultiWorkerLwt.call workers @@ -1900,7 +1900,7 @@ end = struct in Hh_logger.info "Recalculating dependency graph"; let%lwt dependency_info = - SharedMem_js.with_memory_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> let files_to_update_dependency_info = FilenameSet.union freshparsed direct_dependent_files in @@ -1961,7 +1961,7 @@ end = struct ~unchanged_files_to_force ~direct_dependent_files = let%lwt (sig_dependent_files, all_dependent_files) = - SharedMem_js.with_memory_timer_lwt ~options "AllDependentFiles" 
profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "AllDependentFiles" profiling (fun () -> Lwt.return (Pure_dep_graph_operations.calc_all_dependents ~sig_dependency_graph @@ -1972,7 +1972,7 @@ end = struct FilenameSet.union freshparsed (CheckedSet.all unchanged_files_to_force) in let%lwt (updated_checked_files, sig_dependent_files, all_dependent_files) = - SharedMem_js.with_memory_timer_lwt ~options "RecalcDepGraph" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "RecalcDepGraph" profiling (fun () -> match Options.lazy_mode options with | Options.NON_LAZY_MODE (* Non lazy mode treats every file as focused. *) @@ -2322,7 +2322,7 @@ let recheck ~recheck_reasons ~will_be_checked_files = let%lwt (env, stats, first_internal_error) = - SharedMem_js.with_memory_profiling_lwt ~profiling (fun () -> + Memory_utils.with_memory_profiling_lwt ~profiling (fun () -> with_transaction (fun transaction reader -> Recheck.full ~profiling @@ -2470,7 +2470,7 @@ let init_from_saved_state ~profiling ~workers ~saved_state ~updates options = Hh_logger.info "Restoring heaps"; let%lwt () = - SharedMem_js.with_memory_timer_lwt ~options "RestoreHeaps" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "RestoreHeaps" profiling (fun () -> let%lwt () = MultiWorkerLwt.call workers @@ -2525,7 +2525,7 @@ let init_from_saved_state ~profiling ~workers ~saved_state ~updates options = MonitorRPC.status_update ServerStatus.Resolving_dependencies_progress; let%lwt (parsed_set, unparsed_set, all_files, parsed, unparsed) = - SharedMem_js.with_memory_timer_lwt ~options "PrepareCommitModules" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "PrepareCommitModules" profiling (fun () -> let (parsed, parsed_set) = List.fold_left (fun (parsed, parsed_set) (fn, data) -> @@ -2701,7 +2701,7 @@ let init_from_scratch ~profiling ~workers options = ~is_init:true in let%lwt dependency_info = - SharedMem_js.with_memory_timer_lwt ~options 
"CalcDepsTypecheck" profiling (fun () -> + Memory_utils.with_memory_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> Dep_service.calc_dependency_info ~options ~reader workers ~parsed) in let env = From 81f2bfe00768a6502d75e9f289919c5bc31ecb9e Mon Sep 17 00:00:00 2001 From: Sam Goldman Date: Wed, 9 Dec 2020 13:56:00 -0800 Subject: [PATCH 42/43] Drop "gentle" collection mode Summary: We no longer need two modes. Now that collection only occurs between rechecks, we only use the "aggressive" mode, which tries to keep the amount of garbage in the heap to less than 20%. The incremental GC will also strive to keep the amount of garbage less than 20%. Removing the gentle mode now makes it clear that the incremental GC is behavior preserving. It might make sense to make the max overhead configurable, but I'm leaving that for future work. Reviewed By: nmote Differential Revision: D25340133 fbshipit-source-id: b780038dac552be9d10356854e61f2e9831de952 --- .../test/unit/heap/test_hashtbl.ml | 49 ++++--------------- src/heap/sharedMem.ml | 2 - src/heap/sharedMem.mli | 1 - 3 files changed, 9 insertions(+), 43 deletions(-) diff --git a/src/hack_forked/test/unit/heap/test_hashtbl.ml b/src/hack_forked/test/unit/heap/test_hashtbl.ml index 4526f43fc1f..11c4ff82533 100644 --- a/src/hack_forked/test/unit/heap/test_hashtbl.ml +++ b/src/hack_forked/test/unit/heap/test_hashtbl.ml @@ -84,20 +84,16 @@ let expect_get key expected = ~msg:(Printf.sprintf "Expected key '%s' to have value '%s', got '%s" key expected value) (value = expected) -let expect_compact effort expected = +let expect_compact expected = let old_cb = !SharedMem.on_compact in let actual = ref false in (SharedMem.on_compact := (fun _ _ -> actual := true)); - SharedMem.collect effort; + SharedMem.collect `aggressive; SharedMem.on_compact := old_cb; expect ~msg: (Printf.sprintf - "Expected %s collection to be %sneeded" - (match effort with - | `gentle -> "gentle" - | `aggressive -> "aggressive" - | `always_TEST -> 
"always") + "Expected collection to be %sneeded" ( if expected then "" else @@ -224,43 +220,20 @@ let test_gc_collect () = add "1" "1"; (* no memory is wasted *) - expect_compact `gentle false; - expect_compact `aggressive false; + expect_compact false; expect_heap_size 2; expect_mem "0"; expect_mem "1"; + + (* Removing an element does not decrease used heap size *) remove "1"; expect_heap_size 2; (* Garbage collection should remove the space taken by the removed element *) - expect_compact `gentle true; + expect_compact true; expect_heap_size 1; expect_mem "0" -(* Test aggresive garbage collection versus gentle *) -let test_gc_aggressive () = - expect_stats ~nonempty:0 ~used:0; - add "0" "0"; - add "1" "1"; - expect_heap_size 2; - - (* Since latest heap size is zero, - now it should gc, but theres nothing to gc, - so the heap will stay the same *) - expect_compact `gentle false; - expect_heap_size 2; - remove "1"; - add "2" "2"; - expect_heap_size 3; - - (* Gentle garbage collection shouldn't catch this *) - expect_compact `gentle false; - expect_heap_size 3; - - (* Aggressive garbage collection should run *) - expect_compact `aggressive true; - expect_heap_size 2 - let test_heapsize_decrease () = expect_stats ~nonempty:0 ~used:0; add "0" "0"; @@ -274,17 +247,14 @@ let test_heapsize_decrease () = add "4" "4"; add "5" "5"; expect_heap_size 6; - (* This runs because 6 >= 2*3 *) - SharedMem.collect `gentle; + expect_compact true; expect_heap_size 3; add "0" "0"; add "1" "1"; remove "4"; remove "5"; expect_heap_size 5; - (* Aggressive collection should kick in, - * because 5 >= 1.2*3 *) - SharedMem.collect `aggressive; + expect_compact true; expect_heap_size 3; () @@ -314,7 +284,6 @@ let tests () = ("test_no_overwrite", test_no_overwrite); ("test_reuse_slots", test_reuse_slots); ("test_gc_collect", test_gc_collect); - ("test_gc_aggressive", test_gc_aggressive); ("test_heapsize_decrease", test_heapsize_decrease); ("test_full", test_full); ] diff --git 
a/src/heap/sharedMem.ml b/src/heap/sharedMem.ml index ac54c2d5427..568564ae1d6 100644 --- a/src/heap/sharedMem.ml +++ b/src/heap/sharedMem.ml @@ -32,7 +32,6 @@ type _ addr = int type effort = [ `aggressive | `always_TEST - | `gentle ] let heap_ref : heap option ref = ref None @@ -118,7 +117,6 @@ let should_collect effort = match effort with | `always_TEST -> 1.0 | `aggressive -> 1.2 - | `gentle -> 2.0 in let used = heap_size () in let wasted = wasted_heap_size () in diff --git a/src/heap/sharedMem.mli b/src/heap/sharedMem.mli index dc7fe1bf33d..b44bfd925ae 100644 --- a/src/heap/sharedMem.mli +++ b/src/heap/sharedMem.mli @@ -21,7 +21,6 @@ type 'k addr [@@immediate] type effort = [ `aggressive | `always_TEST - | `gentle ] exception Out_of_shared_memory From 215ac06533dbaa448c9f06fa4390a8e619cb0077 Mon Sep 17 00:00:00 2001 From: Daniel Sainati Date: Wed, 9 Dec 2020 16:09:40 -0800 Subject: [PATCH 43/43] when promoting primitives on method calls, also promote the method call's this argument Summary: When calling methods on primitive types like arrays, number or strings that are present in their wrapper object, we promote the primitive to an instance of the wrapper class. We do not, however, similarly promote the `this` argument to the method call itself. This did not matter before `this` parameters were more strict than simply `any`, however, in a world where the `this` parameter of a method is the type of the class, we need to correctly promote the primitive `this` argument to an instance of its wrapper object as well. This also has the nice side effect of improving type inference for array methods, which would infer a union with `any` as a result of the `any`-type of the method's call. By replacing this with the applied instance of the array wrapper class, we can produce a narrower type. 
Reviewed By: mvitousek Differential Revision: D24788676 fbshipit-source-id: 16b7495d3c7b259564c375f942983c3c1ae7d881 --- src/typing/flow_js.ml | 34 +++++++++++++++++-- src/typing/type.ml | 4 +++ .../annotate_exports_empty_array.exp | 6 ++-- tests/arrays/arrays.exp | 16 ++++++++- tests/arrays/map.js | 10 ++++++ .../autofix_empty_array.exp | 2 +- 6 files changed, 64 insertions(+), 8 deletions(-) create mode 100644 tests/arrays/map.js diff --git a/src/typing/flow_js.ml b/src/typing/flow_js.ml index 9c70575862d..1de6efc35a8 100644 --- a/src/typing/flow_js.ml +++ b/src/typing/flow_js.ml @@ -7197,9 +7197,14 @@ struct (**********************) (* Array library call *) (**********************) - | ( DefT (reason, _, ArrT (ArrayAT (t, _))), - (GetPropT _ | SetPropT _ | MethodT _ | LookupT _) ) -> + | (DefT (reason, _, ArrT (ArrayAT (t, _))), (GetPropT _ | SetPropT _ | LookupT _)) -> rec_flow cx trace (get_builtin_typeapp cx ~trace reason "Array" [t], u) + | ( DefT (reason, _, ArrT (ArrayAT (t, _))), + MethodT (use_op, call_r, lookup_r, propref, action, t_opt) ) -> + let arr_t = get_builtin_typeapp cx ~trace reason "Array" [t] in + (* Substitute the typeapp for the array primitive in the method call's `this` position *) + let action = replace_this_t_in_method_action arr_t action in + rec_flow cx trace (arr_t, MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) (*************************) (* Tuple "length" access *) (*************************) @@ -7212,27 +7217,50 @@ struct let t = tuple_length reason trust ts in rec_flow_t cx trace ~use_op:unknown_use (reposition cx ~trace loc t, OpenT tout) | ( DefT (reason, _, ArrT ((TupleAT _ | ROArrayAT _) as arrtype)), - (GetPropT _ | SetPropT _ | MethodT _ | LookupT _) ) -> + (GetPropT _ | SetPropT _ | LookupT _) ) -> let t = elemt_of_arrtype arrtype in rec_flow cx trace (get_builtin_typeapp cx ~trace reason "$ReadOnlyArray" [t], u) + | ( DefT (reason, _, ArrT ((TupleAT _ | ROArrayAT _) as arrtype)), + MethodT (use_op, call_r, 
lookup_r, propref, action, t_opt) ) -> + let t = elemt_of_arrtype arrtype in + let arr_t = get_builtin_typeapp cx ~trace reason "$ReadOnlyArray" [t] in + (* Substitute the typeapp for the array primitive in the method call's `this` position *) + let action = replace_this_t_in_method_action arr_t action in + rec_flow cx trace (arr_t, MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) (***********************) (* String library call *) (***********************) + | (DefT (reason, _, StrT _), MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) -> + let promoted = get_builtin_type cx ~trace reason "String" in + let action = replace_this_t_in_method_action promoted action in + rec_flow cx trace (promoted, MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) | (DefT (reason, _, StrT _), u) when primitive_promoting_use_t u -> rec_flow cx trace (get_builtin_type cx ~trace reason "String", u) (***********************) (* Number library call *) (***********************) + | (DefT (reason, _, NumT _), MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) -> + let promoted = get_builtin_type cx ~trace reason "Number" in + let action = replace_this_t_in_method_action promoted action in + rec_flow cx trace (promoted, MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) | (DefT (reason, _, NumT _), u) when primitive_promoting_use_t u -> rec_flow cx trace (get_builtin_type cx ~trace reason "Number", u) (***********************) (* Boolean library call *) (***********************) + | (DefT (reason, _, BoolT _), MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) -> + let promoted = get_builtin_type cx ~trace reason "Boolean" in + let action = replace_this_t_in_method_action promoted action in + rec_flow cx trace (promoted, MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) | (DefT (reason, _, BoolT _), u) when primitive_promoting_use_t u -> rec_flow cx trace (get_builtin_type cx ~trace reason "Boolean", u) 
(***********************) (* Symbol library call *) (***********************) + | (DefT (reason, _, SymbolT), MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) -> + let promoted = get_builtin_type cx ~trace reason "Symbol" in + let action = replace_this_t_in_method_action promoted action in + rec_flow cx trace (promoted, MethodT (use_op, call_r, lookup_r, propref, action, t_opt)) | (DefT (reason, _, SymbolT), u) when primitive_promoting_use_t u -> rec_flow cx trace (get_builtin_type cx ~trace reason "Symbol", u) (*****************************************************) diff --git a/src/typing/type.ml b/src/typing/type.ml index 014e24fca89..79829bf7842 100644 --- a/src/typing/type.ml +++ b/src/typing/type.ml @@ -2827,6 +2827,10 @@ let primitive_promoting_use_t = function (* TODO: enumerate all use types *) | _ -> false +let replace_this_t_in_method_action call_this_t = function + | CallM fct -> CallM { fct with call_this_t } + | ChainM (r1, r2, t, fct, tout) -> ChainM (r1, r2, t, { fct with call_this_t }, tout) + let rec fold_use_op f1 f2 = function | Op root -> f1 root | Frame (frame, use_op) -> diff --git a/tests/annotate_exports_empty_array/annotate_exports_empty_array.exp b/tests/annotate_exports_empty_array/annotate_exports_empty_array.exp index 6341c3e8305..88dcf8553cd 100644 --- a/tests/annotate_exports_empty_array/annotate_exports_empty_array.exp +++ b/tests/annotate_exports_empty_array/annotate_exports_empty_array.exp @@ -6,7 +6,7 @@ const a0: Array<$FlowFixMeEmpty> = []; -const a1: Array<$FlowFixMe | number> = []; +const a1: Array = []; a1.push(1); const a2: Array = []; @@ -22,7 +22,7 @@ module.exports = { a0, a1, a2, a3 }; Stats: Files changed: 1 Number of annotations added: 3 - Total size of annotations: 10 + Total size of annotations: 8 Number of sig. ver. 
errors: 4 Number of annotations required: 4 Number of annotations skipped: 0 @@ -56,7 +56,7 @@ Warnings: --- > const a0 = []; 5c5 -< const a1: Array<$FlowFixMe | number> = []; +< const a1: Array = []; --- > const a1 = []; 8c8 diff --git a/tests/arrays/arrays.exp b/tests/arrays/arrays.exp index 8b45a2c43da..2ec7d3c7c39 100644 --- a/tests/arrays/arrays.exp +++ b/tests/arrays/arrays.exp @@ -69,6 +69,20 @@ References: ^^^^^^^^^^^^ [2] +Error ------------------------------------------------------------------------------------------------------ map.js:8:13 + +empty string [1] is incompatible with number [2]. [incompatible-type] + + map.js:8:13 + 8| if (n !== '') { // number incompatible with string + ^^ [1] + +References: + map.js:4:21 + 4| a: $ReadOnlyArray, + ^^^^^^ [2] + + Error ---------------------------------------------------------------------------------------------- numeric_elem.js:6:1 Cannot assign `0` to `arr[day]` because `Date` [1] is not an array index. [incompatible-use] @@ -113,4 +127,4 @@ References: -Found 7 errors +Found 8 errors diff --git a/tests/arrays/map.js b/tests/arrays/map.js new file mode 100644 index 00000000000..f891dc6b62e --- /dev/null +++ b/tests/arrays/map.js @@ -0,0 +1,10 @@ +// @flow + +declare var x: ?{ + a: $ReadOnlyArray, +}; + +(x ? x.a : []).map(n => { + if (n !== '') { // number incompatible with string + } +}); diff --git a/tests/autofix_empty_array/autofix_empty_array.exp b/tests/autofix_empty_array/autofix_empty_array.exp index 7bd35f9be1a..2af7650a942 100644 --- a/tests/autofix_empty_array/autofix_empty_array.exp +++ b/tests/autofix_empty_array/autofix_empty_array.exp @@ -2,7 +2,7 @@ const a0: Array = []; -const a1: Array = []; +const a1: Array = []; a1.push(1); const a2: Array = [];