diff --git a/.github/workflows/mlkem.yml b/.github/workflows/mlkem.yml index 0d59ce119..00c8b1d23 100644 --- a/.github/workflows/mlkem.yml +++ b/.github/workflows/mlkem.yml @@ -33,7 +33,7 @@ jobs: nix profile install ./hax - name: ⤵ Install FStar - run: nix profile install github:FStarLang/FStar/v2024.01.13 + run: nix profile install github:FStarLang/FStar/v2024.12.03 - name: ⤵ Clone HACL-star repository uses: actions/checkout@v4 diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index c7347c48f..d41802014 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,7 +1,7 @@ name: 'Triage stale issues and PRs' on: schedule: - - cron: '00 1 * * *' + - cron: '00 00 * * 4' workflow_dispatch: jobs: diff --git a/Cargo.lock b/Cargo.lock index 5daeaf35d..265b869ac 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -144,7 +144,7 @@ dependencies = [ [[package]] name = "cargo-hax" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "annotate-snippets", "cargo_metadata", @@ -445,7 +445,7 @@ checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" [[package]] name = "hax-adt-into" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "itertools", "proc-macro2", @@ -456,7 +456,7 @@ dependencies = [ [[package]] name = "hax-bounded-integers" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "duplicate", "hax-lib", @@ -465,7 +465,7 @@ dependencies = [ [[package]] name = "hax-driver" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "clap", "colored", @@ -483,7 +483,7 @@ dependencies = [ [[package]] name = "hax-engine-names" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "hax-lib", "hax-lib-protocol", @@ -491,7 +491,7 @@ dependencies = [ [[package]] name = "hax-engine-names-extract" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "hax-adt-into", "hax-engine-names", @@ -502,7 +502,7 @@ dependencies = [ [[package]] name = "hax-frontend-exporter" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "extension-traits", "hax-adt-into", @@ -518,7 +518,7 @@ dependencies = [ [[package]] name = "hax-frontend-exporter-options" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "hax-adt-into", "schemars", @@ -528,7 +528,7 @@ dependencies = [ [[package]] name = "hax-lib" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "hax-lib-macros", "num-bigint", @@ -537,7 +537,7 @@ dependencies = [ [[package]] name = "hax-lib-macros" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "hax-lib", "hax-lib-macros-types", @@ -550,7 +550,7 @@ dependencies = [ [[package]] name = "hax-lib-macros-types" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "proc-macro2", "quote", @@ -562,14 +562,14 @@ dependencies = [ [[package]] name = "hax-lib-protocol" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "libcrux", ] [[package]] name = "hax-lib-protocol-macros" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "proc-macro-error", "proc-macro2", @@ -579,7 +579,7 @@ dependencies = [ [[package]] name = "hax-test-harness" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "assert_cmd", "cargo_metadata", @@ -595,7 +595,7 @@ dependencies = [ [[package]] name = "hax-types" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "annotate-snippets", "clap", diff --git a/Cargo.toml b/Cargo.toml index fe664a8ba..ea7eb8493 100644 --- 
a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ default-members = [ resolver = "2" [workspace.package] -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" authors = ["hax Authors"] license = "Apache-2.0" homepage = "https://github.com/hacspec/hax" @@ -71,15 +71,14 @@ colored = "2" annotate-snippets = "0.11" # Crates in this repository -hax-frontend-exporter = { path = "frontend/exporter", version = "=0.1.0-alpha.1", default-features = false } -hax-adt-into = { path = "frontend/exporter/adt-into", version = "=0.1.0-alpha.1" } -hax-frontend-exporter-options = { path = "frontend/exporter/options", version = "=0.1.0-alpha.1" } -hax-lib-macros = { path = "hax-lib/macros", version = "=0.1.0-alpha.1" } -hax-lib-macros-types = { path = "hax-lib/macros/types", version = "=0.1.0-alpha.1" } -hax-lib = { path = "hax-lib", version = "=0.1.0-alpha.1" } -hax-engine-names = { path = "engine/names", version = "=0.1.0-alpha.1" } -hax-types = { path = "hax-types", version = "=0.1.0-alpha.1" } +hax-frontend-exporter = { path = "frontend/exporter", version = "=0.1.0-rc.1", default-features = false } +hax-adt-into = { path = "frontend/exporter/adt-into", version = "=0.1.0-rc.1" } +hax-frontend-exporter-options = { path = "frontend/exporter/options", version = "=0.1.0-rc.1" } +hax-lib-macros = { path = "hax-lib/macros", version = "=0.1.0-rc.1" } +hax-lib-macros-types = { path = "hax-lib/macros/types", version = "=0.1.0-rc.1" } +hax-lib = { path = "hax-lib", version = "=0.1.0-rc.1" } +hax-engine-names = { path = "engine/names", version = "=0.1.0-rc.1" } +hax-types = { path = "hax-types", version = "=0.1.0-rc.1" } [workspace.metadata.release] -owners = ["github:hacspec:crates"] - +owners = ["github:hacspec:crates"] \ No newline at end of file diff --git a/PUBLISHING.md b/PUBLISHING.md index e671dbe78..8929b334e 100644 --- a/PUBLISHING.md +++ b/PUBLISHING.md @@ -49,7 +49,7 @@ engine. Those should not be published on `crate.io`. 2. `cargo-hax-engine-names-extract` ## Procedure - 1. Bump the version number with `cargo release LEVEL --no-publish --execute` (`cargo release --help` for more details on `LEVEL`). This will bump the version of every Rust crate, but also the version in `engine/dune-project`. This will also regenerate `engine/hax-engine.opam`. Note this will *not* publish the crate. + 1. Bump the version number with `cargo release LEVEL --no-publish --no-tag --execute` (`cargo release --help` for more details on `LEVEL`). This will bump the version of every Rust crate, but also the version in `engine/dune-project`. This will also regenerate `engine/hax-engine.opam`. Note this will *not* publish the crate. 2. PR the change 3. when the PR is merged in main, checkout `main` and run `cargo release --execute` diff --git a/README.md b/README.md index 3adf2858d..7984d4c6a 100644 --- a/README.md +++ b/README.md @@ -106,13 +106,14 @@ manager (with flakes enabled 4. Get a shell: `docker run -it --rm -v /some/dir/with/a/crate:/work hax bash` 5. You can now run `cargo-hax --help` (notice here we use `cargo-hax` instead of `cargo hax`) +Note: Please make sure that `$HOME/.cargo/bin` is in your `$PATH`, as +that is where `setup.sh` will install hax. + ## Supported Subset of the Rust Language -Hax intends to support full Rust, with the two following exceptions, promoting a functional style: - 1. no `unsafe` code (see https://github.com/hacspec/hax/issues/417); - 2. mutable references (aka `&mut T`) on return types or when aliasing (see https://github.com/hacspec/hax/issues/420). 
+Hax intends to support full Rust, with the one exception, promoting a functional style: mutable references (aka `&mut T`) on return types or when aliasing (see https://github.com/hacspec/hax/issues/420) are forbidden. Each unsupported Rust feature is documented as an issue labeled [`unsupported-rust`](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust). When the issue is labeled [`wontfix-v1`](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust+label%3Awontfix%2Cwontfix-v1), that means we don't plan on supporting that feature soon. diff --git a/book/src/contributing/intro.md b/book/src/contributing/intro.md new file mode 100644 index 000000000..383716a6f --- /dev/null +++ b/book/src/contributing/intro.md @@ -0,0 +1,4 @@ +# Contributing +This chapter contains information about internals of hax. + +Please read the [`CONTRIBUTING.md`](https://github.com/hacspec/hax/blob/main/CONTRIBUTING.md) before opening a pull request. diff --git a/cli/driver/src/exporter.rs b/cli/driver/src/exporter.rs index 96adfabb2..ff3239cf3 100644 --- a/cli/driver/src/exporter.rs +++ b/cli/driver/src/exporter.rs @@ -286,6 +286,7 @@ impl Callbacks for ExtractionCallbacks { .flatten() .collect(), def_ids, + hax_version: hax_types::HAX_VERSION.into(), }; haxmeta.write(&mut file, cache_map); } diff --git a/cli/subcommands/build.rs b/cli/subcommands/build.rs index b1a0402c2..68c88cd2e 100644 --- a/cli/subcommands/build.rs +++ b/cli/subcommands/build.rs @@ -14,7 +14,7 @@ fn rustc_version_env_var() { } fn json_schema_static_asset() { - let schema = schemars::schema_for!(( + let mut schema = schemars::schema_for!(( hax_frontend_exporter::Item, hax_types::cli_options::Options, hax_types::diagnostics::Diagnostics, @@ -25,6 +25,7 @@ fn json_schema_static_asset() { hax_types::engine_api::protocol::ToEngine, hax_lib_macros_types::AttrPayload, )); + schema.schema.metadata.get_or_insert_default().id = Some(hax_types::HAX_VERSION.into()); serde_json::to_writer( std::fs::File::create(format!("{}/schema.json", std::env::var("OUT_DIR").unwrap())) .unwrap(), diff --git a/cli/subcommands/src/cargo_hax.rs b/cli/subcommands/src/cargo_hax.rs index 2d2f91749..9a23c5dbd 100644 --- a/cli/subcommands/src/cargo_hax.rs +++ b/cli/subcommands/src/cargo_hax.rs @@ -240,6 +240,7 @@ fn run_engine( message_format: MessageFormat, ) -> bool { let engine_options = EngineOptions { + hax_version: haxmeta.hax_version, backend: backend.clone(), input: haxmeta.items, impl_infos: haxmeta.impl_infos, @@ -532,6 +533,7 @@ fn compute_haxmeta_files(options: &Options) -> (Vec, i32) { } else { 0 }; + (haxmeta_files, exit_code) } diff --git a/engine/backends/coq/coq/coq_backend.ml b/engine/backends/coq/coq/coq_backend.ml index 0e03029fc..c4d0426ba 100644 --- a/engine/backends/coq/coq/coq_backend.ml +++ b/engine/backends/coq/coq/coq_backend.ml @@ -407,14 +407,19 @@ struct method item'_Impl ~super ~generics ~self_ty ~of_trait ~items ~parent_bounds:_ ~safety:_ = let name, args = of_trait#v in - CoqNotation.instance - (name#p ^^ string "_" ^^ string (Int.to_string ([%hash: item] super))) - generics#p [] - (name#p ^^ concat_map (fun x -> space ^^ parens x#p) args) - (braces - (nest 2 - (concat_map (fun x -> break 1 ^^ name#p ^^ !^"_" ^^ x#p) items) - ^^ break 1)) + if Attrs.is_erased super.attrs then empty + else + CoqNotation.instance + (name#p ^^ string "_" + ^^ string (Int.to_string ([%hash: item] super))) + generics#p [] + (name#p ^^ concat_map (fun x -> space ^^ parens x#p) args) + (braces + (nest 2 + 
(concat_map + (fun x -> break 1 ^^ name#p ^^ !^"_" ^^ x#p) + items) + ^^ break 1)) method item'_NotImplementedYet = string "(* NotImplementedYet *)" diff --git a/engine/backends/fstar/fstar_ast.ml b/engine/backends/fstar/fstar_ast.ml index 0ead59bd5..bd8d3a5cd 100644 --- a/engine/backends/fstar/fstar_ast.ml +++ b/engine/backends/fstar/fstar_ast.ml @@ -9,6 +9,7 @@ module Ident = FStar_Ident let dummyRange = Range.dummyRange let id ident = Ident.mk_ident (ident, dummyRange) +let id_prime (ident : Ident.ident) = id (ident.idText ^ "'") let lid path = let init, last = List.(drop_last_exn path, last_exn path) in @@ -87,6 +88,7 @@ let mk_refined (x : string) (typ : AST.term) (phi : x:AST.term -> AST.term) = term @@ AST.Refine (x_bd, phi (term @@ AST.Var (lid_of_id x))) let type0_term = AST.Name (lid [ "Type0" ]) |> term +let eqtype_term = AST.Name (lid [ "eqtype" ]) |> term let parse_string f s = let open FStar_Parser_ParseIt in diff --git a/engine/backends/fstar/fstar_backend.ml b/engine/backends/fstar/fstar_backend.ml index f179ffe38..7ca56bcb6 100644 --- a/engine/backends/fstar/fstar_backend.ml +++ b/engine/backends/fstar/fstar_backend.ml @@ -111,6 +111,24 @@ module Make struct open Ctx + module StringToFStar = struct + let catch_parsing_error (type a b) kind span (f : a -> b) x = + try f x + with e -> + let kind = + Types.FStarParseError + { + fstar_snippet = ""; + details = + "While parsing a " ^ kind ^ ", error: " + ^ Base.Error.to_string_hum (Base.Error.of_exn e); + } + in + Error.raise { span; kind } + + let term span = catch_parsing_error "term" span F.term_of_string + end + let doc_to_string : PPrint.document -> string = FStar_Pprint.pretty_string 1.0 (Z.of_int ctx.line_width) @@ -401,8 +419,9 @@ struct let ppat = ppat' false in match p.p with | PWild -> F.wild - | PAscription { typ; pat } -> + | PAscription { typ; pat = { p = PBinding _; _ } as pat } -> F.pat @@ F.AST.PatAscribed (ppat pat, (pty p.span typ, None)) + | PAscription { pat; _ } -> ppat pat | PBinding { mut = Immutable; @@ -666,17 +685,17 @@ struct kind = UnsupportedMacro { id = [%show: global_ident] macro }; span = e.span; } - | Quote quote -> pquote e.span quote |> F.term_of_string + | Quote quote -> pquote e.span quote |> StringToFStar.term e.span | _ -> . 
(** Prints a `quote` to a string *) and pquote span { contents; _ } = List.map ~f:(function - | `Verbatim code -> code - | `Expr e -> pexpr e |> term_to_string - | `Pat p -> ppat p |> pat_to_string - | `Typ p -> pty span p |> term_to_string) + | Verbatim code -> code + | Expr e -> pexpr e |> term_to_string + | Pattern p -> ppat p |> pat_to_string + | Typ p -> pty span p |> term_to_string) contents |> String.concat @@ -874,6 +893,30 @@ struct | `VerbatimIntf of string * [ `NoNewline | `Newline ] | `Comment of string ] list = + let is_erased = Attrs.is_erased e.attrs in + let erased_impl name ty attrs binders = + let name' = F.id_prime name in + let pat = F.AST.PatVar (name, None, []) in + let term = F.term @@ F.AST.Var (F.lid_of_id @@ name') in + let pat, term = + match binders with + | [] -> (pat, term) + | _ -> + ( F.AST.PatApp + (F.pat pat, List.map ~f:FStarBinder.to_pattern binders), + List.fold_left binders ~init:term ~f:(fun term binder -> + let binder_term, binder_imp = + FStarBinder.to_qualified_term binder + in + F.term @@ F.AST.App (term, binder_term, binder_imp)) ) + in + [ + F.decl ~quals:[ Assumption ] ~fsti:false ~attrs + @@ F.AST.Assume (name', ty); + F.decl ~fsti:false + @@ F.AST.TopLevelLet (NoLetQualifier, [ (F.pat @@ pat, term) ]); + ] + in match e.v with | Alias { name; item } -> (* These should come from bundled items (in the case of cyclic module dependencies). @@ -904,10 +947,11 @@ struct F.decl ~fsti:false @@ F.AST.TopLevelLet (qualifier, [ (pat, pexpr body) ]) in - let interface_mode = ctx.interface_mode && not (List.is_empty params) in + let is_const = List.is_empty params in let ty = - add_clauses_effect_type ~no_tot_abbrev:interface_mode e.attrs - (pty body.span body.typ) + add_clauses_effect_type + ~no_tot_abbrev:(ctx.interface_mode && not is_const) + e.attrs (pty body.span body.typ) in let arrow_typ = F.term @@ -938,7 +982,13 @@ struct in let intf = F.decl ~fsti:true (F.AST.Val (name, arrow_typ)) in - if interface_mode then [ impl; intf ] else [ full ] + + let erased = erased_impl name arrow_typ [] generics in + let impl, full = + if is_erased then (erased, erased) else ([ impl ], [ full ]) + in + if ctx.interface_mode && ((not is_const) || is_erased) then intf :: impl + else full | TyAlias { name; generics; ty } -> let pat = F.pat @@ -971,31 +1021,20 @@ struct |> List.map ~f:to_pattern) ), ty ); ] ) - | Type { name; generics; _ } - when Attrs.find_unique_attr e.attrs - ~f:([%eq: Types.ha_payload] OpaqueType >> Fn.flip Option.some_if ()) - |> Option.is_some -> - if not ctx.interface_mode then - Error.raise - @@ { - kind = - AttributeRejected - { - reason = - "a type cannot be opaque if its module is not extracted \ - as an interface"; - }; - span = e.span; - } - else - let generics = FStarBinder.of_generics e.span generics in - let ty = F.type0_term in - let arrow_typ = - F.term - @@ F.AST.Product (List.map ~f:FStarBinder.to_binder generics, ty) - in - let name = F.id @@ U.Concrete_ident_view.to_definition_name name in - [ F.decl ~fsti:true (F.AST.Val (name, arrow_typ)) ] + | Type { name; generics; _ } when is_erased -> + let generics = + FStarBinder.of_generics e.span generics + |> List.map ~f:FStarBinder.implicit_to_explicit + in + let ty = F.eqtype_term in + let arrow_typ = + F.term + @@ F.AST.Product (List.map ~f:FStarBinder.to_binder generics, ty) + in + let name = F.id @@ U.Concrete_ident_view.to_definition_name name in + let erased = erased_impl name arrow_typ [] generics in + let intf = F.decl ~fsti:true (F.AST.Val (name, arrow_typ)) in + if 
ctx.interface_mode then intf :: erased else erased | Type { name; @@ -1475,15 +1514,20 @@ struct List.exists items ~f:(fun { ii_v; _ } -> match ii_v with IIType _ -> true | _ -> false) in - let impl_val = ctx.interface_mode && not has_type in let let_impl = F.AST.TopLevelLet (NoLetQualifier, [ (pat, body) ]) in - if impl_val then - let generics_binders = List.map ~f:FStarBinder.to_binder generics in - let val_type = F.term @@ F.AST.Product (generics_binders, typ) in - let v = F.AST.Val (name, val_type) in - (F.decls ~fsti:true ~attrs:[ tcinst ] @@ v) - @ F.decls ~fsti:false ~attrs:[ tcinst ] let_impl - else F.decls ~fsti:ctx.interface_mode ~attrs:[ tcinst ] let_impl + let generics_binders = List.map ~f:FStarBinder.to_binder generics in + let val_type = F.term @@ F.AST.Product (generics_binders, typ) in + let v = F.AST.Val (name, val_type) in + let intf = F.decls ~fsti:true ~attrs:[ tcinst ] v in + let impl = + if is_erased then erased_impl name val_type [ tcinst ] generics + else + F.decls + ~fsti:(ctx.interface_mode && has_type) + ~attrs:[ tcinst ] let_impl + in + let intf = if has_type && not is_erased then [] else intf in + if ctx.interface_mode then intf @ impl else impl | Quote { quote; _ } -> let fstar_opts = Attrs.find_unique_attr e.attrs ~f:(function @@ -1529,8 +1573,7 @@ let make (module M : Attrs.WITH_ITEMS) ctx = let ctx = ctx end) : S) -let strings_of_item ~signature_only (bo : BackendOptions.t) m items - (item : item) : +let strings_of_item (bo : BackendOptions.t) m items (item : item) : ([> `Impl of string | `Intf of string ] * [ `NoNewline | `Newline ]) list = let interface_mode' : Types.inclusion_kind = List.rev bo.interfaces @@ -1548,8 +1591,7 @@ let strings_of_item ~signature_only (bo : BackendOptions.t) m items |> Option.value ~default:(Types.Excluded : Types.inclusion_kind) in let interface_mode = - signature_only - || not ([%matches? (Types.Excluded : Types.inclusion_kind)] interface_mode') + not ([%matches? (Types.Excluded : Types.inclusion_kind)] interface_mode') in let (module Print) = make m @@ -1560,30 +1602,27 @@ let strings_of_item ~signature_only (bo : BackendOptions.t) m items line_width = bo.line_width; } in - let mk_impl = if interface_mode then fun i -> `Impl i else fun i -> `Impl i in + let mk_impl i = `Impl i in let mk_intf = if interface_mode then fun i -> `Intf i else fun i -> `Impl i in let no_impl = - signature_only - || [%matches? (Types.Included None' : Types.inclusion_kind)] interface_mode' + [%matches? 
(Types.Included None' : Types.inclusion_kind)] interface_mode' in Print.pitem item - |> List.filter ~f:(function `Impl _ when no_impl -> false | _ -> true) |> List.concat_map ~f:(function | `Impl i -> [ (mk_impl (Print.decl_to_string i), `Newline) ] | `Intf i -> [ (mk_intf (Print.decl_to_string i), `Newline) ] - | `VerbatimIntf (s, nl) -> - [ ((if interface_mode then `Intf s else `Impl s), nl) ] - | `VerbatimImpl (s, nl) -> - [ ((if interface_mode then `Impl s else `Impl s), nl) ] + | `VerbatimIntf (s, nl) -> [ (mk_intf s, nl) ] + | `VerbatimImpl (s, nl) -> [ (`Impl s, nl) ] | `Comment s -> let s = "(* " ^ s ^ " *)" in if interface_mode then [ (`Impl s, `Newline); (`Intf s, `Newline) ] else [ (`Impl s, `Newline) ]) + |> List.filter ~f:(function `Impl _, _ when no_impl -> false | _ -> true) type rec_prefix = NonRec | FirstMutRec | MutRec -let string_of_items ~signature_only ~mod_name ~bundles (bo : BackendOptions.t) m - items : string * string = +let string_of_items ~mod_name ~bundles (bo : BackendOptions.t) m items : + string * string = let collect_trait_goal_idents = object inherit [_] Visitors.reduce as super @@ -1656,7 +1695,7 @@ let string_of_items ~signature_only ~mod_name ~bundles (bo : BackendOptions.t) m List.concat_map ~f:(fun item -> let recursivity_prefix = get_recursivity_prefix item in - let strs = strings_of_item ~signature_only bo m items item in + let strs = strings_of_item bo m items item in match (recursivity_prefix, item.v) with | FirstMutRec, Fn _ -> replace_in_strs ~pattern:"let" ~with_:"let rec" strs @@ -1709,20 +1748,8 @@ let translate_as_fstar m (bo : BackendOptions.t) ~(bundles : AST.item list list) U.group_items_by_namespace items |> Map.to_alist |> List.concat_map ~f:(fun (ns, items) -> - let signature_only = - let is_dropped_body = - Concrete_ident.eq_name Rust_primitives__hax__dropped_body - in - let contains_dropped_body = - U.Reducers.collect_concrete_idents#visit_item () - >> Set.exists ~f:is_dropped_body - in - List.exists ~f:contains_dropped_body items - in let mod_name = module_name ns in - let impl, intf = - string_of_items ~signature_only ~mod_name ~bundles bo m items - in + let impl, intf = string_of_items ~mod_name ~bundles bo m items in let make ~ext body = if String.is_empty body then None else diff --git a/engine/backends/proverif/proverif_backend.ml b/engine/backends/proverif/proverif_backend.ml index 3b1c6e56b..95778622b 100644 --- a/engine/backends/proverif/proverif_backend.ml +++ b/engine/backends/proverif/proverif_backend.ml @@ -630,8 +630,7 @@ module Make (Options : OPTS) : MAKE = struct if Attrs.find_unique_attr item.attrs ~f: - ([%eq: Types.ha_payload] OpaqueType - >> Fn.flip Option.some_if ()) + ([%eq: Types.ha_payload] Erased >> Fn.flip Option.some_if ()) |> Option.is_some then default_lines else default_lines ^^ destructor_lines diff --git a/engine/bin/lib.ml b/engine/bin/lib.ml index 944089d01..71f302bf8 100644 --- a/engine/bin/lib.ml +++ b/engine/bin/lib.ml @@ -36,14 +36,14 @@ let import_thir_items (include_clauses : Types.inclusion_clause list) include_clauses |> List.last in - let drop_body = + let type_only = (* Shall we drop the body? *) Option.map ~f:(fun clause -> [%matches? 
Types.SignatureOnly] clause.kind) most_precise_clause |> Option.value ~default:false in - Import_thir.import_item ~drop_body item) + Import_thir.import_item ~type_only item) items |> List.map ~f:snd in @@ -186,10 +186,25 @@ let parse_options () = let table, json = Hax_io.read_json () |> Option.value_exn |> parse_id_table_node in + let version = + try Yojson.Safe.Util.(member "hax_version" json |> to_string) + with _ -> "unknown" + in + if String.equal version Types.hax_version |> not then ( + prerr_endline + [%string + {| +The versions of `hax-engine` and of `cargo-hax` are different: + - `hax-engine` version: %{Types.hax_version} + - `cargo-hax` version: %{version} + +Please reinstall hax. +|}]; + Stdlib.exit 1); table |> List.iter ~f:(fun (id, json) -> Hashtbl.add_exn Types.cache_map ~key:id ~data:(`JSON json)); - let options = Types.parse_engine_options json in + let options = [%of_yojson: Types.engine_options] json in Profiling.enabled := options.backend.profile; options diff --git a/engine/dune-project b/engine/dune-project index 11531cc1b..5769b572c 100644 --- a/engine/dune-project +++ b/engine/dune-project @@ -2,7 +2,7 @@ (name hax-engine) -(version 0.1.0-alpha.1) +(version 0.1.0-rc.1) (generate_opam_files true) diff --git a/engine/hax-engine.opam b/engine/hax-engine.opam index a1c46c487..e317a03f5 100644 --- a/engine/hax-engine.opam +++ b/engine/hax-engine.opam @@ -1,6 +1,6 @@ # This file is generated by dune, edit dune-project instead opam-version: "2.0" -version: "0.1.0-alpha.1" +version: "0.1.0-rc.1" synopsis: "The engine of hax, a Rust verification tool" description: "Hax is divided in two: a frontend (written in Rust) and an engine (written in OCaml). This is the engine." diff --git a/engine/lib/ast.ml b/engine/lib/ast.ml index 7b9564869..fbc798cb2 100644 --- a/engine/lib/ast.ml +++ b/engine/lib/ast.ml @@ -328,12 +328,13 @@ functor interleaved with Rust code *) and expr = { e : expr'; span : span; typ : ty } + and quote = { contents : quote_content list; witness : F.quote } - and quote = { - contents : - [ `Verbatim of string | `Expr of expr | `Pat of pat | `Typ of ty ] list; - witness : F.quote; - } + and quote_content = + | Verbatim of string + | Expr of expr + | Pattern of pat + | Typ of ty and supported_monads = | MException of ty diff --git a/engine/lib/ast_builder.ml b/engine/lib/ast_builder.ml index a092f9225..926fe262b 100644 --- a/engine/lib/ast_builder.ml +++ b/engine/lib/ast_builder.ml @@ -210,7 +210,8 @@ module Make (F : Features.T) = struct end module type S = module type of Make0 (struct - let span = failwith "dummy" + (* This [failwith] is OK: this module is never actually used for computation. It is useful only for typing. *) + let span = failwith "type only module: this will never be computed" end) module Make (Span : sig diff --git a/engine/lib/ast_utils.ml b/engine/lib/ast_utils.ml index 1c149404f..899221e21 100644 --- a/engine/lib/ast_utils.ml +++ b/engine/lib/ast_utils.ml @@ -226,9 +226,12 @@ module Make (F : Features.T) = struct ((enabled, s) : bool * (string, string) Hashtbl.t) gc = match gc with | GCType { goal; name } when enabled -> - let data = "i" ^ Int.to_string (Hashtbl.length s) in - let _ = Hashtbl.add s ~key:name ~data in - GCType { goal; name = data } + let new_name = + Hashtbl.find_or_add s name ~default:(fun () -> + "i" ^ Int.to_string (Hashtbl.length s)) + in + let goal = super#visit_trait_goal (enabled, s) goal in + GCType { goal; name = new_name } | _ -> super#visit_generic_constraint (enabled, s) gc method! 
visit_trait_item (_, s) = super#visit_trait_item (true, s) @@ -365,6 +368,10 @@ module Make (F : Features.T) = struct bounds_impls; }; } + (* Match scrutinees need to be ascribed as well + (see https://github.com/hacspec/hax/issues/1207).*) + | Match { scrutinee; arms } -> + { e with e = Match { scrutinee = ascribe scrutinee; arms } } | _ -> (* Ascribe the return type of a function application & constructors *) if (ascribe_app && is_app e.e) || [%matches? Construct _] e.e @@ -653,6 +660,13 @@ module Make (F : Features.T) = struct self#zero | _ -> super#visit_expr' () e end + + let collect_attrs = + object (_self) + inherit [_] Visitors.reduce + inherit [_] expr_list_monoid + method! visit_attrs () attrs = attrs + end end (** Produces a local identifier which is locally fresh **with respect @@ -950,8 +964,11 @@ module Make (F : Features.T) = struct module Debug : sig val expr : ?label:string -> AST.expr -> unit (** Prints an expression pretty-printed as Rust, with its full - AST encoded as JSON, available as a file, so that one can - `jless` or `jq` into it. *) + AST encoded as JSON, available as a file, so that one can + `jless` or `jq` into it. *) + + val item' : ?label:string -> AST.item -> string + val item : ?label:string -> AST.item -> unit end = struct let expr ?(label = "") (e : AST.expr) : unit = let path = tempfile_path ~suffix:".json" in @@ -962,6 +979,18 @@ module Make (F : Features.T) = struct ^ "\n```\x1b[34m JSON-encoded AST available at \x1b[1m" ^ path ^ "\x1b[0m (hint: use `jless " ^ path ^ "`)" |> Stdio.prerr_endline + + let item' ?(label = "") (e : AST.item) : string = + let path = tempfile_path ~suffix:".json" in + Core.Out_channel.write_all path + ~data:([%yojson_of: AST.item] e |> Yojson.Safe.pretty_to_string); + let e = LiftToFullAst.item e in + "```rust " ^ label ^ "\n" ^ Print_rust.pitem_str e + ^ "\n```\x1b[34m JSON-encoded AST available at \x1b[1m" ^ path + ^ "\x1b[0m (hint: use `jless " ^ path ^ "`)" + + let item ?(label = "") (e : AST.item) = + item' ~label e |> Stdio.prerr_endline end let unbox_expr' (next : expr -> expr) (e : expr) : expr = diff --git a/engine/lib/attr_payloads.ml b/engine/lib/attr_payloads.ml index 5cce36a06..c3c330c64 100644 --- a/engine/lib/attr_payloads.ml +++ b/engine/lib/attr_payloads.ml @@ -6,7 +6,18 @@ let payloads : attrs -> (Types.ha_payload * span) list = let parse = (* we have to parse ["JSON"]: first a string, then a ha_payload *) function - | `String s -> Yojson.Safe.from_string s |> Types.parse_ha_payload + | `String s -> ( + match Yojson.Safe.from_string s |> Types.safe_ha_payload_of_yojson with + | Error _ -> + Stdlib.prerr_endline + [%string + {| +The hax engine could not parse a hax attribute. +This means that the crate being extracted and the version of hax engine are incompatible. +Please make sure the `hax-lib` dependency of the extracted crate matches hax-engine's version (%{Types.hax_version}). 
+|}]; + Stdlib.exit 1 + | Ok value -> value) | x -> Stdlib.failwith @@ "Attr_payloads: payloads: expected a string while parsing JSON, got " @@ -23,7 +34,7 @@ let payloads : attrs -> (Types.ha_payload * span) list = (** Create a attribute out of a [payload] *) let to_attr (payload : Types.ha_payload) (span : span) : attr = let json = - `String (Yojson.Safe.to_string (Types.to_json_ha_payload payload)) + `String (Yojson.Safe.to_string ([%yojson_of: Types.ha_payload] payload)) in let kind : attr_kind = Tool { path = "_hax::json"; tokens = Yojson.Safe.to_string json } @@ -108,6 +119,11 @@ module MakeBase (Error : Phase_utils.ERROR) = struct let late_skip : attrs -> bool = status >> [%matches? Types.Included { late_skip = true }] + let is_erased : attrs -> bool = + find_unique_attr + ~f:([%eq: Types.ha_payload] Erased >> Fn.flip Option.some_if ()) + >> Option.is_some + let uid : attrs -> UId.t option = let f = function Types.Uid uid -> Some (UId.of_raw uid) | _ -> None in find_unique_attr ~f diff --git a/engine/lib/concrete_ident/concrete_ident.ml b/engine/lib/concrete_ident/concrete_ident.ml index b0d0cc160..75505210d 100644 --- a/engine/lib/concrete_ident/concrete_ident.ml +++ b/engine/lib/concrete_ident/concrete_ident.ml @@ -596,17 +596,16 @@ module Create = struct let len x = List.length x.def_id.path in let compare x y = len x - len y in let id = List.min_elt ~compare from |> Option.value_exn in - let parent = parent id in { kind = Kind.Value; def_id = { - parent.def_id with + id.def_id with path = - parent.def_id.path + id.def_id.path @ [ { - data = TypeNs "rec_bundle"; + data = TypeNs "cyclic_bundle"; disambiguator = [%hash: t list] from; }; ]; @@ -635,6 +634,17 @@ module Create = struct let constructor name = let path = name.def_id.path @ [ { data = Ctor; disambiguator = 0 } ] in { name with def_id = { name.def_id with path } } + + let add_disambiguator name disambiguator = + let path = name.def_id.path in + if List.is_empty path then name + else + (* The following two `exn` function calls cannot fail as the path is not empty. 
*) + let last = List.last_exn path in + let path = + List.drop_last_exn path @ [ { data = last.data; disambiguator } ] + in + { name with def_id = { name.def_id with path } } end let lookup_raw_impl_info (impl : t) : Types.impl_infos option = diff --git a/engine/lib/concrete_ident/concrete_ident.mli b/engine/lib/concrete_ident/concrete_ident.mli index e87f71b22..545a23da3 100644 --- a/engine/lib/concrete_ident/concrete_ident.mli +++ b/engine/lib/concrete_ident/concrete_ident.mli @@ -27,6 +27,7 @@ val eq_name : name -> t -> bool val to_debug_string : t -> string module Create : sig + val parent : t -> t val fresh_module : from:t list -> t val move_under : new_parent:t -> t -> t @@ -37,6 +38,10 @@ module Create : sig val map_last : f:(string -> string) -> t -> t (** [map_last f ident] applies [f] on the last chunk of [ident]'s path if it holds a string *) + + val add_disambiguator : t -> int -> t + (** [add_disambiguator ident d] changes the disambiguator on + the last chunk of [ident]'s path to [d] *) end type view = { crate : string; path : string list; definition : string } diff --git a/engine/lib/dependencies.ml b/engine/lib/dependencies.ml index e46fcb555..19e5ff1ad 100644 --- a/engine/lib/dependencies.ml +++ b/engine/lib/dependencies.ml @@ -88,8 +88,9 @@ module Make (F : Features.T) = struct = List.concat_map ~f:(fun i -> + let attrs = U.Reducers.collect_attrs#visit_item () i in let assoc = - uid_associated_items i.attrs |> List.map ~f:(fun i -> i.ident) + uid_associated_items attrs |> List.map ~f:(fun i -> i.ident) in vertices_of_item i @ assoc |> List.map ~f:(Fn.const i.ident &&& Fn.id)) items @@ -413,8 +414,12 @@ module Make (F : Features.T) = struct in let transform (bundle : item list) = + let module_names = + List.map ~f:(ident_of >> Concrete_ident.Create.parent) bundle + |> List.dedup_and_sort ~compare:Concrete_ident.compare + in let ns : Concrete_ident.t = - Concrete_ident.Create.fresh_module ~from:(List.map ~f:ident_of bundle) + Concrete_ident.Create.fresh_module ~from:module_names in let new_name_under_ns : Concrete_ident.t -> Concrete_ident.t = Concrete_ident.Create.move_under ~new_parent:ns @@ -431,10 +436,7 @@ module Make (F : Features.T) = struct (List.mem duplicates (new_name_under_ns id) ~equal:Concrete_ident.equal) then id - else - Concrete_ident.Create.map_last - ~f:(fun name -> name ^ (Concrete_ident.hash id |> Int.to_string)) - id + else Concrete_ident.Create.add_disambiguator id (Concrete_ident.hash id) in let renamings = List.map @@ -465,9 +467,19 @@ module Make (F : Features.T) = struct ]) in let renamings = - Map.of_alist_exn - (module Concrete_ident) - (renamings @ variant_and_constructors_renamings) + match + Map.of_alist + (module Concrete_ident) + (renamings @ variant_and_constructors_renamings) + with + | `Duplicate_key dup -> + failwith + [%string + "Fatal error: in dependency analysis, we construct a renaming \ + key-value list with a guarantee of unicity in keys. 
However, \ + we found the following key twice:\n\ + %{[%show: concrete_ident] dup}"] + | `Ok value -> value in let rename = let renamer _lvl i = Map.find renamings i |> Option.value ~default:i in @@ -493,7 +505,15 @@ module Make (F : Features.T) = struct include Comparable.Make (T) end in let bundle_of_item = - Hashtbl.of_alist_exn (module ComparableItem) bundle_transforms + match Hashtbl.of_alist (module ComparableItem) bundle_transforms with + | `Duplicate_key dup -> + failwith + [%string + "Fatal error: in dependency analysis, [bundles_transforms] is \ + expected to be a key-value list with a guarantee of unicity in \ + keys. However, we found the following key (an item) twice:\n\ + %{U.Debug.item' dup}"] + | `Ok value -> value in let maybe_transform_item item = match Hashtbl.find bundle_of_item item with diff --git a/engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml b/engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml index 3b1c190aa..37fe9bc4c 100644 --- a/engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml +++ b/engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml @@ -234,10 +234,10 @@ module Make (F : Features.T) (View : Concrete_ident.VIEW_API) = struct method quote { contents; _ } = List.map ~f:(function - | `Verbatim code -> string code - | `Expr e -> print#expr_at Expr_Quote e - | `Pat p -> print#pat_at Expr_Quote p - | `Typ p -> print#ty_at Expr_Quote p) + | Verbatim code -> string code + | Expr e -> print#expr_at Expr_Quote e + | Pattern p -> print#pat_at Expr_Quote p + | Typ p -> print#ty_at Expr_Quote p) contents |> concat diff --git a/engine/lib/generic_printer/generic_printer.ml b/engine/lib/generic_printer/generic_printer.ml index 811051c0e..81a5c793f 100644 --- a/engine/lib/generic_printer/generic_printer.ml +++ b/engine/lib/generic_printer/generic_printer.ml @@ -231,15 +231,13 @@ module Make (F : Features.T) = struct [local] is true, otherwise it prints the full path, separated by `module_path_separator`. *) - method quote (quote : quote) : document = - List.map - ~f:(function - | `Verbatim code -> string code - | `Expr e -> self#print_expr AstPosition_Quote e - | `Pat p -> self#print_pat AstPosition_Quote p - | `Typ p -> self#print_ty AstPosition_Quote p) - quote.contents - |> concat + method quote ~contents ~witness:_ : document = + List.map ~f:(fun doc -> doc#p) contents |> concat + + method quote_content_Verbatim v = string v + method quote_content_Expr e = e#p + method quote_content_Pattern p = p#p + method quote_content_Typ t = t#p (** {2:specialize-expr Specialized printers for [expr]} *) @@ -629,10 +627,6 @@ module Make (F : Features.T) = struct ^ "]")) ast_position id - method _do_not_override_lazy_of_quote ast_position (value : quote) - : quote lazy_doc = - lazy_doc (fun (value : quote) -> self#quote value) ast_position value - method! 
_do_not_override_lazy_of_item ast_position (value : item) : item lazy_doc = let module View = (val concrete_ident_view) in diff --git a/engine/lib/generic_printer/generic_printer_template.ml b/engine/lib/generic_printer/generic_printer_template.ml index 1bcd61085..60c76d2a4 100644 --- a/engine/lib/generic_printer/generic_printer_template.ml +++ b/engine/lib/generic_printer/generic_printer_template.ml @@ -35,6 +35,11 @@ struct method borrow_kind_Mut _x1 = default_document_for "borrow_kind_Mut" method borrow_kind_Shared = default_document_for "borrow_kind_Shared" method borrow_kind_Unique = default_document_for "borrow_kind_Unique" + method cf_kind_BreakOnly = default_document_for "cf_kind_BreakOnly" + + method cf_kind_BreakOrReturn = + default_document_for "cf_kind_BreakOrReturn" + method common_array _x1 = default_document_for "common_array" method dyn_trait_goal ~trait:_ ~non_self_args:_ = @@ -124,10 +129,6 @@ struct method expr'_Return ~super:_ ~e:_ ~witness:_ = default_document_for "expr'_Return" - method cf_kind_BreakOrReturn = - default_document_for "cf_kind_BreakOrReturn" - - method cf_kind_BreakOnly = default_document_for "cf_kind_BreakOnly" method field_pat ~field:_ ~pat:_ = default_document_for "field_pat" method generic_constraint_GCLifetime _x1 _x2 = diff --git a/engine/lib/hax_io.ml b/engine/lib/hax_io.ml index 0038375be..d0e836e32 100644 --- a/engine/lib/hax_io.ml +++ b/engine/lib/hax_io.ml @@ -36,10 +36,10 @@ include ( end) let read () : Types.to_engine = - read_json () |> Option.value_exn |> Types.parse_to_engine + read_json () |> Option.value_exn |> [%of_yojson: Types.to_engine] let write (msg : Types.from_engine) : unit = - Types.to_json_from_engine msg |> write_json + [%yojson_of: Types.from_engine] msg |> write_json let close () : unit = write Exit; diff --git a/engine/lib/import_thir.ml b/engine/lib/import_thir.ml index f1f2550db..356b8eb07 100644 --- a/engine/lib/import_thir.ml +++ b/engine/lib/import_thir.ml @@ -24,7 +24,7 @@ let unimplemented ~issue_id (span : Thir.span list) (details : string) = let kind = T.Unimplemented { - issue_id = Some (MyInt64.of_int_exn issue_id); + issue_id = Some (MyInt64.of_int issue_id); details = String.(if details = "" then None else Some details); } in @@ -676,7 +676,12 @@ end) : EXPR = struct LocalVar { name = id.name; - id = Local_ident.mk_id Cnst (MyInt64.to_int_exn id.index); + id = + Local_ident.mk_id Cnst + (MyInt64.to_int id.index + |> Option.value_or_thunk ~default:(fun _ -> + assertion_failure [ e.span ] + "Expected const id to fit in an OCaml native int")); } | Repeat { value; count } -> let value = c_expr value in @@ -999,8 +1004,8 @@ end) : EXPR = struct | Float k -> TFloat (match k with F16 -> F16 | F32 -> F32 | F64 -> F64 | F128 -> F128) - | Arrow value -> - let ({ inputs; output; _ } : Thir.ty_fn_sig) = value.value in + | Arrow signature | Closure (_, { untupled_sig = signature; _ }) -> + let ({ inputs; output; _ } : Thir.ty_fn_sig) = signature.value in let inputs = if List.is_empty inputs then [ U.unit_typ ] else List.map ~f:(c_ty span) inputs @@ -1038,7 +1043,16 @@ end) : EXPR = struct assertion_failure [ span ] "Ty::Alias with AliasTyKind::Weak" | Param { index; name } -> (* TODO: [id] might not unique *) - TParam { name; id = Local_ident.mk_id Typ (MyInt64.to_int_exn index) } + TParam + { + name; + id = + Local_ident.mk_id Typ + (MyInt64.to_int index + |> Option.value_or_thunk ~default:(fun _ -> + assertion_failure [ span ] + "Expected param id to fit in an OCaml native int")); + } | Error -> assertion_failure [ 
span ] "got type `Error`: Rust compilation probably failed." @@ -1172,7 +1186,11 @@ end) : EXPR = struct { typ_span = Option.map ~f:Span.of_thir param.ty_span; typ = c_ty (Option.value ~default:span param.ty_span) param.ty; - pat = c_pat (Option.value_exn param.pat); + pat = + c_pat + (Option.value_or_thunk param.pat ~default:(fun _ -> + assertion_failure [ span ] + "c_param: expected param.pat to be non-empty")); attrs = c_attrs param.attributes; } @@ -1330,17 +1348,9 @@ let is_automatically_derived (attrs : Thir.attribute list) = | _ -> false) attrs -let is_hax_skip (attrs : Thir.attribute list) = - List.exists - ~f:(function - | { kind = Normal { item = { path; _ }; _ }; _ } -> - String.equal path "_hax::skip" - | _ -> false) - attrs - let should_skip (attrs : Thir.item_attributes) = let attrs = attrs.attributes @ attrs.parent_attributes in - is_hax_skip attrs || is_automatically_derived attrs + is_automatically_derived attrs (** Converts a generic parameter to a generic value. This assumes the parameter is bound. *) @@ -1440,354 +1450,409 @@ let cast_of_enum typ_name generics typ thir_span in { v; span; ident; attrs = [] } -let rec c_item ~ident ~drop_body (item : Thir.item) : item list = +let rec c_item ~ident ~type_only (item : Thir.item) : item list = try Span.with_owner_hint item.owner_id (fun _ -> - c_item_unwrapped ~ident ~drop_body item) + c_item_unwrapped ~ident ~type_only item) with Diagnostics.SpanFreeError.Exn payload -> let context, kind = Diagnostics.SpanFreeError.payload payload in let error = Diagnostics.pretty_print_context_kind context kind in let span = Span.of_thir item.span in [ make_hax_error_item span ident error ] -and c_item_unwrapped ~ident ~drop_body (item : Thir.item) : item list = +and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list = let open (val make ~krate:item.owner_id.contents.value.krate : EXPR) in - if should_skip item.attributes then [] - else - let span = Span.of_thir item.span in - let attrs = c_item_attrs item.attributes in - let mk_one v = { span; v; ident; attrs } in - let mk v = [ mk_one v ] in - let drop_body = - drop_body - && Attr_payloads.payloads attrs - |> List.exists - ~f:(fst >> [%matches? (NeverDropBody : Types.ha_payload)]) - |> not - in - let c_body = if drop_body then c_expr_drop_body else c_expr in - (* TODO: things might be unnamed (e.g. 
constants) *) - match (item.kind : Thir.item_kind) with - | Const (_, generics, body) -> - mk - @@ Fn - { - name = - Concrete_ident.of_def_id Value (Option.value_exn item.def_id); - generics = c_generics generics; - body = c_expr body; - params = []; - safety = Safe; - } - | TyAlias (ty, generics) -> - mk - @@ TyAlias - { - name = - Concrete_ident.of_def_id Type (Option.value_exn item.def_id); - generics = c_generics generics; - ty = c_ty item.span ty; - } - | Fn (generics, { body; params; header = { safety; _ }; _ }) -> - mk - @@ Fn - { - name = - Concrete_ident.of_def_id Value (Option.value_exn item.def_id); - generics = c_generics generics; - body = c_body body; - params = c_fn_params item.span params; - safety = csafety safety; - } - | Enum (variants, generics, repr) -> - let def_id = Option.value_exn item.def_id in - let generics = c_generics generics in - let is_struct = false in - let kind = Concrete_ident.Kind.Constructor { is_struct } in - let discs = - (* Each variant might introduce a anonymous constant defining its discriminant integer *) - List.filter_map ~f:(fun v -> v.disr_expr) variants - |> List.map ~f:(fun Types.{ def_id; body; _ } -> - let name = Concrete_ident.of_def_id kind def_id in - let generics = { params = []; constraints = [] } in - let body = c_expr body in - { - v = Fn { name; generics; body; params = []; safety = Safe }; - span; - ident = name; - attrs = []; - }) - in - let is_primitive = - List.for_all - ~f:(fun { data; _ } -> - match data with - | Unit _ | Tuple ([], _, _) | Struct { fields = []; _ } -> true - | _ -> false) - variants - in - let variants = - List.map - ~f:(fun - ({ data; def_id = variant_id; attributes; _ } as original) -> - let is_record = - [%matches? (Struct { fields = _ :: _; _ } : Types.variant_data)] - data - in - let name = Concrete_ident.of_def_id kind variant_id in - let arguments = - match data with - | Tuple (fields, _, _) | Struct { fields; _ } -> - List.map - ~f:(fun { def_id = id; ty; span; attributes; _ } -> - ( Concrete_ident.of_def_id Field id, - c_ty span ty, - c_attrs attributes )) - fields - | Unit _ -> [] - in - let attrs = c_attrs attributes in - ({ name; arguments; is_record; attrs }, original)) - variants - in - let name = Concrete_ident.of_def_id Type def_id in - let cast_fun = - cast_of_enum name generics (c_ty item.span repr.typ) item.span - variants - in - let variants, _ = List.unzip variants in - let result = - mk_one (Type { name; generics; variants; is_struct }) :: discs - in - if is_primitive then cast_fun :: result else result - | Struct (v, generics) -> - let generics = c_generics generics in - let def_id = Option.value_exn item.def_id in - let is_struct = true in - (* repeating the attributes of the item in the variant: TODO is that ok? *) - let v = - let kind = Concrete_ident.Kind.Constructor { is_struct } in - let name = Concrete_ident.of_def_id kind def_id in - let name = Concrete_ident.Create.move_under name ~new_parent:name in - let mk fields is_record = + let span = Span.of_thir item.span in + let attrs = c_item_attrs item.attributes in + (* this is true if the user explicilty requested to erase using the `opaque` macro *) + let erased_by_user attrs = + Attr_payloads.payloads attrs + |> List.exists ~f:(fst >> [%matches? (Erased : Types.ha_payload)]) + in + let item_erased_by_user = erased_by_user attrs in + let type_only = + type_only + && Attr_payloads.payloads attrs + |> List.exists ~f:(fst >> [%matches? 
(NeverErased : Types.ha_payload)]) + |> not + in + (* This is true if the item should be erased because we are in type-only mode + (Only certain kinds of items are erased in this case). *) + let erased_by_hax = + should_skip item.attributes + || type_only + && + match item.kind with + | Fn _ | Static _ -> true + | Impl { of_trait = Some _; items; _ } + when List.exists items ~f:(fun item -> + match item.kind with Type _ -> true | _ -> false) + |> not -> + true + | _ -> false + in + (* If the item is erased by hax we need to add the Erased attribute. + It is already present if the item is erased by user. *) + let attrs_with_erased erased_by_hax erased_by_user attrs = + if erased_by_hax && not erased_by_user then + Attr_payloads.to_attr Erased span :: attrs + else attrs + in + let attrs = attrs_with_erased erased_by_hax item_erased_by_user attrs in + let erased = item_erased_by_user || erased_by_hax in + + let mk_one v = { span; v; ident; attrs } in + let mk v = [ mk_one v ] in + let drop_body = + erased + && Attr_payloads.payloads attrs + |> List.exists ~f:(fst >> [%matches? (NeverErased : Types.ha_payload)]) + |> not + in + let c_body = if drop_body then c_expr_drop_body else c_expr in + let assert_item_def_id () = + Option.value_or_thunk item.def_id ~default:(fun _ -> + assertion_failure [ item.span ] "Expected this item to have a `def_id`") + in + (* TODO: things might be unnamed (e.g. constants) *) + match (item.kind : Thir.item_kind) with + | Const (_, generics, body) -> + mk + @@ Fn + { + name = Concrete_ident.of_def_id Value (assert_item_def_id ()); + generics = c_generics generics; + body = c_body body; + params = []; + safety = Safe; + } + | TyAlias (ty, generics) -> + mk + @@ TyAlias + { + name = Concrete_ident.of_def_id Type (assert_item_def_id ()); + generics = c_generics generics; + ty = c_ty item.span ty; + } + | Fn (generics, { body; params; header = { safety; _ }; _ }) -> + mk + @@ Fn + { + name = Concrete_ident.of_def_id Value (assert_item_def_id ()); + generics = c_generics generics; + body = c_body body; + params = c_fn_params item.span params; + safety = csafety safety; + } + | (Enum (_, generics, _) | Struct (_, generics)) when erased -> + let generics = c_generics generics in + let is_struct = match item.kind with Struct _ -> true | _ -> false in + let def_id = assert_item_def_id () in + let name = Concrete_ident.of_def_id Type def_id in + mk @@ Type { name; generics; variants = []; is_struct } + | Enum (variants, generics, repr) -> + let def_id = assert_item_def_id () in + let generics = c_generics generics in + let is_struct = false in + let kind = Concrete_ident.Kind.Constructor { is_struct } in + let discs = + (* Each variant might introduce a anonymous constant defining its discriminant integer *) + List.filter_map ~f:(fun v -> v.disr_expr) variants + |> List.map ~f:(fun Types.{ def_id; body; _ } -> + let name = Concrete_ident.of_def_id kind def_id in + let generics = { params = []; constraints = [] } in + let body = c_expr body in + { + v = Fn { name; generics; body; params = []; safety = Safe }; + span; + ident = name; + attrs = []; + }) + in + let is_primitive = + List.for_all + ~f:(fun { data; _ } -> + match data with + | Unit _ | Tuple ([], _, _) | Struct { fields = []; _ } -> true + | _ -> false) + variants + in + let variants = + List.map + ~f:(fun ({ data; def_id = variant_id; attributes; _ } as original) -> + let is_record = + [%matches? 
(Struct { fields = _ :: _; _ } : Types.variant_data)] + data + in + let name = Concrete_ident.of_def_id kind variant_id in let arguments = - List.map - ~f:(fun Thir.{ def_id = id; ty; span; attributes; _ } -> - ( Concrete_ident.of_def_id Field id, - c_ty span ty, - c_attrs attributes )) - fields + match data with + | Tuple (fields, _, _) | Struct { fields; _ } -> + List.map + ~f:(fun { def_id = id; ty; span; attributes; _ } -> + ( Concrete_ident.of_def_id Field id, + c_ty span ty, + c_attrs attributes )) + fields + | Unit _ -> [] in - { name; arguments; is_record; attrs } + let attrs = c_attrs attributes in + ({ name; arguments; is_record; attrs }, original)) + variants + in + let name = Concrete_ident.of_def_id Type def_id in + let cast_fun = + cast_of_enum name generics (c_ty item.span repr.typ) item.span variants + in + let variants, _ = List.unzip variants in + let result = + mk_one (Type { name; generics; variants; is_struct }) :: discs + in + if is_primitive then cast_fun :: result else result + | Struct (v, generics) -> + let generics = c_generics generics in + let def_id = assert_item_def_id () in + let is_struct = true in + (* repeating the attributes of the item in the variant: TODO is that ok? *) + let v = + let kind = Concrete_ident.Kind.Constructor { is_struct } in + let name = Concrete_ident.of_def_id kind def_id in + let name = Concrete_ident.Create.move_under name ~new_parent:name in + let mk fields is_record = + let arguments = + List.map + ~f:(fun Thir.{ def_id = id; ty; span; attributes; _ } -> + ( Concrete_ident.of_def_id Field id, + c_ty span ty, + c_attrs attributes )) + fields in - match v with - | Tuple (fields, _, _) -> mk fields false - | Struct { fields = _ :: _ as fields; _ } -> mk fields true - | _ -> { name; arguments = []; is_record = false; attrs } - in - let variants = [ v ] in - let name = Concrete_ident.of_def_id Type def_id in - mk @@ Type { name; generics; variants; is_struct } - | MacroInvokation { macro_ident; argument; span } -> - mk - @@ IMacroInvokation - { - macro = Concrete_ident.of_def_id Macro macro_ident; - argument; - span = Span.of_thir span; - witness = W.macro; - } - | Trait (No, safety, generics, _bounds, items) -> - let items = - List.filter - ~f:(fun { attributes; _ } -> not (should_skip attributes)) - items - in - let name = - Concrete_ident.of_def_id Trait (Option.value_exn item.def_id) + { name; arguments; is_record; attrs } in - let { params; constraints } = c_generics generics in - let self = - let id = Local_ident.mk_id Typ 0 (* todo *) in - let ident = Local_ident.{ name = "Self"; id } in - { ident; span; attrs = []; kind = GPType } - in - let params = self :: params in - let generics = { params; constraints } in - let items = List.map ~f:c_trait_item items in - let safety = csafety safety in - mk @@ Trait { name; generics; items; safety } - | Trait (Yes, _, _, _, _) -> - unimplemented ~issue_id:930 [ item.span ] "Auto trait" - | Impl { of_trait = None; generics; items; _ } -> - let items = - List.filter - ~f:(fun { attributes; _ } -> not (should_skip attributes)) - items - in - List.map - ~f:(fun (item : Thir.impl_item) -> - let item_def_id = Concrete_ident.of_def_id Impl item.owner_id in - - let v = - match (item.kind : Thir.impl_item_kind) with - | Fn { body; params; header = { safety; _ }; _ } -> - let params = - if List.is_empty params then [ U.make_unit_param span ] - else List.map ~f:(c_param item.span) params - in - Fn - { - name = item_def_id; - generics = - U.concat_generics (c_generics generics) - (c_generics 
item.generics); - body = c_body body; - params; - safety = csafety safety; - } - | Const (_ty, e) -> - Fn - { - name = item_def_id; - generics = c_generics generics; - (* does that make sense? can we have `const`? *) - body = c_body e; - params = []; - safety = Safe; - } - | Type _ty -> - assertion_failure [ item.span ] - "Inherent implementations are not supposed to have \ - associated types \ - (https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations)." - in - let ident = Concrete_ident.of_def_id Value item.owner_id in - let attrs = c_item_attrs item.attributes in - { span = Span.of_thir item.span; v; ident; attrs }) + match v with + | Tuple (fields, _, _) -> mk fields false + | Struct { fields = _ :: _ as fields; _ } -> mk fields true + | _ -> { name; arguments = []; is_record = false; attrs } + in + let variants = [ v ] in + let name = Concrete_ident.of_def_id Type def_id in + mk @@ Type { name; generics; variants; is_struct } + | MacroInvokation { macro_ident; argument; span } -> + mk + @@ IMacroInvokation + { + macro = Concrete_ident.of_def_id Macro macro_ident; + argument; + span = Span.of_thir span; + witness = W.macro; + } + | Trait (No, safety, generics, _bounds, items) -> + let items = + List.filter + ~f:(fun { attributes; _ } -> not (should_skip attributes)) items - | Impl - { - of_trait = Some of_trait; - generics; - self_ty; - items; - safety; - parent_bounds; - _; - } -> - let items = - List.filter - ~f:(fun { attributes; _ } -> not (should_skip attributes)) - items - in - let has_type = - List.exists items ~f:(fun item -> - match item.kind with Type _ -> true | _ -> false) - in - let c_e = if has_type then c_expr else c_body in - mk - @@ Impl - { - generics = c_generics generics; - self_ty = c_ty item.span self_ty; - of_trait = - ( Concrete_ident.of_def_id Trait of_trait.def_id, - List.map ~f:(c_generic_value item.span) of_trait.generic_args - ); - items = - List.map - ~f:(fun (item : Thir.impl_item) -> - (* TODO: introduce a Kind.TraitImplItem or - something. Otherwise we have to assume every - backend will see traits and impls as - records. See https://github.com/hacspec/hax/issues/271. *) - let ii_ident = - Concrete_ident.of_def_id Field item.owner_id - in - { - ii_span = Span.of_thir item.span; - ii_generics = c_generics item.generics; - ii_v = - (match (item.kind : Thir.impl_item_kind) with - | Fn { body; params; _ } -> - let params = - if List.is_empty params then - [ U.make_unit_param span ] - else List.map ~f:(c_param item.span) params - in - IIFn { body = c_e body; params } - | Const (_ty, e) -> IIFn { body = c_e e; params = [] } - | Type { ty; parent_bounds } -> - IIType - { - typ = c_ty item.span ty; - parent_bounds = - List.filter_map - ~f:(fun (clause, impl_expr, span) -> - let* bound = c_clause span clause in - match bound with - | GCType trait_goal -> - Some - ( c_impl_expr span impl_expr, - trait_goal ) - | _ -> None) - parent_bounds; - }); - ii_ident; - ii_attrs = c_item_attrs item.attributes; - }) - items; - parent_bounds = - List.filter_map - ~f:(fun (clause, impl_expr, span) -> - let* bound = c_clause span clause in - match bound with - | GCType trait_goal -> - Some (c_impl_expr span impl_expr, trait_goal) - | _ -> None) - parent_bounds; - safety = csafety safety; - } - | Use ({ span = _; res; segments; rename }, _) -> - let v = - Use - { - path = List.map ~f:(fun x -> fst x.ident) segments; - is_external = - List.exists ~f:(function Err -> true | _ -> false) res; - (* TODO: this should represent local/external? 
*) - rename; - } - in - (* ident is supposed to always be an actual item, thus here we need to cheat a bit *) - (* TODO: is this DUMMY thing really needed? there's a `Use` segment (see #272) *) - let def_id = item.owner_id in - let def_id : Types.def_id = - let value = - { - def_id.contents.value with - path = - def_id.contents.value.path - @ [ - Types. - { - data = ValueNs "DUMMY"; - disambiguator = MyInt64.of_int 0; - }; - ]; - } + in + let name = Concrete_ident.of_def_id Trait (assert_item_def_id ()) in + let { params; constraints } = c_generics generics in + let self = + let id = Local_ident.mk_id Typ 0 (* todo *) in + let ident = Local_ident.{ name = "Self"; id } in + { ident; span; attrs = []; kind = GPType } + in + let params = self :: params in + let generics = { params; constraints } in + let items = List.map ~f:c_trait_item items in + let safety = csafety safety in + mk @@ Trait { name; generics; items; safety } + | Trait (Yes, _, _, _, _) -> + unimplemented ~issue_id:930 [ item.span ] "Auto trait" + | Impl { of_trait = None; generics; items; _ } -> + let items = + List.filter + ~f:(fun { attributes; _ } -> not (should_skip attributes)) + items + in + List.map + ~f:(fun (item : Thir.impl_item) -> + let item_def_id = Concrete_ident.of_def_id Impl item.owner_id in + let attrs = c_item_attrs item.attributes in + let sub_item_erased_by_user = erased_by_user attrs in + let erased_by_type_only = + type_only && match item.kind with Fn _ -> true | _ -> false + in + let sub_item_erased = + sub_item_erased_by_user || erased_by_type_only + in + let attrs = + attrs_with_erased erased_by_type_only sub_item_erased_by_user attrs in - { contents = { def_id.contents with value } } + let c_body = if sub_item_erased then c_expr_drop_body else c_body in + + let v = + match (item.kind : Thir.impl_item_kind) with + | Fn { body; params; header = { safety; _ }; _ } -> + let params = + if List.is_empty params then [ U.make_unit_param span ] + else List.map ~f:(c_param item.span) params + in + Fn + { + name = item_def_id; + generics = + U.concat_generics (c_generics generics) + (c_generics item.generics); + body = c_body body; + params; + safety = csafety safety; + } + | Const (_ty, e) -> + Fn + { + name = item_def_id; + generics = c_generics generics; + (* does that make sense? can we have `const`? *) + body = c_body e; + params = []; + safety = Safe; + } + | Type _ty -> + assertion_failure [ item.span ] + "Inherent implementations are not supposed to have \ + associated types \ + (https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations)." + in + let ident = Concrete_ident.of_def_id Value item.owner_id in + { span = Span.of_thir item.span; v; ident; attrs }) + items + | Impl + { + of_trait = Some of_trait; + generics; + self_ty; + items; + safety; + parent_bounds; + _; + } -> + let items = + List.filter + ~f:(fun { attributes; _ } -> not (should_skip attributes)) + items + in + let items = + if erased then + [ + (* Dummy associated item *) + { + ii_span = Span.of_thir item.span; + ii_generics = { params = []; constraints = [] }; + ii_v = + IIFn + { + body = U.unit_expr span; + params = [ U.make_unit_param span ]; + }; + ii_ident = + Concrete_ident.of_name Value Rust_primitives__hax__dropped_body; + ii_attrs = []; + }; + ] + else + List.map + ~f:(fun (item : Thir.impl_item) -> + (* TODO: introduce a Kind.TraitImplItem or + something. Otherwise we have to assume every + backend will see traits and impls as + records. See https://github.com/hacspec/hax/issues/271. 
*) + let ii_ident = Concrete_ident.of_def_id Field item.owner_id in + { + ii_span = Span.of_thir item.span; + ii_generics = c_generics item.generics; + ii_v = + (match (item.kind : Thir.impl_item_kind) with + | Fn { body; params; _ } -> + let params = + if List.is_empty params then [ U.make_unit_param span ] + else List.map ~f:(c_param item.span) params + in + IIFn { body = c_expr body; params } + | Const (_ty, e) -> IIFn { body = c_expr e; params = [] } + | Type { ty; parent_bounds } -> + IIType + { + typ = c_ty item.span ty; + parent_bounds = + List.filter_map + ~f:(fun (clause, impl_expr, span) -> + let* bound = c_clause span clause in + match bound with + | GCType trait_goal -> + Some (c_impl_expr span impl_expr, trait_goal) + | _ -> None) + parent_bounds; + }); + ii_ident; + ii_attrs = c_item_attrs item.attributes; + }) + items + in + mk + @@ Impl + { + generics = c_generics generics; + self_ty = c_ty item.span self_ty; + of_trait = + ( Concrete_ident.of_def_id Trait of_trait.def_id, + List.map ~f:(c_generic_value item.span) of_trait.generic_args + ); + items; + parent_bounds = + List.filter_map + ~f:(fun (clause, impl_expr, span) -> + let* bound = c_clause span clause in + match bound with + | GCType trait_goal -> + Some (c_impl_expr span impl_expr, trait_goal) + | _ -> None) + parent_bounds; + safety = csafety safety; + } + | Use ({ span = _; res; segments; rename }, _) -> + let v = + Use + { + path = List.map ~f:(fun x -> fst x.ident) segments; + is_external = + List.exists ~f:(function Err -> true | _ -> false) res; + (* TODO: this should represent local/external? *) + rename; + } + in + (* ident is supposed to always be an actual item, thus here we need to cheat a bit *) + (* TODO: is this DUMMY thing really needed? there's a `Use` segment (see #272) *) + let def_id = item.owner_id in + let def_id : Types.def_id = + let value = + { + def_id.contents.value with + path = + def_id.contents.value.path + @ [ + Types. 
+ { data = ValueNs "DUMMY"; disambiguator = MyInt64.of_int 0 }; + ]; + } in - [ { span; v; ident = Concrete_ident.of_def_id Value def_id; attrs } ] - | Union _ -> - unimplemented ~issue_id:998 [ item.span ] "Union types: not supported" - | ExternCrate _ | Static _ | Macro _ | Mod _ | ForeignMod _ | GlobalAsm _ - | TraitAlias _ -> - mk NotImplementedYet - -let import_item ~drop_body (item : Thir.item) : + { contents = { def_id.contents with value } } + in + [ { span; v; ident = Concrete_ident.of_def_id Value def_id; attrs } ] + | Union _ -> + unimplemented ~issue_id:998 [ item.span ] "Union types: not supported" + | ExternCrate _ | Static _ | Macro _ | Mod _ | ForeignMod _ | GlobalAsm _ + | TraitAlias _ -> + mk NotImplementedYet + +let import_item ~type_only (item : Thir.item) : concrete_ident * (item list * Diagnostics.t list) = let ident = Concrete_ident.of_def_id Value item.owner_id in let r, reports = @@ -1797,6 +1862,6 @@ let import_item ~drop_body (item : Thir.item) : >> U.Reducers.disambiguate_local_idents in Diagnostics.Core.capture (fun _ -> - c_item item ~ident ~drop_body |> List.map ~f) + c_item item ~ident ~type_only |> List.map ~f) in (ident, (r, reports)) diff --git a/engine/lib/import_thir.mli b/engine/lib/import_thir.mli index 42a5f2184..370af4c05 100644 --- a/engine/lib/import_thir.mli +++ b/engine/lib/import_thir.mli @@ -5,6 +5,6 @@ val import_clause : Types.span -> Types.clause -> Ast.Rust.generic_constraint option val import_item : - drop_body:bool -> + type_only:bool -> Types.item_for__decorated_for__expr_kind -> Concrete_ident.t * (Ast.Rust.item list * Diagnostics.t list) diff --git a/engine/lib/phases/phase_cf_into_monads.ml b/engine/lib/phases/phase_cf_into_monads.ml index 373bc06ef..d5565c4fb 100644 --- a/engine/lib/phases/phase_cf_into_monads.ml +++ b/engine/lib/phases/phase_cf_into_monads.ml @@ -171,18 +171,19 @@ struct arms in let arms = - if List.is_empty arms then [] - else - let m = - List.map ~f:(fun ({ monad; _ }, _) -> monad) arms - |> List.reduce_exn ~f:(KnownMonads.lub span) - in - List.map - ~f:(fun (mself, (arm_pat, span, body, guard)) -> - let body = KnownMonads.lift "Match" body mself.monad m in - let arm_pat = { arm_pat with typ = body.typ } in - ({ arm = { arm_pat; body; guard }; span } : B.arm)) - arms + let m = + List.map ~f:(fun ({ monad; _ }, _) -> monad) arms + |> List.reduce ~f:(KnownMonads.lub span) + in + match m with + | None -> [] (* [arms] is empty *) + | Some m -> + List.map + ~f:(fun (mself, (arm_pat, span, body, guard)) -> + let body = KnownMonads.lift "Match" body mself.monad m in + let arm_pat = { arm_pat with typ = body.typ } in + ({ arm = { arm_pat; body; guard }; span } : B.arm)) + arms in let typ = match arms with [] -> UB.never_typ | hd :: _ -> hd.arm.body.typ diff --git a/engine/lib/phases/phase_hoist_disjunctive_patterns.ml b/engine/lib/phases/phase_hoist_disjunctive_patterns.ml index 70e92d163..cc637e2c8 100644 --- a/engine/lib/phases/phase_hoist_disjunctive_patterns.ml +++ b/engine/lib/phases/phase_hoist_disjunctive_patterns.ml @@ -72,6 +72,7 @@ module Make (F : Features.T) = List.map (treat_args [ [] ] fields_as_pat) ~f:(fun fields_as_pat -> let fields = + (* exn justification: `rev_map fields` and `fields` have the same length *) List.map2_exn fields_as_pat fields ~f:(fun pat { field; _ } -> { field; pat }) in diff --git a/engine/lib/phases/phase_transform_hax_lib_inline.ml b/engine/lib/phases/phase_transform_hax_lib_inline.ml index 6d739ebaa..0a5c44fc7 100644 --- a/engine/lib/phases/phase_transform_hax_lib_inline.ml +++ 
b/engine/lib/phases/phase_transform_hax_lib_inline.ml @@ -2,7 +2,6 @@ open! Prelude open! Ast module%inlined_contents Make (F : Features.T) = struct - open Ast module FA = F module FB = struct @@ -96,7 +95,7 @@ module%inlined_contents Make (F : Features.T) = struct Error.assertion_failure span "Malformed call to 'inline': cannot find string payload." in - let code = + let code : B.quote_content list = List.map bindings ~f:(fun (pat, e) -> match UB.Expect.pbinding_simple pat @@ -106,12 +105,22 @@ module%inlined_contents Make (F : Features.T) = struct let id = extract_pattern e |> Option.bind ~f:first_global_ident - |> Option.value_exn + |> Option.value_or_thunk ~default:(fun _ -> + Error.assertion_failure span + "Could not extract pattern (case constructor): \ + this may be a bug in the quote macros in \ + hax-lib.") in - `Expr { e with e = GlobalVar id } + B.Expr { e with e = GlobalVar id } | Some "_pat" -> - let pat = extract_pattern e |> Option.value_exn in - `Pat pat + let pat = + extract_pattern e + |> Option.value_or_thunk ~default:(fun _ -> + Error.assertion_failure span + "Could not extract pattern (case pat): this may \ + be a bug in the quote macros in hax-lib.") + in + Pattern pat | Some "_ty" -> let typ = match pat.typ with @@ -124,33 +133,22 @@ module%inlined_contents Make (F : Features.T) = struct "Malformed call to 'inline': expected type \ `Option<_>`." in - `Typ typ - | _ -> `Expr e) + Typ typ + | _ -> Expr e) in let verbatim = split_str ~on:"SPLIT_QUOTE" str in let contents = - let rec f verbatim - (code : - [ `Verbatim of string - | `Expr of B.expr - | `Pat of B.pat - | `Typ of B.ty ] - list) = + let rec f verbatim (code : B.quote_content list) = match (verbatim, code) with - | s :: s', code :: code' -> `Verbatim s :: code :: f s' code' - | [ s ], [] -> [ `Verbatim s ] + | s :: s', code :: code' -> B.Verbatim s :: code :: f s' code' + | [ s ], [] -> [ Verbatim s ] | [], [] -> [] | _ -> Error.assertion_failure span @@ "Malformed call to 'inline'." 
^ "\nverbatim=" ^ [%show: string list] verbatim ^ "\ncode=" - ^ [%show: - [ `Verbatim of string - | `Expr of B.expr - | `Pat of B.pat - | `Typ of B.ty ] - list] code + ^ [%show: B.quote_content list] code in f verbatim code in diff --git a/engine/lib/print_rust.ml b/engine/lib/print_rust.ml index c0b52d5fc..a0ad06b81 100644 --- a/engine/lib/print_rust.ml +++ b/engine/lib/print_rust.ml @@ -235,10 +235,10 @@ module Raw = struct !"quote!(" & List.map ~f:(function - | `Verbatim code -> !code - | `Expr e -> pexpr e - | `Pat p -> ppat p - | `Typ t -> pty span t) + | Verbatim code -> !code + | Expr e -> pexpr e + | Pattern p -> ppat p + | Typ t -> pty span t) quote.contents |> concat ~sep:!"" & !")" diff --git a/engine/lib/subtype.ml b/engine/lib/subtype.ml index 1f9528d49..2fbcde067 100644 --- a/engine/lib/subtype.ml +++ b/engine/lib/subtype.ml @@ -305,10 +305,10 @@ struct and dquote (span : span) ({ contents; witness } : A.quote) : B.quote = let f = function - | `Verbatim code -> `Verbatim code - | `Expr e -> `Expr (dexpr e) - | `Pat p -> `Pat (dpat p) - | `Typ p -> `Typ (dty span p) + | A.Verbatim code -> B.Verbatim code + | Expr e -> Expr (dexpr e) + | Pattern p -> Pattern (dpat p) + | Typ p -> Typ (dty span p) in { contents = List.map ~f contents; witness = S.quote span witness } diff --git a/engine/names/extract/build.rs b/engine/names/extract/build.rs index d5e8f563f..1ffdc56b6 100644 --- a/engine/names/extract/build.rs +++ b/engine/names/extract/build.rs @@ -140,7 +140,7 @@ fn reader_to_str(s: String) -> String { result += "\n"; result += "module Values = struct\n"; for (json, name) in &def_ids { - result += &format!("{TAB}let parsed_{name} = Types.parse_def_id (Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{})\n", "{", json, "}"); + result += &format!("{TAB}let parsed_{name} = Types.def_id_of_yojson (Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{})\n", "{", json, "}"); } result += "end\n\n"; @@ -155,7 +155,7 @@ fn reader_to_str(s: String) -> String { result += &format!("let impl_infos_json_list = match Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{} with | `List l -> l | _ -> failwith \"Expected a list of `def_id * impl_infos`\"\n\n", "{", serde_json::to_string(&impl_infos).unwrap(), "}"); result += - &format!("let impl_infos = Base.List.map ~f:(function | `List [did; ii] -> (Types.parse_def_id did, Types.parse_impl_infos ii) | _ -> failwith \"Expected tuple\") impl_infos_json_list"); + &format!("let impl_infos = Base.List.map ~f:(function | `List [did; ii] -> (Types.def_id_of_yojson did, Types.impl_infos_of_yojson ii) | _ -> failwith \"Expected tuple\") impl_infos_json_list"); result } diff --git a/engine/utils/generate_from_ast/codegen_printer.ml b/engine/utils/generate_from_ast/codegen_printer.ml index cc34663f1..93bbceda1 100644 --- a/engine/utils/generate_from_ast/codegen_printer.ml +++ b/engine/utils/generate_from_ast/codegen_printer.ml @@ -327,9 +327,7 @@ let mk datatypes = in let state = let names_with_doc = List.map ~f:(fun dt -> dt.name) datatypes in - let names_with_doc = - "quote" :: "concrete_ident" :: "local_ident" :: names_with_doc - in + let names_with_doc = "concrete_ident" :: "local_ident" :: names_with_doc in { names_with_doc } in let positions = ref [ "AstPos_Entrypoint"; "AstPos_NotApplicable" ] in diff --git a/engine/utils/generate_from_ast/codegen_visitor.ml b/engine/utils/generate_from_ast/codegen_visitor.ml index e9e65ad24..45034152e 100644 --- a/engine/utils/generate_from_ast/codegen_visitor.ml +++ 
b/engine/utils/generate_from_ast/codegen_visitor.ml @@ -230,7 +230,6 @@ let is_allowed_opaque name = "span"; "string"; "todo"; - "quote"; "float_kind"; "int_kind"; "item_quote_origin"; diff --git a/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js b/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js index 2b6bf3b5c..3a9b866b4 100644 --- a/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js +++ b/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js @@ -1,4 +1,4 @@ -const keys = p => +const keys = p => new Set( Object.keys(p) .filter(k => ![ @@ -7,13 +7,13 @@ const keys = p => .filter(k => p?.additionalProperties !== false || k != 'additionalProperties') ); const eq = (xs, ys) => - xs.size === ys.size && - [...xs].every((x) => ys.has(x)); + xs.size === ys.size && + [...xs].every((x) => ys.has(x)); let todo = (todo = "todo") => null; let assert = (fact, msg = "assert") => { - if(!fact) + if (!fact) throw msg; }; @@ -29,7 +29,7 @@ const clean = o => { if (o instanceof Object && exact_keys(o, 'allOf') && o.allOf.length == 1 - ) { + ) { let first = o.allOf[0]; delete o['allOf']; for (let k in first) @@ -40,13 +40,13 @@ const clean = o => { && o.type instanceof Array && o.type.length === 2 && o.type.includes('null') - ) { + ) { let type = o.type.filter(x => x != 'null')[0]; let other = JSON.parse(JSON.stringify(o)); other.type = type; for (let k in o) delete o[k]; - o.anyOf = [other, {type: 'null'}]; + o.anyOf = [other, { type: 'null' }]; } if (o instanceof Array) { return o @@ -74,17 +74,17 @@ let variantNameOf = s => { return v + "'"; return v; }; -let typeNameOf = s => s.replace(/[A-Z]/g, (l, i) => `${i?'_':''}${l.toLowerCase()}`); +let typeNameOf = s => s.replace(/[A-Z]/g, (l, i) => `${i ? '_' : ''}${l.toLowerCase()}`); let fieldNameOf = s => { - let ocaml_keywords = [ "and", "as", "assert", "asr", "begin", "class", "constraint", - "do", "done", "downto", "else", "end", "exception", "external", - "false", "for", "fun", "function", "functor", "if", "in", - "include", "inherit", "initializer", "land", "lazy", "let", - "lor", "lsl", "lsr", "lxor", "match", "method", "mod", "module", - "mutable", "new", "nonrec", "object", "of", "open", "or", - "private", "rec", "sig", "struct", "then", "to", "true", "try", - "type", "val", "virtual", "when", "while", "with" - ]; + let ocaml_keywords = ["and", "as", "assert", "asr", "begin", "class", "constraint", + "do", "done", "downto", "else", "end", "exception", "external", + "false", "for", "fun", "function", "functor", "if", "in", + "include", "inherit", "initializer", "land", "lazy", "let", + "lor", "lsl", "lsr", "lxor", "match", "method", "mod", "module", + "mutable", "new", "nonrec", "object", "of", "open", "or", + "private", "rec", "sig", "struct", "then", "to", "true", "try", + "type", "val", "virtual", "when", "while", "with" + ]; if (ocaml_keywords.includes(s)) return s + "'"; return s; @@ -93,7 +93,7 @@ let fieldNameOf = s => { let ensureUnique = (() => { let cache = {}; return (kind, v, disambiguer) => { - let key = JSON.stringify({kind, v}); + let key = JSON.stringify({ kind, v }); // TODO: enble check below, find a good solution // if(cache[key]) // throw `dup ${kind}, ${v}`; @@ -103,18 +103,18 @@ let ensureUnique = (() => { })(); const util = require('util'); -let log_full = o => console.error(util.inspect(o, {showHidden: false, depth: null, colors: true})); +let log_full = o => console.error(util.inspect(o, { showHidden: false, depth: null, colors: true })); let trace1 = (name, f) => (input) => { let output = 
f(input); - log_full({name, input, output}); + log_full({ name, input, output }); return output; }; let ocaml_of_type_expr = (o, path) => { if (!path) throw "Path missing!"; - let {kind, payload} = o; + let { kind, payload } = o; return (({ option: type => `(${ocaml_of_type_expr(type, [...path, 'option'])} option)`, unit: _ => `unit`, @@ -131,13 +131,13 @@ let ocaml_of_type_expr = (o, path) => { name: payload => typeNameOf(payload), })[kind] || (_ => { log_full(o); - throw "ocaml_of_type_expr: bad kind "+kind; + throw "ocaml_of_type_expr: bad kind " + kind; }))(payload); }; let mk_match = (scrut, arms, path) => { - if(!path){ + if (!path) { console.trace(); throw "Path missing!"; } @@ -154,13 +154,13 @@ let wrap_paren = s => `(${s})`; let ocaml_yojson_of_type_expr = (o, subject, path) => { if (!path) throw "Path missing!"; - let {kind, payload} = o; + let { kind, payload } = o; return `(${(({ option: type => `match ${subject} with | Option.Some x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'Some'])} | _ -> \`Null`, unit: _ => `\`Null`, tuple: types => `let (${types.map((t, i) => 'x' + i)}) = ${subject} in \`List [${types.map((t, i) => ocaml_yojson_of_type_expr(t, 'x' + i, [...path, 'tuple', i])).join(';')}]`, - array: type => + array: type => `\`List (List.map (fun x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'array'])}) ${subject})`, boolean: _ => `\`Bool ${subject}`, string: _ => `\`String ${subject}`, @@ -170,10 +170,10 @@ let ocaml_yojson_of_type_expr = (o, subject, path) => { int: `\`Int ${subject}` })[o.repr], char: _ => `\`String (Base.Char.to_string ${subject})`, - name: payload => `to_json_${typeNameOf(payload)} ${subject}`, + name: payload => `yojson_of_${typeNameOf(payload)} ${subject}`, })[kind] || (_ => { log_full(o); - throw "ocaml_arms_of_type_expr: bad kind "+kind; + throw "ocaml_arms_of_type_expr: bad kind " + kind; }))(payload)})`; }; @@ -181,7 +181,7 @@ let ocaml_yojson_of_type_expr = (o, subject, path) => { let ocaml_arms_of_type_expr = (o, path) => { if (!path) throw "Path missing!"; - let {kind, payload} = o; + let { kind, payload } = o; return (({ option: type => [ [`\`Null`, `Option.None`], @@ -189,17 +189,17 @@ let ocaml_arms_of_type_expr = (o, path) => { ], unit: _ => [[`\`Null`, '()']], tuple: types => { - let sub_matches = types.map((type, i) => + let sub_matches = types.map((type, i) => mk_match(`v${i}`, ocaml_arms_of_type_expr(type, [...path, 'tuple', i]), [...path, 'tuple'])); return [ [`\`List [${types.map((_, i) => `v${i}`).join(';')}]`, - `(${sub_matches.join(',')})` + `(${sub_matches.join(',')})` ], ]; }, array: type => [ [`\`List l`, - `List.map (fun x -> ${mk_match('x', ocaml_arms_of_type_expr(type, [...path, 'array']), [...path, 'array'])}) l` + `List.map (fun x -> ${mk_match('x', ocaml_arms_of_type_expr(type, [...path, 'array']), [...path, 'array'])}) l` ] ], boolean: _ => [[`\`Bool b`, 'b']], @@ -219,10 +219,10 @@ let ocaml_arms_of_type_expr = (o, path) => { [`\`Intlit s`, 'Base.Int.of_string s'] ] })[o.repr], - name: payload => [['remains', `parse_${typeNameOf(payload)} remains`]], + name: payload => [['remains', `${typeNameOf(payload)}_of_yojson remains`]], })[kind] || (_ => { log_full(o); - throw "ocaml_arms_of_type_expr: bad kind "+kind; + throw "ocaml_arms_of_type_expr: bad kind " + kind; }))(payload); }; @@ -232,10 +232,10 @@ let parse_type_name = s => { return s.split('/').slice(-1)[0]; }; -let int_repr_of_format = format => - (format.endsWith('int128') || format == 'uint64' || format == 'uint' /*`uint`s are `usize`s actually, 
so that's safer to assume it's a uint64, see https://github.com/GREsau/schemars/blob/386e3d7f5ac601795fb4e247291bbef31512ded3/schemars/src/json_schema_impls/primitives.rs#L85C16-L85C21*/) - ? 'string' - : (format == 'int64' || format == 'uint32' ? 'int64' : 'int'); +let int_repr_of_format = format => + (format.endsWith('int128') || format == 'uint64' || format == 'uint' /*`uint`s are `usize`s actually, so that's safer to assume it's a uint64, see https://github.com/GREsau/schemars/blob/386e3d7f5ac601795fb4e247291bbef31512ded3/schemars/src/json_schema_impls/primitives.rs#L85C16-L85C21*/) + ? 'string' + : (format == 'int64' || format == 'uint32' ? 'int64' : 'int'); let is_type = { option: def => { @@ -244,14 +244,14 @@ let is_type = { && is_type.expr(def.anyOf[0]) && exact_keys(def.anyOf[1], 'type') && def.anyOf[1].type === 'null' - ) + ) return { kind: 'option', payload: is_type.expr(def.anyOf[0]) }; return false; }, - + unit: def => { if (exact_keys(def, 'type') && def.type === 'null') @@ -272,7 +272,7 @@ let is_type = { }; return false; }, - + array: def => { if (exact_keys(def, 'type', 'items') && def.type === 'array' @@ -283,37 +283,37 @@ let is_type = { }; return false; }, - - expr: def => - (exact_keys(def, '$ref') ? { - kind: 'name', payload: parse_type_name(def['$ref']) - } : false) + + expr: def => + (exact_keys(def, '$ref') ? { + kind: 'name', payload: parse_type_name(def['$ref']) + } : false) || is_type.option(def) || is_type.array(def) || is_type.unit(def) || is_type.tuple(def) || (def.type === 'integer' - ? {kind: 'integer', repr: int_repr_of_format(def.format)} + ? { kind: 'integer', repr: int_repr_of_format(def.format) } : false) || (def.type === 'string' && def.maxLength === def.minLength && def.minLength === 1 - ? {kind: 'char'} + ? { kind: 'char' } : false) - || ( ( exact_keys(def, 'type') - && ['boolean', 'string'].includes(def.type) - ) ? {kind: def.type} : false - ) || false, + || ((exact_keys(def, 'type') + && ['boolean', 'string'].includes(def.type) + ) ? 
{ kind: def.type } : false + ) || false, record: def => { if ((eq(keys(def), new Set(["type", "required", "properties"])) - || eq(keys(def), new Set(["type", "properties"])) - ) + || eq(keys(def), new Set(["type", "properties"])) + ) && def.type === "object" && (def.required || []).every(k => typeof k == 'string') && Object.values(def.properties).every(is_type.expr)) return Object.fromEntries(Object.entries(def.properties).map(([n, v]) => [n, is_type.expr(v)])); return false; }, - + variant: def => { let doc = def.description; if (exporters.enum.guard(def)) @@ -327,7 +327,7 @@ let is_type = { if (exact_keys(def, 'type', 'required', 'properties') && def.type === 'object' && Object.values(def.properties).length == 1 - ){ + ) { let [name, value] = Object.entries(def.properties)[0]; if (is_type.expr(value)) return [{ @@ -362,7 +362,7 @@ let is_type = { let export_record = (fields, path) => { let record_expression = fields.map(([field, type], i) => { - let p = [...path, 'field_'+field]; + let p = [...path, 'field_' + field]; let sub = mk_match('x', ocaml_arms_of_type_expr(type, p), p); let match = `match List.assoc_opt "${field}" l with Option.Some x -> begin ${sub} end | Option.None -> raise (MissingField {field = "${field}"; fields = l})`; return `${fieldNameOf(field)} = begin ${match} end`; @@ -376,62 +376,61 @@ let exporters = { oneOf: { guard: def => eq(keys(def), new Set(["oneOf"])) && def.oneOf.every(is_type.variant), - f: (name, {oneOf}) => { + f: (name, { oneOf }) => { let variants = oneOf.map(is_type.variant).flat(); - let type = variants.map(({kind, name: variant_name, payloadKind, payload, doc}) => { + let type = variants.map(({ kind, name: variant_name, payloadKind, payload, doc }) => { doc = mkdoc(doc); let variant = ensureUnique('variant', variantNameOf(variant_name)); return ({ record: () => { let fields = Object.entries(payload).map(([field, value]) => - fieldNameOf(field) + ' : ' + ocaml_of_type_expr(value, ['rec-variant:'+variant+':'+field])); + fieldNameOf(field) + ' : ' + ocaml_of_type_expr(value, ['rec-variant:' + variant + ':' + field])); return `${variant} of {${fields.join(';\n')}}${doc}`; }, - expr: () => `${variant} of (${ocaml_of_type_expr(payload, ['expr-variant:'+variant+':'+name])})${doc}`, + expr: () => `${variant} of (${ocaml_of_type_expr(payload, ['expr-variant:' + variant + ':' + name])})${doc}`, empty: () => `${variant}${doc}`, }[payloadKind] || (() => { throw "bad payloadKind: " + payloadKind; }))(); }).join('\n | '); - let parse_arms = variants.map(({kind, name: variant_name, payloadKind, payload}) => { + let parse_arms = variants.map(({ kind, name: variant_name, payloadKind, payload }) => { let variant = variantNameOf(variant_name); - let wrap = (arms, prefix='') => [ + let wrap = (arms, prefix = '') => [ [`\`Assoc ["${variant_name}", rec_value]`, - prefix + mk_match('rec_value', arms, ['rec-variant_'+variant+'_'+variant_name]) + prefix + mk_match('rec_value', arms, ['rec-variant_' + variant + '_' + variant_name]) ] ]; return ({ record: () => { - let [pat, expr] = export_record(Object.entries(payload), ['rec-variant_'+variant+'_'+variant_name]); - return wrap([[pat, variant+' '+expr]]); + let [pat, expr] = export_record(Object.entries(payload), ['rec-variant_' + variant + '_' + variant_name]); + return wrap([[pat, variant + ' ' + expr]]); }, - expr: () => wrap(ocaml_arms_of_type_expr(payload, ['expr-variant(PA):'+name+':'+variant+':'+variant_name]), variant + ' '), + expr: () => wrap(ocaml_arms_of_type_expr(payload, ['expr-variant(PA):' + name + ':' + 
variant + ':' + variant_name]), variant + ' '), empty: () => [[`\`String "${variant_name}"`, variant]], }[payloadKind] || (() => { throw "bad payloadKind: " + payloadKind; }))(); }).flat(); - let parse = mk_match('o', parse_arms, ['parse_'+name]); - let to_json = `match o with ${variants.map(({kind, name: variant_name, payloadKind, payload}) => { + let parse = mk_match('o', parse_arms, [name + '_of_yojson']); + let to_json = `match o with ${variants.map(({ kind, name: variant_name, payloadKind, payload }) => { let variant = variantNameOf(variant_name); let wrap = (x, e) => `${variant} ${x} -> \`Assoc ["${variant_name}", ${e}]`; return ({ record: () => { let fields = Object.entries(payload); return wrap( - `{${fields.map(([field, type], i) => `${fieldNameOf(field)}`).join('; ')}}`, - `\`Assoc [${ - fields.map(([field, type], i) => `("${field}", ${ocaml_yojson_of_type_expr(type, fieldNameOf(field), [name+':'+variant, 'variant', field])})`).join('; ') - }]` + `{${fields.map(([field, type], i) => `${fieldNameOf(field)}`).join('; ')}}`, + `\`Assoc [${fields.map(([field, type], i) => `("${field}", ${ocaml_yojson_of_type_expr(type, fieldNameOf(field), [name + ':' + variant, 'variant', field])})`).join('; ') + }]` ); }, - expr: () => wrap('x', ocaml_yojson_of_type_expr(payload, 'x', [name+':'+variant, 'payload'])), + expr: () => wrap('x', ocaml_yojson_of_type_expr(payload, 'x', [name + ':' + variant, 'payload'])), empty: () => `${variant} -> \`String "${variant_name}"`, }[payloadKind] || (() => { throw "bad payloadKind: " + payloadKind; }))(); }).join(' | ')}`; - return {type, parse, to_json}; + return { type, parse, to_json }; }, }, empty_struct: { @@ -447,23 +446,23 @@ let exporters = { // object is a *flat* record object: { guard: def => (eq(keys(def), new Set(["type", "required", "properties"])) - || eq(keys(def), new Set(["type", "properties"])) - ) + || eq(keys(def), new Set(["type", "properties"])) + ) && def.type === "object" && (def.required || []).every(k => typeof k == 'string') && Object.values(def.properties).every(is_type.expr), - f: (name, {required, properties}) => { + f: (name, { required, properties }) => { let fields = Object.entries(properties).map( ([name, prop]) => [name, is_type.expr(prop), prop.description] ); - let [pat, expr] = export_record(fields, ['struct_'+name]); - + let [pat, expr] = export_record(fields, ['struct_' + name]); + return { - type: `{ ${fields.map(([fname, type, doc]) => `${fieldNameOf(fname)} : ${ocaml_of_type_expr(type, ['struct_'+fname+'_'+name])}${mkdoc(doc)}`).join(';\n')} }`, - parse: mk_match('o', [[pat, expr]], ['struct_'+name]), + type: `{ ${fields.map(([fname, type, doc]) => `${fieldNameOf(fname)} : ${ocaml_of_type_expr(type, ['struct_' + fname + '_' + name])}${mkdoc(doc)}`).join(';\n')} }`, + parse: mk_match('o', [[pat, expr]], ['struct_' + name]), to_json: //`let {${fields.map(([fname, type, doc]) => fieldNameOf(fname)).join(';')}} = o in` - `\`Assoc [${fields.map(([fname, type, doc]) => `("${fname}", ${ocaml_yojson_of_type_expr(type, 'o.'+fieldNameOf(fname), ['todo'])})`).join('; ')}]` + `\`Assoc [${fields.map(([fname, type, doc]) => `("${fname}", ${ocaml_yojson_of_type_expr(type, 'o.' 
+ fieldNameOf(fname), ['todo'])})`).join('; ')}]` }; }, }, @@ -473,7 +472,7 @@ let exporters = { f: (name, o) => { assert(o.enum.every(x => typeof x == "string"), 'not every enum is a string'); - if(o.enum.length == 0) { + if (o.enum.length == 0) { return { type: '|', parse: 'failwith "cannot parse an empty type"', @@ -489,16 +488,16 @@ let exporters = { let parse_string = `match s with ` + variants.map( - ({Δ, variant}) => `"${Δ}" -> ${variant}` + ({ Δ, variant }) => `"${Δ}" -> ${variant}` ).join(' | ') + ` | s -> failwith ("unexpected variant [" ^ s ^ "] while parsing enum [${name}]")`; - + return { - type: variants.map(({variant}) => variant).join(' | '), + type: variants.map(({ variant }) => variant).join(' | '), parse: ` match o with | \`String s -> (${parse_string}) | _ -> failwith "expected a string while parsing a ${name}" `, - to_json: `match o with ${variants.map(({variant, variantOriginName}) => `${variant} -> \`String "${variantOriginName}"`).join(' | ')}`, + to_json: `match o with ${variants.map(({ variant, variantOriginName }) => `${variant} -> \`String "${variantOriginName}"`).join(' | ')}`, }; }, }, @@ -506,36 +505,39 @@ let exporters = { let export_definition = (name, def) => { let suitable_exporters = Object.entries(exporters).filter( - ([_, {guard}]) => guard(def) + ([_, { guard }]) => guard(def) ); - if (suitable_exporters.length != 1){ + if (suitable_exporters.length != 1) { console.error(`ERROR: each definition should have exactly one suited exporter, but type "${name}" has the following exporter(s): ${JSON.stringify(suitable_exporters.map(([n, _]) => n))}.`); console.error('name', name); log_full(def); console.error('xname', name); - + throw "kind error"; } - let [_, {f}] = suitable_exporters[0]; + let [_, { f }] = suitable_exporters[0]; name = ensureUnique('type', typeNameOf(name)); let r = f(name, def); - if(r === null) + if (r === null) return `(* type ${name} *)`; - let {type, parse, to_json} = r; - return {name, type, parse, to_json}; + let { type, parse, to_json } = r; + return { name, type, parse, to_json }; // return [{type, parse}] // return `type ${name} = ${type}\nlet parse_${name} (o: Yojson.Safe.t): ${name} = ${parse}\n`; }; -function run(str){ +function run(str) { let contents = JSON.parse(str); const definitions = clean(contents.definitions); let sig = ``; - + let impl = `include struct -[@@@warning "-A"]`; +[@@@warning "-A"] +`; + + impl += `let hax_version = {escape|${contents['$id'].replace(/\|escape\}/g, '|_escape}')}|escape}`; let items = Object.entries(definitions) .map(([name, def]) => ['Node_for_TyKind' == name ? 'node_for_ty_kind_generated' : name, def]) @@ -545,7 +547,7 @@ function run(str){ ).filter(x => x instanceof Object); let derive_items = ['show', 'eq']; - + impl += ` module ParseError = struct exception MissingField of { @@ -560,16 +562,17 @@ module ParseError = struct end open ParseError + `; let derive_clause = derive_items.length ? 
`[@@deriving ${derive_items.join(', ')}]` : ''; impl += ( 'type ' - + items.map(({name, type}) => - `${name} = ${type}\n` - ).join('\nand ') - + derive_clause + + items.map(({ name, type }) => + `${name} = ${type}\n` + ).join('\nand ') + + derive_clause ); impl += ` and node_for__ty_kind = node_for_ty_kind_generated @@ -578,8 +581,9 @@ and node_for__def_id_contents = node_for_def_id_contents_generated type map_types = ${"[`TyKind of ty_kind | `DefIdContents of def_id_contents]"} let cache_map: (int64, ${"[ `Value of map_types | `JSON of Yojson.Safe.t ]"}) Base.Hashtbl.t = Base.Hashtbl.create (module Base.Int64) -let parse_table_id_node (type t) (name: string) (encode: t -> map_types) (decode: map_types -> t option) (parse: Yojson.Safe.t -> t) (o: Yojson.Safe.t): (t * int64) = - let label = "parse_table_id_node:" ^ name ^ ": " in +module Exn = struct +let table_id_node_of_yojson (type t) (name: string) (encode: t -> map_types) (decode: map_types -> t option) (parse: Yojson.Safe.t -> t) (o: Yojson.Safe.t): (t * int64) = + let label = "table_id_node_of_yojson:" ^ name ^ ": " in match o with | \`Assoc alist -> begin let id = match List.assoc_opt "id" alist with @@ -606,45 +610,80 @@ let parse_table_id_node (type t) (name: string) (encode: t -> map_types) (decode `; impl += (''); - impl += ('let rec ' + items.map(({name, type, parse}) => - `parse_${name} (o: Yojson.Safe.t): ${name} = ${parse}` + impl += ('let rec ' + items.map(({ name, type, parse }) => + `${name}_of_yojson (o: Yojson.Safe.t): ${name} = ${parse}` ).join('\nand ')); impl += ` -and parse_node_for__ty_kind (o: Yojson.Safe.t): node_for__ty_kind = +and node_for__ty_kind_of_yojson (o: Yojson.Safe.t): node_for__ty_kind = let (value, id) = - parse_table_id_node "TyKind" + table_id_node_of_yojson "TyKind" (fun value -> \`TyKind value) (function | \`TyKind value -> Some value | _ -> None) - parse_ty_kind + ty_kind_of_yojson o in {value; id} -and parse_node_for__def_id_contents (o: Yojson.Safe.t): node_for__def_id_contents = +and node_for__def_id_contents_of_yojson (o: Yojson.Safe.t): node_for__def_id_contents = let (value, id) = - parse_table_id_node "DefIdContents" + table_id_node_of_yojson "DefIdContents" (fun value -> \`DefIdContents value) (function | \`DefIdContents value -> Some value | _ -> None) - parse_def_id_contents + def_id_contents_of_yojson o in {value; id} `; impl += (''); - impl += ('let rec ' + items.map(({name, type, parse, to_json}) => - `to_json_${name} (o: ${name}): Yojson.Safe.t = ${to_json}` + impl += ('let rec ' + items.map(({ name, type, parse, to_json }) => + `yojson_of_${name} (o: ${name}): Yojson.Safe.t = ${to_json}` ).join('\nand ')); impl += ` -and to_json_node_for__ty_kind {value; id} = to_json_node_for_ty_kind_generated {value; id} -and to_json_node_for__def_id_contents {value; id} = to_json_node_for_def_id_contents_generated {value; id} +and yojson_of_node_for__ty_kind {value; id} = yojson_of_node_for_ty_kind_generated {value; id} +and yojson_of_node_for__def_id_contents {value; id} = yojson_of_node_for_def_id_contents_generated {value; id} +end + +open struct + let catch_parsing_errors (type a b) (label: string) (f: a -> b) (x: a): (b, Base.Error.t) Base.Result.t = + try Base.Result.Ok (f x) with + | e -> Base.Result.Error (Base.Error.of_exn ~backtrace:\`Get e) + let unwrap = function + | Base.Result.Ok value -> value + | Base.Result.Error err -> + let err = + let path = Utils.tempfile_path ~suffix:".log" in + Core.Out_channel.write_all path + ~data:(Base.Error.to_string_hum err); + path + in + 
prerr_endline [%string {| +Error: could not serialize or deserialize a hax value. +This error arises from an incompatibility betwen hax components: hax-engine, cargo-hax and hax-lib. +Potential fixes: + - Make sure the version of \`hax-lib\` for the crate your are trying to extract matches the version of hax currently installed (%{hax_version}). + - Run \`cargo clean\` + - Reinstall hax + +The full stack trace was dumped to %{err}. +|}]; + exit 1 +end `; + impl += (items.map(({ name, type, parse, to_json }) => + ` +let safe_yojson_of_${name} = catch_parsing_errors "yojson_of_${name}" Exn.yojson_of_${name} +let safe_${name}_of_yojson = catch_parsing_errors "${name}_of_yojson" Exn.${name}_of_yojson +let yojson_of_${name} x = unwrap (safe_yojson_of_${name} x) +let ${name}_of_yojson x = unwrap (safe_${name}_of_yojson x)` + ).join('\n')); + return impl + ' \n end'; } -function parse_args(){ +function parse_args() { let [script_name, input_path, output_path, ...rest] = process.argv.slice(1); - if(!input_path || !output_path || rest.length) { + if (!input_path || !output_path || rest.length) { console.log(` Usage: node ${script_name} INPUT_PATH OUTPUT_PATH @@ -657,17 +696,17 @@ Usage: node ${script_name} INPUT_PATH OUTPUT_PATH async function read(stream) { const chunks = []; - for await (const chunk of stream) chunks.push(chunk); + for await (const chunk of stream) chunks.push(chunk); return Buffer.concat(chunks).toString('utf8'); } -async function main(){ +async function main() { const fs = require('fs'); - let {input_path, output_path} = parse_args(); + let { input_path, output_path } = parse_args(); let out = run(input_path == '-' - ? await read(process.stdin) - : fs.readFileSync(input_path, 'utf-8') - ); + ? await read(process.stdin) + : fs.readFileSync(input_path, 'utf-8') + ); output_path == '-' ? process.stdout.write(out) : fs.writeFileSync(output_path, out); diff --git a/flake.nix b/flake.nix index 168556d28..17b4c6a22 100644 --- a/flake.nix +++ b/flake.nix @@ -190,6 +190,7 @@ pkgs.cargo-release pkgs.cargo-insta pkgs.openssl.dev + pkgs.libz.dev pkgs.pkg-config pkgs.rust-analyzer pkgs.toml2json diff --git a/frontend/exporter/src/traits/utils.rs b/frontend/exporter/src/traits/utils.rs index cc06e573e..202915236 100644 --- a/frontend/exporter/src/traits/utils.rs +++ b/frontend/exporter/src/traits/utils.rs @@ -76,6 +76,9 @@ pub fn required_predicates<'tcx>( .iter() .map(|(clause, _span)| *clause), ), + // The tuple struct/variant constructor functions inherit the generics and predicates from + // their parents. + Variant | Ctor(..) => return required_predicates(tcx, tcx.parent(def_id)), // We consider all predicates on traits to be outputs Trait => None, // `predicates_defined_on` ICEs on other def kinds. 
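
Note on the `required_predicates` hunk just above: the new `Variant | Ctor(..)` arm encodes the fact that a tuple struct or variant constructor is itself a function whose generics and predicates live on the parent type rather than on the constructor's own `DefId`. A minimal Rust sketch of the surface-language situation it refers to (the names `Pair` and `build` are illustrative and not part of this patch):

#[derive(Clone)]
struct Pair<T: Clone>(T, T);

fn build<T: Clone>(x: T, y: T) -> Pair<T> {
    // The tuple-struct constructor is a function item `fn(T, T) -> Pair<T>`;
    // its `T` parameter and the `T: Clone` bound are inherited from `Pair`,
    // which is why the exporter recurses through `tcx.parent(def_id)` instead
    // of querying the constructor's own definition.
    let ctor: fn(T, T) -> Pair<T> = Pair::<T>;
    ctor(x, y)
}

fn main() {
    let p = build(1u8, 2u8);
    let _copy = p.clone();
}
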
diff --git a/frontend/exporter/src/types/def_id.rs b/frontend/exporter/src/types/def_id.rs index a777ceded..d5e59b04a 100644 --- a/frontend/exporter/src/types/def_id.rs +++ b/frontend/exporter/src/types/def_id.rs @@ -18,7 +18,7 @@ use crate::prelude::*; use crate::{AdtInto, JsonSchema}; #[cfg(feature = "rustc")] -use rustc_span::def_id::DefId as RDefId; +use {rustc_hir as hir, rustc_span::def_id::DefId as RDefId}; pub type Symbol = String; @@ -29,6 +29,94 @@ impl<'t, S> SInto for rustc_span::symbol::Symbol { } } +/// Reflects [`hir::Safety`] +#[cfg_attr(not(feature = "extract_names_mode"), derive(AdtInto, JsonSchema))] +#[cfg_attr(not(feature = "extract_names_mode"), args(, from: hir::Safety, state: S as _s))] +#[derive_group(Serializers)] +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum Safety { + Unsafe, + Safe, +} + +pub type Mutability = bool; + +/// Reflects [`hir::def::CtorKind`] +#[derive_group(Serializers)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] +#[cfg_attr(not(feature = "extract_names_mode"), args(, from: hir::def::CtorKind, state: S as _s))] +pub enum CtorKind { + Fn, + Const, +} + +/// Reflects [`hir::def::CtorOf`] +#[derive_group(Serializers)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] +#[cfg_attr(not(feature = "extract_names_mode"), args(, from: hir::def::CtorOf, state: S as _s))] +pub enum CtorOf { + Struct, + Variant, +} + +/// Reflects [`rustc_span::hygiene::MacroKind`] +#[derive_group(Serializers)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] +#[cfg_attr(not(feature = "extract_names_mode"), args(, from: rustc_span::hygiene::MacroKind, state: S as _s))] +pub enum MacroKind { + Bang, + Attr, + Derive, +} + +/// Reflects [`rustc_hir::def::DefKind`] +#[derive_group(Serializers)] +#[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema, AdtInto))] +#[cfg_attr(not(feature = "extract_names_mode"),args(, from: rustc_hir::def::DefKind, state: S as tcx))] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub enum DefKind { + Mod, + Struct, + Union, + Enum, + Variant, + Trait, + TyAlias, + ForeignTy, + TraitAlias, + AssocTy, + TyParam, + Fn, + Const, + ConstParam, + Static { + safety: Safety, + mutability: Mutability, + nested: bool, + }, + Ctor(CtorOf, CtorKind), + AssocFn, + AssocConst, + Macro(MacroKind), + ExternCrate, + Use, + ForeignMod, + AnonConst, + InlineConst, + OpaqueTy, + Field, + LifetimeParam, + GlobalAsm, + Impl { + of_trait: bool, + }, + Closure, + SyntheticCoroutineBody, +} + /// Reflects [`rustc_hir::def_id::DefId`] #[derive_group(Serializers)] #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] @@ -54,6 +142,9 @@ pub struct DefIdContents { /// indexes unless you cannot do otherwise. pub index: (u32, u32), pub is_local: bool, + + /// The kind of definition this `DefId` points to. 
+ pub kind: crate::DefKind, } #[cfg(feature = "rustc")] @@ -140,6 +231,7 @@ pub(crate) fn translate_def_id<'tcx, S: BaseState<'tcx>>(s: &S, def_id: RDefId) rustc_hir::def_id::DefIndex::as_u32(def_id.index), ), is_local: def_id.is_local(), + kind: tcx.def_kind(def_id).sinto(s), }; let contents = s.with_global_cache(|cache| id_table::Node::new(contents, &mut cache.id_table_session)); diff --git a/frontend/exporter/src/types/hir.rs b/frontend/exporter/src/types/hir.rs index 307120076..42bbf4b20 100644 --- a/frontend/exporter/src/types/hir.rs +++ b/frontend/exporter/src/types/hir.rs @@ -48,8 +48,6 @@ pub enum Movability { Movable, } -pub type Mutability = bool; - #[cfg(feature = "rustc")] impl SInto for hir::Mutability { fn sinto(&self, _s: &S) -> Mutability { @@ -60,24 +58,6 @@ impl SInto for hir::Mutability { } } -/// Reflects [`hir::def::CtorKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: hir::def::CtorKind, state: S as _s)] -pub enum CtorKind { - Fn, - Const, -} - -/// Reflects [`hir::def::CtorOf`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: hir::def::CtorOf, state: S as _s)] -pub enum CtorOf { - Struct, - Variant, -} - /// Reflects [`hir::RangeEnd`] #[derive(AdtInto)] #[args(, from: hir::RangeEnd, state: S as _s)] @@ -88,16 +68,6 @@ pub enum RangeEnd { Excluded, } -/// Reflects [`hir::Safety`] -#[derive(AdtInto)] -#[args(, from: hir::Safety, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum Safety { - Unsafe, - Safe, -} - /// Reflects [`hir::ImplicitSelfKind`] #[derive(AdtInto)] #[args(, from: hir::ImplicitSelfKind, state: S as _s)] @@ -840,10 +810,15 @@ fn region_bounds_at_current_owner<'tcx, S: UnderOwnerState<'tcx>>(s: &S) -> Gene }; let clauses: Vec> = if use_item_bounds { - tcx.item_bounds(s.owner_id()) + tcx.explicit_item_bounds(s.owner_id()) + .map_bound(|clauses| { + clauses + .iter() + .map(|(x, _span)| x) + .copied() + .collect::>() + }) .instantiate_identity() - .iter() - .collect() } else { predicates_defined_on(tcx, s.owner_id()) .predicates @@ -1172,7 +1147,6 @@ pub enum AttrKind { DocComment(CommentKind, Symbol), } -sinto_todo!(rustc_hir::def, DefKind); sinto_todo!(rustc_hir, GenericArgs<'a> as HirGenericArgs); sinto_todo!(rustc_hir, InlineAsm<'a>); sinto_todo!(rustc_hir, MissingLifetimeKind); diff --git a/frontend/exporter/src/types/mir.rs b/frontend/exporter/src/types/mir.rs index db5839bc9..ffc2535c2 100644 --- a/frontend/exporter/src/types/mir.rs +++ b/frontend/exporter/src/types/mir.rs @@ -5,6 +5,8 @@ use crate::prelude::*; use crate::sinto_as_usize; #[cfg(feature = "rustc")] +use rustc_middle::{mir, ty}; +#[cfg(feature = "rustc")] use tracing::trace; #[derive_group(Serializers)] @@ -410,75 +412,91 @@ pub(crate) fn get_function_from_def_id_and_generics<'tcx, S: BaseState<'tcx> + H (def_id.sinto(s), generics, trait_refs, source) } -/// Get a `FunOperand` from an `Operand` used in a function call. -/// Return the [DefId] of the function referenced by an operand, with the -/// parameters substitution. -/// The [Operand] comes from a [TerminatorKind::Call]. -#[cfg(feature = "rustc")] -fn get_function_from_operand<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>( - s: &S, - op: &rustc_middle::mir::Operand<'tcx>, -) -> (FunOperand, Vec, Vec, Option) { - // Match on the func operand: it should be a constant as we don't support - // closures for now. 
- use rustc_middle::mir::Operand; - use rustc_middle::ty::TyKind; - let ty = op.ty(&s.mir().local_decls, s.base().tcx); - trace!("type: {:?}", ty); - // If the type of the value is one of the singleton types that corresponds to each function, - // that's enough information. - if let TyKind::FnDef(def_id, generics) = ty.kind() { - let (fun_id, generics, trait_refs, trait_info) = - get_function_from_def_id_and_generics(s, *def_id, *generics); - return (FunOperand::Id(fun_id), generics, trait_refs, trait_info); - } - match op { - Operand::Constant(_) => { - unimplemented!("{:?}", op); - } - Operand::Move(place) => { - // Function pointer. A fn pointer cannot have bound variables or trait references, so - // we don't need to extract generics, trait refs, etc. - let place = place.sinto(s); - (FunOperand::Move(place), Vec::new(), Vec::new(), None) - } - Operand::Copy(_place) => { - unimplemented!("{:?}", op); - } - } -} - #[cfg(feature = "rustc")] fn translate_terminator_kind_call<'tcx, S: BaseState<'tcx> + HasMir<'tcx> + HasOwnerId>( s: &S, terminator: &rustc_middle::mir::TerminatorKind<'tcx>, ) -> TerminatorKind { - if let rustc_middle::mir::TerminatorKind::Call { + let tcx = s.base().tcx; + let mir::TerminatorKind::Call { func, args, destination, target, unwind, - call_source, fn_span, + .. } = terminator - { - let (fun, generics, trait_refs, trait_info) = get_function_from_operand(s, func); + else { + unreachable!() + }; - TerminatorKind::Call { - fun, + let ty = func.ty(&s.mir().local_decls, tcx); + let hax_ty: crate::Ty = ty.sinto(s); + let sig = match hax_ty.kind() { + TyKind::Arrow(sig) => sig, + TyKind::Closure(_, args) => &args.untupled_sig, + _ => supposely_unreachable_fatal!( + s, + "TerminatorKind_Call_expected_fn_type"; + { ty } + ), + }; + let fun_op = if let ty::TyKind::FnDef(def_id, generics) = ty.kind() { + // The type of the value is one of the singleton types that corresponds to each function, + // which is enough information. + let (def_id, generics, trait_refs, trait_info) = + get_function_from_def_id_and_generics(s, *def_id, *generics); + FunOperand::Static { + def_id, generics, - args: args.sinto(s), - destination: destination.sinto(s), - target: target.sinto(s), - unwind: unwind.sinto(s), - call_source: call_source.sinto(s), - fn_span: fn_span.sinto(s), trait_refs, trait_info, } } else { - unreachable!() + use mir::Operand; + match func { + Operand::Constant(_) => { + unimplemented!("{:?}", func); + } + Operand::Move(place) => { + // Function pointer or closure. + let place = place.sinto(s); + FunOperand::DynamicMove(place) + } + Operand::Copy(_place) => { + unimplemented!("{:?}", func); + } + } + }; + + let late_bound_generics = sig + .bound_vars + .iter() + .map(|var| match var { + BoundVariableKind::Region(r) => r, + BoundVariableKind::Ty(..) | BoundVariableKind::Const => { + supposely_unreachable_fatal!( + s, + "non_lifetime_late_bound"; + { var } + ) + } + }) + .map(|_| { + GenericArg::Lifetime(Region { + kind: RegionKind::ReErased, + }) + }) + .collect(); + TerminatorKind::Call { + fun: fun_op, + late_bound_generics, + args: args.sinto(s), + destination: destination.sinto(s), + target: target.sinto(s), + unwind: unwind.sinto(s), + fn_span: fn_span.sinto(s), } } @@ -562,13 +580,25 @@ pub enum SwitchTargets { SwitchInt(IntUintTy, Vec<(ScalarInt, BasicBlock)>, BasicBlock), } +/// A value of type `fn<...> A -> B` that can be called. 
#[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] pub enum FunOperand { - /// Call to a top-level function designated by its id - Id(DefId), - /// Use of a closure - Move(Place), + /// Call to a statically-known function. + Static { + def_id: DefId, + /// If `Some`, this is a method call on the given trait reference. Otherwise this is a call + /// to a known function. + trait_info: Option, + /// If this is a trait method call, this only includes the method generics; the trait + /// generics are included in the `ImplExpr` in `trait_info`. + generics: Vec, + /// Trait predicates required by the function generics. Like for `generics`, this only + /// includes the predicates required by the method, if applicable. + trait_refs: Vec, + }, + /// Use of a closure or a function pointer value. Counts as a move from the given place. + DynamicMove(Place), } #[derive_group(Serializers)] @@ -607,18 +637,16 @@ pub enum TerminatorKind { )] Call { fun: FunOperand, - /// We truncate the substitution so as to only include the arguments - /// relevant to the method (and not the trait) if it is a trait method - /// call. See [ParamsInfo] for the full details. - generics: Vec, + /// A `FunOperand` is a value of type `fn<...> A -> B`. The generics in `<...>` are called + /// "late-bound" and are instantiated anew at each call site. This list provides the + /// generics used at this call-site. They are all lifetimes and at the time of writing are + /// all erased lifetimes. + late_bound_generics: Vec, args: Vec>, destination: Place, target: Option, unwind: UnwindAction, - call_source: CallSource, fn_span: Span, - trait_refs: Vec, - trait_info: Option, }, TailCall { func: Operand, @@ -934,26 +962,12 @@ pub enum AggregateKind { Option, Option, ), - #[custom_arm(rustc_middle::mir::AggregateKind::Closure(rust_id, generics) => { - let def_id : DefId = rust_id.sinto(s); - // The generics is meant to be converted to a function signature. Note - // that Rustc does its job: the PolyFnSig binds the captured local - // type, regions, etc. variables, which means we can treat the local - // closure like any top-level function. + #[custom_arm(rustc_middle::mir::AggregateKind::Closure(def_id, generics) => { let closure = generics.as_closure(); - let sig = closure.sig().sinto(s); - - // Solve the predicates from the parent (i.e., the item which defines the closure). - let tcx = s.base().tcx; - let parent_generics = closure.parent_args(); - let parent_generics_ref = tcx.mk_args(parent_generics); - // TODO: does this handle nested closures? 
- let parent = tcx.generics_of(rust_id).parent.unwrap(); - let trait_refs = solve_item_required_traits(s, parent, parent_generics_ref); - - AggregateKind::Closure(def_id, parent_generics.sinto(s), trait_refs, sig) + let args = ClosureArgs::sfrom(s, *def_id, closure); + AggregateKind::Closure(def_id.sinto(s), args) })] - Closure(DefId, Vec, Vec, PolyFnSig), + Closure(DefId, ClosureArgs), Coroutine(DefId, Vec), CoroutineClosure(DefId, Vec), RawPtr(Ty, Mutability), @@ -1208,7 +1222,6 @@ sinto_todo!(rustc_middle::mir, UserTypeProjection); sinto_todo!(rustc_middle::mir, MirSource<'tcx>); sinto_todo!(rustc_middle::mir, CoroutineInfo<'tcx>); sinto_todo!(rustc_middle::mir, VarDebugInfo<'tcx>); -sinto_todo!(rustc_middle::mir, CallSource); sinto_todo!(rustc_middle::mir, UnwindTerminateReason); sinto_todo!(rustc_middle::mir::coverage, CoverageKind); sinto_todo!(rustc_middle::mir::interpret, ConstAllocation<'a>); diff --git a/frontend/exporter/src/types/new/full_def.rs b/frontend/exporter/src/types/new/full_def.rs index cacd983e4..4473fa76c 100644 --- a/frontend/exporter/src/types/new/full_def.rs +++ b/frontend/exporter/src/types/new/full_def.rs @@ -252,7 +252,7 @@ pub enum FullDefKind { inline: InlineAttr, #[value(s.base().tcx.constness(s.owner_id()) == rustc_hir::Constness::Const)] is_const: bool, - #[value(s.base().tcx.fn_sig(s.owner_id()).instantiate_identity().sinto(s))] + #[value(get_method_sig(s).sinto(s))] sig: PolyFnSig, #[value(s.owner_id().as_local().map(|ldid| Body::body(ldid, s)))] body: Option, @@ -271,10 +271,8 @@ pub enum FullDefKind { is_const: bool, #[value({ let fun_type = s.base().tcx.type_of(s.owner_id()).instantiate_identity(); - match fun_type.kind() { - ty::TyKind::Closure(_, args) => args.as_closure().sinto(s), - _ => unreachable!(), - } + let ty::TyKind::Closure(_, args) = fun_type.kind() else { unreachable!() }; + ClosureArgs::sfrom(s, s.owner_id(), args.as_closure()) })] args: ClosureArgs, }, @@ -769,6 +767,70 @@ where } } +/// The signature of a method impl may be a subtype of the one expected from the trait decl, as in +/// the example below. For correctness, we must be able to map from the method generics declared in +/// the trait to the actual method generics. Because this would require type inference, we instead +/// simply return the declared signature. This will cause issues if it is possible to use such a +/// more-specific implementation with its more-specific type, but we have a few other issues with +/// lifetime-generic function pointers anyway so this is unlikely to cause problems. +/// +/// ```ignore +/// trait MyCompare: Sized { +/// fn compare(self, other: Other) -> bool; +/// } +/// impl<'a> MyCompare<&'a ()> for &'a () { +/// // This implementation is more general because it works for non-`'a` refs. Note that only +/// // late-bound vars may differ in this way. +/// // `<&'a () as MyCompare<&'a ()>>::compare` has type `fn<'b>(&'a (), &'b ()) -> bool`, +/// // but type `fn(&'a (), &'a ()) -> bool` was expected from the trait declaration. 
+/// fn compare<'b>(self, _other: &'b ()) -> bool { +/// true +/// } +/// } +/// ``` +#[cfg(feature = "rustc")] +fn get_method_sig<'tcx, S>(s: &S) -> ty::PolyFnSig<'tcx> +where + S: UnderOwnerState<'tcx>, +{ + let tcx = s.base().tcx; + let def_id = s.owner_id(); + let real_sig = tcx.fn_sig(def_id).instantiate_identity(); + let item = tcx.associated_item(def_id); + if !matches!(item.container, ty::AssocItemContainer::ImplContainer) { + return real_sig; + } + let Some(decl_method_id) = item.trait_item_def_id else { + return real_sig; + }; + let declared_sig = tcx.fn_sig(decl_method_id); + + // TODO(Nadrieril): Temporary hack: if the signatures have the same number of bound vars, we + // keep the real signature. While the declared signature is more correct, it is also less + // normalized and we can't normalize without erasing regions but regions are crucial in + // function signatures. Hence we cheat here, until charon gains proper normalization + // capabilities. + if declared_sig.skip_binder().bound_vars().len() == real_sig.bound_vars().len() { + return real_sig; + } + + let impl_def_id = item.container_id(tcx); + // The trait predicate that is implemented by the surrounding impl block. + let implemented_trait_ref = tcx + .impl_trait_ref(impl_def_id) + .unwrap() + .instantiate_identity(); + // Construct arguments for the declared method generics in the context of the implemented + // method generics. + let impl_args = ty::GenericArgs::identity_for_item(tcx, def_id); + let decl_args = impl_args.rebase_onto(tcx, impl_def_id, implemented_trait_ref.args); + let sig = declared_sig.instantiate(tcx, decl_args); + // Avoids accidentally using the same lifetime name twice in the same scope + // (once in impl parameters, second in the method declaration late-bound vars). 
+ let sig = tcx.anonymize_bound_vars(sig); + sig +} + #[cfg(feature = "rustc")] fn get_ctor_contents<'tcx, S, Body>(s: &S, ctor_of: CtorOf) -> FullDefKind where diff --git a/frontend/exporter/src/types/span.rs b/frontend/exporter/src/types/span.rs index a179ca54e..6304b32a9 100644 --- a/frontend/exporter/src/types/span.rs +++ b/frontend/exporter/src/types/span.rs @@ -36,16 +36,6 @@ pub enum AstPass { ProcMacroHarness, } -/// Reflects [`rustc_span::hygiene::MacroKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: rustc_span::hygiene::MacroKind, state: S as _s)] -pub enum MacroKind { - Bang, - Attr, - Derive, -} - /// Reflects [`rustc_span::hygiene::ExpnKind`] #[derive(AdtInto)] #[args(<'tcx, S: BaseState<'tcx>>, from: rustc_span::hygiene::ExpnKind, state: S as gstate)] diff --git a/frontend/exporter/src/types/ty.rs b/frontend/exporter/src/types/ty.rs index c0b12fd4d..7c18a4521 100644 --- a/frontend/exporter/src/types/ty.rs +++ b/frontend/exporter/src/types/ty.rs @@ -802,15 +802,21 @@ pub enum TyKind { let sig = tcx.fn_sig(*def).instantiate(tcx, generics); TyKind::Arrow(Box::new(sig.sinto(s))) }, - ty::TyKind::Closure (_def_id, generics) => { - let sig = generics.as_closure().sig(); - let sig = s.base().tcx.signature_unclosure(sig, rustc_hir::Safety::Safe); - TyKind::Arrow(Box::new(sig.sinto(s))) - }, )] - /// Reflects [`ty::TyKind::FnPtr`], [`ty::TyKind::FnDef`] and [`ty::TyKind::Closure`] + /// Reflects [`ty::TyKind::FnPtr`] and [`ty::TyKind::FnDef`] Arrow(Box), + #[custom_arm( + ty::TyKind::Closure (def_id, generics) => { + let closure = generics.as_closure(); + TyKind::Closure( + def_id.sinto(s), + ClosureArgs::sfrom(s, *def_id, closure), + ) + }, + )] + Closure(DefId, ClosureArgs), + #[custom_arm( ty::TyKind::Adt(adt_def, generics) => { let def_id = adt_def.did().sinto(s); @@ -1272,21 +1278,57 @@ pub enum AliasRelationDirection { } /// Reflects [`ty::ClosureArgs`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: ty::ClosureArgs>, state: S as s)] -#[derive(Clone, Debug, JsonSchema)] +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)] #[derive_group(Serializers)] pub struct ClosureArgs { - #[value(self.kind().sinto(s))] + /// The base kind of this closure. The kinds are ordered by inclusion: any `Fn` works as an + /// `FnMut`, and any `FnMut` works as an `FnOnce`. pub kind: ClosureKind, - #[value(self.parent_args().sinto(s))] + /// The arguments to the parent (i.e., the item which defines the closure). pub parent_args: Vec, - #[value(self.sig().sinto(s))] - pub sig: PolyFnSig, - #[value(self.upvar_tys().sinto(s))] + /// The solved predicates from the parent (i.e., the item which defines the closure). + pub parent_trait_refs: Vec, + /// The proper `fn(A, B, C) -> D` signature of the closure. + pub untupled_sig: PolyFnSig, + /// The signature of the closure as one input and one output, where the input arguments are + /// tupled. This is relevant to implementing the `Fn*` traits. + pub tupled_sig: PolyFnSig, + /// The set of captured variables. Together they form the state of the closure. pub upvar_tys: Vec, } +#[cfg(feature = "rustc")] +impl ClosureArgs { + // Manual implementation because we need the `def_id` of the closure. 
+ pub(crate) fn sfrom<'tcx, S>( + s: &S, + def_id: RDefId, + from: ty::ClosureArgs>, + ) -> Self + where + S: UnderOwnerState<'tcx>, + { + let tcx = s.base().tcx; + let sig = from.sig(); + ClosureArgs { + kind: from.kind().sinto(s), + parent_args: from.parent_args().sinto(s), + // The solved predicates from the parent (i.e., the item which defines the closure). + parent_trait_refs: { + // TODO: handle nested closures + let parent = tcx.generics_of(def_id).parent.unwrap(); + let parent_generics_ref = tcx.mk_args(from.parent_args()); + solve_item_required_traits(s, parent, parent_generics_ref) + }, + tupled_sig: sig.sinto(s), + untupled_sig: tcx + .signature_unclosure(sig, rustc_hir::Safety::Safe) + .sinto(s), + upvar_tys: from.upvar_tys().sinto(s), + } + } +} + /// Reflects [`ty::ClosureKind`] #[derive(AdtInto)] #[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ClosureKind, state: S as _tcx)] diff --git a/hax-lib/macros/src/dummy.rs b/hax-lib/macros/src/dummy.rs index 5134763c6..429e04dea 100644 --- a/hax-lib/macros/src/dummy.rs +++ b/hax-lib/macros/src/dummy.rs @@ -29,7 +29,9 @@ identity_proc_macro_attribute!( process_init, process_write, process_read, + opaque, opaque_type, + transparent, refinement_type, fstar_replace, coq_replace, diff --git a/hax-lib/macros/src/implementation.rs b/hax-lib/macros/src/implementation.rs index f71f1891c..2189232c7 100644 --- a/hax-lib/macros/src/implementation.rs +++ b/hax-lib/macros/src/implementation.rs @@ -249,8 +249,8 @@ pub fn lemma(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { ); } } - use AttrPayload::NeverDropBody; - quote! { #attr #NeverDropBody #item }.into() + use AttrPayload::NeverErased; + quote! { #attr #NeverErased #item }.into() } /* @@ -617,13 +617,32 @@ pub fn attributes(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStr quote! { #item #(#extra_items)* }.into() } -/// Mark a struct or an enum opaque: the extraction will assume the +/// Mark an item opaque: the extraction will assume the /// type without revealing its definition. #[proc_macro_error] #[proc_macro_attribute] -pub fn opaque_type(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { +#[deprecated(note = "Please use 'opaque' instead")] +pub fn opaque_type(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { + opaque(attr, item) +} + +/// Mark an item opaque: the extraction will assume the +/// type without revealing its definition. +#[proc_macro_error] +#[proc_macro_attribute] +pub fn opaque(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { + let item: Item = parse_macro_input!(item); + let attr = AttrPayload::Erased; + quote! {#attr #item}.into() +} + +/// Mark an item transparent: the extraction will not +/// make it opaque regardless of the `-i` flag default. +#[proc_macro_error] +#[proc_macro_attribute] +pub fn transparent(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { let item: Item = parse_macro_input!(item); - let attr = AttrPayload::OpaqueType; + let attr = AttrPayload::NeverErased; quote! {#attr #item}.into() } diff --git a/hax-lib/macros/src/quote.rs b/hax-lib/macros/src/quote.rs index d02bdea78..8d13591c4 100644 --- a/hax-lib/macros/src/quote.rs +++ b/hax-lib/macros/src/quote.rs @@ -141,14 +141,14 @@ pub(super) fn item( }; let kind_attr = AttrPayload::ItemQuote(kind); let status_attr = AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true }); - use AttrPayload::NeverDropBody; + use AttrPayload::NeverErased; quote! 
{ #assoc_attr #item #attribute_to_inject #status_attr const _: () = { - #NeverDropBody + #NeverErased #uid_attr #kind_attr fn quote_contents() { diff --git a/hax-lib/macros/src/utils.rs b/hax-lib/macros/src/utils.rs index c1c974a39..35fe0fa76 100644 --- a/hax-lib/macros/src/utils.rs +++ b/hax-lib/macros/src/utils.rs @@ -301,7 +301,7 @@ pub fn make_fn_decoration( } else { quote! {} }; - use AttrPayload::NeverDropBody; + use AttrPayload::NeverErased; quote! { #[cfg(#DebugOrHaxCfgExpr)] #late_skip @@ -312,7 +312,7 @@ pub fn make_fn_decoration( #uid_attr #late_skip #[allow(unused)] - #NeverDropBody + #NeverErased #decoration_sig { #phi } diff --git a/hax-lib/macros/types/src/lib.rs b/hax-lib/macros/types/src/lib.rs index bbb1d4575..b342a1355 100644 --- a/hax-lib/macros/types/src/lib.rs +++ b/hax-lib/macros/types/src/lib.rs @@ -118,7 +118,7 @@ pub enum AttrPayload { /// Mark an item so that hax never drop its body (this is useful /// for pre- and post- conditions of a function we dropped the /// body of: pre and post are part of type signature) - NeverDropBody, + NeverErased, NewtypeAsRefinement, /// Mark an item as a lemma statement to prove in the backend Lemma, @@ -130,8 +130,8 @@ pub enum AttrPayload { PVConstructor, PVHandwritten, TraitMethodNoPrePost, - /// Make a type opaque - OpaqueType, + /// Make an item opaque + Erased, } pub const HAX_TOOL: &str = "_hax"; diff --git a/hax-lib/src/proc_macros.rs b/hax-lib/src/proc_macros.rs index 866c7cfda..65136716f 100644 --- a/hax-lib/src/proc_macros.rs +++ b/hax-lib/src/proc_macros.rs @@ -2,8 +2,8 @@ //! proc-macro crate cannot export anything but procedural macros. pub use hax_lib_macros::{ - attributes, ensures, exclude, impl_fn_decoration, include, lemma, loop_invariant, opaque_type, - refinement_type, requires, trait_fn_decoration, + attributes, ensures, exclude, impl_fn_decoration, include, lemma, loop_invariant, opaque, + opaque_type, refinement_type, requires, trait_fn_decoration, transparent, }; pub use hax_lib_macros::{ diff --git a/hax-types/Cargo.toml b/hax-types/Cargo.toml index caeba72f6..75bdea854 100644 --- a/hax-types/Cargo.toml +++ b/hax-types/Cargo.toml @@ -22,7 +22,7 @@ serde_json.workspace = true annotate-snippets.workspace = true hax-adt-into.workspace = true tracing.workspace = true -serde-brief ={ version = "*", features = ["std", "alloc"]} +serde-brief ={ version = "0.1", features = ["std", "alloc"]} zstd = "0.13.1" miette = "7.2.0" diff --git a/hax-types/src/cli_options/mod.rs b/hax-types/src/cli_options/mod.rs index f84795be7..d8b1a6551 100644 --- a/hax-types/src/cli_options/mod.rs +++ b/hax-types/src/cli_options/mod.rs @@ -303,7 +303,8 @@ pub struct TranslationOptions { /// {n} - `+:`: only includes the type of the selected items (no /// dependencies). This includes full struct and enums, but only - /// the type signature of functions, dropping their bodies. + /// the type signature of functions and trait impls (except when + /// they contain associated types), dropping their bodies. #[arg( value_parser = parse_inclusion_clause, value_delimiter = ' ', diff --git a/hax-types/src/diagnostics/mod.rs b/hax-types/src/diagnostics/mod.rs index 91670f03e..73a7ecc0e 100644 --- a/hax-types/src/diagnostics/mod.rs +++ b/hax-types/src/diagnostics/mod.rs @@ -68,6 +68,8 @@ impl std::fmt::Display for Diagnostics { Kind::NonTrivialAndMutFnInput => write!(f, "The support in hax of function with one or more inputs of type `&mut _` is limited. 
Onlu trivial patterns are allowed there: `fn f(x: &mut (T, U)) ...` is allowed while `f((x, y): &mut (T, U))` is rejected."), + Kind::FStarParseError { fstar_snippet, details: _ } => write!(f, "The following code snippet could not be parsed as valid F*:\n```\n{fstar_snippet}\n```"), + _ => write!(f, "{:?}", self.kind), } } @@ -135,7 +137,13 @@ pub enum Kind { /// An hax attribute (from `hax-lib-macros`) was rejected AttributeRejected { reason: String, - }, + } = 12, + + /// A snippet of F* code could not be parsed + FStarParseError { + fstar_snippet: String, + details: String, + } = 13, } impl Kind { diff --git a/hax-types/src/driver_api.rs b/hax-types/src/driver_api.rs index 51eb86bbc..7f065e0a1 100644 --- a/hax-types/src/driver_api.rs +++ b/hax-types/src/driver_api.rs @@ -28,6 +28,7 @@ pub struct HaxMeta { )>, pub def_ids: Vec, pub comments: Vec<(hax_frontend_exporter::Span, String)>, + pub hax_version: String, } use hax_frontend_exporter::id_table; @@ -49,7 +50,15 @@ where pub fn read(reader: impl std::io::Read) -> (Self, id_table::Table) { let reader = zstd::stream::read::Decoder::new(reader).unwrap(); let reader = std::io::BufReader::new(reader); - id_table::WithTable::destruct(serde_brief::from_reader(reader).unwrap()) + let haxmeta = id_table::WithTable::>::destruct( + serde_brief::from_reader(reader).unwrap(), + ); + if haxmeta.0.hax_version != crate::HAX_VERSION { + let version = haxmeta.0.hax_version; + let expected = crate::HAX_VERSION; + panic!("An invariant was broken: `*.haxmeta` was produced by hax version `{version}` while the current version of hax is `{expected}`. Please report this to https://github.com/hacspec/hax/issues."); + }; + haxmeta } } diff --git a/hax-types/src/engine_api.rs b/hax-types/src/engine_api.rs index 74fb3a52c..d68f653f3 100644 --- a/hax-types/src/engine_api.rs +++ b/hax-types/src/engine_api.rs @@ -6,6 +6,7 @@ type ThirBody = hax_frontend_exporter::ThirBody; #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct EngineOptions { + pub hax_version: String, pub backend: BackendOptions<()>, pub input: Vec>, pub impl_infos: Vec<( diff --git a/proof-libs/fstar/core/Alloc.Collections.Btree.Set.fsti b/proof-libs/fstar/core/Alloc.Collections.Btree.Set.fsti new file mode 100644 index 000000000..990c55d41 --- /dev/null +++ b/proof-libs/fstar/core/Alloc.Collections.Btree.Set.fsti @@ -0,0 +1,3 @@ +module Alloc.Collections.Btree.Set + +val t_BTreeSet (t:Type0) (u:unit): eqtype diff --git a/proof-libs/fstar/core/Alloc.Slice.fst b/proof-libs/fstar/core/Alloc.Slice.fsti similarity index 70% rename from proof-libs/fstar/core/Alloc.Slice.fst rename to proof-libs/fstar/core/Alloc.Slice.fsti index 2927bcc63..82a01332d 100644 --- a/proof-libs/fstar/core/Alloc.Slice.fst +++ b/proof-libs/fstar/core/Alloc.Slice.fsti @@ -3,3 +3,5 @@ open Rust_primitives.Arrays open Alloc.Vec let impl__to_vec #a (s: t_Slice a): t_Vec a Alloc.Alloc.t_Global = s + +val impl__concat #t1 #t2 (s: t_Slice t1): t_Slice t2 diff --git a/proof-libs/fstar/core/Alloc.Vec.fst b/proof-libs/fstar/core/Alloc.Vec.fst index 6e3b411e1..846e05694 100644 --- a/proof-libs/fstar/core/Alloc.Vec.fst +++ b/proof-libs/fstar/core/Alloc.Vec.fst @@ -1,7 +1,7 @@ module Alloc.Vec open Rust_primitives -unfold type t_Vec t (_: unit) = s:t_Slice t +unfold type t_Vec t (_: unit) = t_Slice t let impl__new #t (): t_Vec t () = FStar.Seq.empty @@ -26,6 +26,8 @@ let impl_1__len #t (#[(Tactics.exact (`()))]alloc:unit) (v: t_Vec t alloc) = assert (n <= maxint usize_inttype); mk_int #usize_inttype (Seq.length v) +let 
impl_1__as_slice #t (#[(Tactics.exact (`()))]alloc:unit) (v: t_Vec t alloc) : t_Slice t = v + let from_elem #a (item: a) (len: usize) = Seq.create (v len) item open Rust_primitives.Hax diff --git a/proof-libs/fstar/core/Core.Array.fst b/proof-libs/fstar/core/Core.Array.fsti similarity index 64% rename from proof-libs/fstar/core/Core.Array.fst rename to proof-libs/fstar/core/Core.Array.fsti index 44f48c19a..e02274be0 100644 --- a/proof-libs/fstar/core/Core.Array.fst +++ b/proof-libs/fstar/core/Core.Array.fsti @@ -8,5 +8,6 @@ let impl_23__map #a n #b (arr: t_Array a n) (f: a -> b): t_Array b n let impl_23__as_slice #a len (arr: t_Array a len): t_Slice a = arr -let from_fn #a len (f: usize -> a): t_Array a len = admit() +val from_fn #a len (f: usize -> a): Pure (t_Array a len) (requires True) (ensures (fun a -> forall i. Seq.index a i == f (sz i))) + diff --git a/proof-libs/fstar/core/Core.Fmt.fsti b/proof-libs/fstar/core/Core.Fmt.fsti index 50b2d5be2..b865ea0d4 100644 --- a/proof-libs/fstar/core/Core.Fmt.fsti +++ b/proof-libs/fstar/core/Core.Fmt.fsti @@ -23,3 +23,4 @@ val impl_2__new_v1 (sz1: usize) (sz2: usize) (pieces: t_Slice string) (args: t_S val impl_7__write_fmt (fmt: t_Formatter) (args: t_Arguments): t_Formatter & t_Result val impl_2__new_const (args: t_Slice string): t_Arguments + diff --git a/proof-libs/fstar/core/Core.Marker.fst b/proof-libs/fstar/core/Core.Marker.fst index f624d969e..8a3ac3d9d 100644 --- a/proof-libs/fstar/core/Core.Marker.fst +++ b/proof-libs/fstar/core/Core.Marker.fst @@ -1,6 +1,8 @@ + module Core.Marker -type t_PhantomData (t: Type) = t +type t_PhantomData (t:Type0) = + | PhantomData: t_PhantomData t class t_Send (h: Type) = { dummy_send_field: unit diff --git a/proof-libs/fstar/core/Core.Num.fsti b/proof-libs/fstar/core/Core.Num.fsti index 7c994c747..1b50fa95a 100644 --- a/proof-libs/fstar/core/Core.Num.fsti +++ b/proof-libs/fstar/core/Core.Num.fsti @@ -1,6 +1,8 @@ module Core.Num open Rust_primitives +let impl__u16__MAX: u16 = mk_u16 (maxint u16_inttype) + let impl__u8__wrapping_add: u8 -> u8 -> u8 = add_mod let impl__u8__wrapping_sub: u8 -> u8 -> u8 = sub_mod let impl__u16__wrapping_add: u16 -> u16 -> u16 = add_mod diff --git a/proof-libs/fstar/core/Core.Ops.Arith.fsti b/proof-libs/fstar/core/Core.Ops.Arith.fsti index a3d75535f..db0911455 100644 --- a/proof-libs/fstar/core/Core.Ops.Arith.fsti +++ b/proof-libs/fstar/core/Core.Ops.Arith.fsti @@ -46,3 +46,16 @@ class t_SubAssign self rhs = { f_sub_assign: x:self -> y:rhs -> Pure self (f_sub_assign_pre x y) (fun r -> f_sub_assign_post x y r); } +class t_MulAssign self rhs = { + f_mul_assign_pre: self -> rhs -> bool; + f_mul_assign_post: self -> rhs -> self -> bool; + f_mul_assign: x:self -> y:rhs -> Pure self (f_mul_assign_pre x y) (fun r -> f_mul_assign_post x y r); +} + +class t_DivAssign self rhs = { + f_div_assign_pre: self -> rhs -> bool; + f_div_assign_post: self -> rhs -> self -> bool; + f_div_assign: x:self -> y:rhs -> Pure self (f_div_assign_pre x y) (fun r -> f_div_assign_post x y r); +} + + diff --git a/proof-libs/fstar/core/Core.Option.fst b/proof-libs/fstar/core/Core.Option.fst index 9f514751d..6025de2c6 100644 --- a/proof-libs/fstar/core/Core.Option.fst +++ b/proof-libs/fstar/core/Core.Option.fst @@ -18,6 +18,8 @@ let impl__is_some #t_Self (self: t_Option t_Self): bool = Option_Some? self let impl__is_none #t_Self (self: t_Option t_Self): bool = Option_None? 
self +let impl__take #t (x: t_Option t) : (t_Option t & t_Option t) = (x, Option_None) + let impl__as_ref #t_Self (self: t_Option t_Self): t_Option t_Self = self let impl__unwrap_or_default diff --git a/proof-libs/fstar/core/Core.Result.fst b/proof-libs/fstar/core/Core.Result.fst index ab3cd15d9..00c033a7f 100644 --- a/proof-libs/fstar/core/Core.Result.fst +++ b/proof-libs/fstar/core/Core.Result.fst @@ -12,6 +12,8 @@ let impl__map_err #e1 #e2 (x: t_Result 't e1) (f: e1 -> e2): t_Result 't e2 let impl__is_ok #t #e (self: t_Result t e): bool = Result_Ok? self +let impl__expect #t #e (x: t_Result t e {Result_Ok? x}) (y: string): t = Result_Ok?.v x + let impl__map #t #e #u (self: t_Result t e) (f: t -> u): t_Result u e = match self with | Result_Ok v -> Result_Ok (f v) @@ -21,3 +23,4 @@ let impl__and_then #t #e #u (self: t_Result t e) (op: t -> t_Result u e): t_Resu match self with | Result_Ok v -> op v | Result_Err e -> Result_Err e + diff --git a/proof-libs/fstar/core/Rand_core.Os.fsti b/proof-libs/fstar/core/Rand_core.Os.fsti new file mode 100644 index 000000000..42fa75dc1 --- /dev/null +++ b/proof-libs/fstar/core/Rand_core.Os.fsti @@ -0,0 +1,9 @@ +module Rand_core.Os + +type t_OsRng + +[@FStar.Tactics.Typeclasses.tcinstance] +val impl_rng_core: Rand_core.t_RngCore t_OsRng + +[@FStar.Tactics.Typeclasses.tcinstance] +val impl_crypto_rng_core: Rand_core.t_CryptoRngCore t_OsRng diff --git a/proof-libs/fstar/core/Rand_core.fsti b/proof-libs/fstar/core/Rand_core.fsti index 60f53bb75..2dfc1cc25 100644 --- a/proof-libs/fstar/core/Rand_core.fsti +++ b/proof-libs/fstar/core/Rand_core.fsti @@ -10,3 +10,9 @@ class t_RngCore (t_Self: Type0) = { class t_CryptoRng (t_Self: Type0) = { marker_trait: unit } + +class t_CryptoRngCore (t_Self: Type0) = { + f_rngcore: t_Self -> t_Self +} + + diff --git a/proof-libs/fstar/core/Std.Collections.Hash.Map.fsti b/proof-libs/fstar/core/Std.Collections.Hash.Map.fsti index 82dd99551..8d88407cb 100644 --- a/proof-libs/fstar/core/Std.Collections.Hash.Map.fsti +++ b/proof-libs/fstar/core/Std.Collections.Hash.Map.fsti @@ -1,8 +1,5 @@ module Std.Collections.Hash.Map -open Core -open FStar.Mul +type t_HashMap (k v s:Type0) -type t_HashMap (v_K: Type0) (v_V: Type0) (v_S: Type0) = { - f__hax_placeholder:unit -} +val impl__new #k #v: unit -> t_HashMap k v Std.Hash.Random.t_RandomState diff --git a/proof-libs/fstar/core/Std.Hash.Random.fsti b/proof-libs/fstar/core/Std.Hash.Random.fsti index f2148d3fd..d6de0c9eb 100644 --- a/proof-libs/fstar/core/Std.Hash.Random.fsti +++ b/proof-libs/fstar/core/Std.Hash.Random.fsti @@ -1,5 +1,3 @@ module Std.Hash.Random -type t_RandomState = { - dummy_random_state_field: unit -} +type t_RandomState diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Arrays.fst b/proof-libs/fstar/rust_primitives/Rust_primitives.Arrays.fst deleted file mode 100644 index df8e01342..000000000 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Arrays.fst +++ /dev/null @@ -1,21 +0,0 @@ -module Rust_primitives.Arrays - -open Rust_primitives.Integers - -let of_list (#t:Type) (l: list t {FStar.List.Tot.length l < maxint Lib.IntTypes.U16}): t_Slice t = Seq.seq_of_list l -let to_list (#t:Type) (s: t_Slice t): list t = Seq.seq_to_list s - -let to_of_list_lemma t l = Seq.lemma_list_seq_bij l -let of_to_list_lemma t l = Seq.lemma_seq_list_bij l - -let map_array #a #b #n (arr: t_Array a n) (f: a -> b): t_Array b n - = FStar.Seq.map_seq_len f arr; - FStar.Seq.map_seq f arr - -let createi #t l f = admit() // see issue #423 - -let lemma_index_concat x y i = admit() // 
see issue #423 - -let lemma_index_slice x y i = admit() // see issue #423 - -let eq_intro a b = admit() // see issue #423 diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.BitVectors.fst b/proof-libs/fstar/rust_primitives/Rust_primitives.BitVectors.fst deleted file mode 100644 index 5bb914e52..000000000 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.BitVectors.fst +++ /dev/null @@ -1,66 +0,0 @@ -module Rust_primitives.BitVectors - -open FStar.Mul -open Rust_primitives.Arrays -open Rust_primitives.Integers - -#set-options "--fuel 0 --ifuel 1 --z3rlimit 40" - -let lemma_get_bit_bounded #t x d i = admit() // see issue #423 - -let lemma_get_bit_bounded' #t x d = admit() // see issue #423 - -let pow2_minus_one_mod_lemma1 (n: nat) (m: nat {m < n}) - : Lemma (((pow2 n - 1) / pow2 m) % 2 == 1) - = let d: pos = n - m in - Math.Lemmas.pow2_plus m d; - Math.Lemmas.lemma_div_plus (-1) (pow2 d) (pow2 m); - if d > 0 then Math.Lemmas.pow2_double_mult (d-1) - -let pow2_minus_one_mod_lemma2 (n: nat) (m: nat {n <= m}) - : Lemma (((pow2 n - 1) / pow2 m) % 2 == 0) - = Math.Lemmas.pow2_le_compat m n; - Math.Lemmas.small_div (pow2 n - 1) (pow2 m) - -let bit_vec_to_int_t #t (d: num_bits t) (bv: bit_vec d) = admit () - -let bit_vec_to_int_t_lemma #t (d: num_bits t) (bv: bit_vec d) i = admit () - -let bit_vec_to_int_t_array d bv = admit () // see issue #423 -let bit_vec_to_nat_array d bv = admit () // see issue #423 - -let get_bit_pow2_minus_one #t n nth - = reveal_opaque (`%get_bit) (get_bit (mk_int #t (pow2 n - 1)) nth); - if v nth < n then pow2_minus_one_mod_lemma1 n (v nth) - else pow2_minus_one_mod_lemma2 n (v nth) - -let mask_inv_opt_in_range #t (mask: int_t t {Some? (mask_inv_opt (v mask))}) - : Lemma (range (Some?.v (mask_inv_opt (v mask))) t) - [SMTPat (mask_inv_opt (v mask))] - = let n = (Some?.v (mask_inv_opt (v mask))) in - assert (pow2 n - 1 == v mask) - -let get_bit_pow2_minus_one_i32 x nth - = let n = Some?.v (mask_inv_opt x) in - mk_int_equiv_lemma #i32_inttype x; - get_bit_pow2_minus_one #i32_inttype n nth - -let get_bit_pow2_minus_one_i16 x nth - = let n = Some?.v (mask_inv_opt x) in - mk_int_equiv_lemma #i16_inttype x; - get_bit_pow2_minus_one #i16_inttype n nth - -let get_bit_pow2_minus_one_u32 x nth - = let n = Some?.v (mask_inv_opt x) in - mk_int_equiv_lemma #u32_inttype x; - get_bit_pow2_minus_one #u32_inttype n nth - -let get_bit_pow2_minus_one_u16 x nth - = let n = Some?.v (mask_inv_opt x) in - mk_int_equiv_lemma #u16_inttype x; - get_bit_pow2_minus_one #u16_inttype n nth - -let get_bit_pow2_minus_one_u8 t x nth - = let n = Some?.v (mask_inv_opt x) in - mk_int_equiv_lemma #u8_inttype x; - get_bit_pow2_minus_one #u8_inttype n nth diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst b/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst deleted file mode 100644 index 8093a8a52..000000000 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst +++ /dev/null @@ -1,27 +0,0 @@ -module Rust_primitives.Hax.Monomorphized_update_at - -open Rust_primitives -open Rust_primitives.Hax -open Core.Ops.Range - -let update_at_range #n s i x = - let res = update_at s i x in - admit(); // To be proved // see issue #423 - res - -let update_at_range_to #n s i x = - let res = update_at s i x in - admit(); // see issue #423 - res - -let update_at_range_from #n s i x = - let res = update_at s i x in - admit(); // see issue #423 - res - -let update_at_range_full s i x = - let res = update_at s i x 
in - admit(); // see issue #423 - res - - diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti b/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti index dd335cf4e..db97e41cf 100644 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti +++ b/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti @@ -14,9 +14,6 @@ let update_at_usize (x: t) : t_Array t (length s) = Seq.upd #t s (v i) x - // : Pure (t_Array t (length s)) - // (requires (v i < Seq.length s)) - // (ensures (fun res -> res == Seq.upd s (v i) x)) val update_at_range #n (#t: Type0) diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fst b/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fst deleted file mode 100644 index 2fce084a2..000000000 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fst +++ /dev/null @@ -1,73 +0,0 @@ -module Rust_primitives.Integers - -#set-options "--z3rlimit 50 --fuel 0 --ifuel 0" - - -let pow2_values x = - let p = pow2 x in - assert_norm (p == normalize_term (pow2 x)) - -let usize_inttype = LI.U32 -let isize_inttype = LI.S32 - -let v_injective #t a = LI.v_injective #t #LI.PUB a -let v_mk_int #t n = LI.v_mk_int #t #LI.PUB n - -let usize_to_uint32 x = x -let usize_to_uint64 x = Int.Cast.uint32_to_uint64 x -let size_to_uint64 x = Int.Cast.uint32_to_uint64 x - -let mk_int #t a = LI.mk_int #t #LI.PUB a -let mk_int_equiv_lemma #_ = admit () // see issue #423 -let mk_int_v_lemma #t a = () -let v_mk_int_lemma #t a = () -let add_mod_equiv_lemma #t a b = LI.add_mod_lemma #t #LI.PUB a b -let add_equiv_lemma #t a b = LI.add_lemma #t #LI.PUB a b -let incr_equiv_lemma #t a = LI.incr_lemma #t #LI.PUB a - -let mul_mod_equiv_lemma #t a b = LI.mul_mod_lemma #t #LI.PUB a b -let mul_equiv_lemma #t a b = LI.mul_lemma #t #LI.PUB a b -let sub_mod_equiv_lemma #t a b = LI.sub_mod_lemma #t #LI.PUB a b -let sub_equiv_lemma #t a b = LI.sub_lemma #t #LI.PUB a b -let decr_equiv_lemma #t a = LI.decr_lemma #t #LI.PUB a - -let div_equiv_lemma #t a b = admit(); LI.div_lemma #t a b // see issue #423 -let mod_equiv_lemma #t a b = admit(); LI.mod_lemma #t a b // see issue #423 - -let lognot #t a = LI.lognot #t #LI.PUB a -let lognot_lemma #t a = admit() // see issue #423 - -let logxor #t a b = LI.logxor #t #LI.PUB a b -let logxor_lemma #t a b = admit() // see issue #423 - -let logand #t a b = LI.logand #t #LI.PUB a b -let logand_lemma #t a b = admit() // see issue #423 -let logand_mask_lemma #t a b = admit() // see issue #423 - -let logor #t a b = LI.logor #t #LI.PUB a b -let logor_lemma #t a b = admit() // see issue #423 - -let shift_right_equiv_lemma #t a b = admit() // see issue #423 -let shift_left_equiv_lemma #t a b = admit() // see issue #423 - -let rotate_right #t a b = LI.rotate_right #t #LI.PUB a (cast b) -let rotate_right_equiv_lemma #t a b = () -let rotate_left #t a b = LI.rotate_left #t #LI.PUB a (cast b) -let rotate_left_equiv_lemma #t a b = () - -let abs_int_equiv_lemma #t a = admit() // see issue #423 - -let neg_equiv_lemma #_ _ = admit () - -let get_bit_and _x _y _i = admit () // see issue #423 -let get_bit_or _x _y _i = admit () // see issue #423 -let get_bit_shl _x _y _i = admit () // see issue #423 -let get_bit_shr _x _y _i = admit () // see issue #423 - -let get_bit_cast #t #u x nth - = reveal_opaque (`%get_bit) (get_bit x nth); - reveal_opaque (`%get_bit) (get_bit (cast_mod #t #u x <: int_t u) nth); - admit () // see issue #423 - -let 
get_bit_cast_extend #t #u x nth - = admit () // see issue #423 diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti b/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti index 9f1c5b531..84171151e 100644 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti +++ b/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti @@ -238,14 +238,9 @@ val decr_equiv_lemma: #t:inttype -> a:int_t t{minint t < v a} -> Lemma (decr a == LI.decr #t #LI.PUB a) -let div (#t:inttype) (a:int_t t) (b:int_t t{v b <> 0}) = - assume(unsigned t \/ range (v a / v b) t); // see issue #423 +let div (#t:inttype) (a:int_t t) (b:int_t t{v b <> 0 /\ (unsigned t \/ range (v a / v b) t)}) = + assert (unsigned t \/ range (v a / v b) t); mk_int #t (v a / v b) - -val div_equiv_lemma: #t:inttype{~(LI.U128? t) /\ ~(LI.S128? t)} - -> a:int_t t - -> b:int_t t{v b <> 0 /\ (unsigned t \/ range FStar.Int.(v a / v b) t)} - -> Lemma (div a b == LI.div a b) let mod (#t:inttype) (a:int_t t) (b:int_t t{v b <> 0}) = mk_int #t (v a % v b) diff --git a/test-harness/src/snapshots/toolchain__attribute-opaque into-fstar.snap b/test-harness/src/snapshots/toolchain__attribute-opaque into-fstar.snap index 2275da427..6454004a7 100644 --- a/test-harness/src/snapshots/toolchain__attribute-opaque into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__attribute-opaque into-fstar.snap @@ -29,13 +29,126 @@ stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' diagnostics = [] [stdout.files] +"Attribute_opaque.fst" = ''' +module Attribute_opaque +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +assume +val t_OpaqueEnum': v_X: usize -> v_T: Type0 -> v_U: Type0 -> eqtype + +let t_OpaqueEnum (v_X: usize) (v_T v_U: Type0) = t_OpaqueEnum' v_X v_T v_U + +assume +val t_OpaqueStruct': v_X: usize -> v_T: Type0 -> v_U: Type0 -> eqtype + +let t_OpaqueStruct (v_X: usize) (v_T v_U: Type0) = t_OpaqueStruct' v_X v_T v_U + +assume +val impl__S1__f_s1': Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) + +let impl__S1__f_s1 = impl__S1__f_s1' + +[@@ FStar.Tactics.Typeclasses.tcinstance] +assume +val impl_2': #v_U: Type0 -> {| i1: Core.Clone.t_Clone v_U |} -> t_TrGeneric i32 v_U + +let impl_2 (#v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Clone.t_Clone v_U) = + impl_2' #v_U #i1 + +assume +val impl__S2__f_s2': Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) + +let impl__S2__f_s2 = impl__S2__f_s2' + +assume +val v_C': u8 + +let v_C = v_C' + +assume +val f': x: bool -> y: bool -> Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) + +let f = f' + +assume +val ff_generic': v_X: usize -> #v_T: Type0 -> #v_U: Type0 -> x: v_U + -> Prims.Pure (t_OpaqueEnum v_X v_T v_U) Prims.l_True (fun _ -> Prims.l_True) + +let ff_generic (v_X: usize) (#v_T #v_U: Type0) = ff_generic' v_X #v_T #v_U + +assume +val ff_pre_post': x: bool -> y: bool + -> Prims.Pure bool + (requires x) + (ensures + fun result -> + let result:bool = result in + result =. 
y) + +let ff_pre_post = ff_pre_post' + +[@@ FStar.Tactics.Typeclasses.tcinstance] +assume +val impl_T_for_u8': t_T u8 + +let impl_T_for_u8 = impl_T_for_u8' +''' "Attribute_opaque.fsti" = ''' module Attribute_opaque #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -val t_OpaqueEnum (v_X: usize) (#v_T #v_U: Type0) : Type0 +type t_S1 = | S1 : t_S1 + +type t_S2 = | S2 : t_S2 + +val t_OpaqueEnum (v_X: usize) (v_T v_U: Type0) : eqtype + +val t_OpaqueStruct (v_X: usize) (v_T v_U: Type0) : eqtype + +class t_TrGeneric (v_Self: Type0) (v_U: Type0) = { + [@@@ FStar.Tactics.Typeclasses.no_method]_super_13225137425257751668:Core.Clone.t_Clone v_U; + f_f_pre:v_U -> Type0; + f_f_post:v_U -> v_Self -> Type0; + f_f:x0: v_U -> Prims.Pure v_Self (f_f_pre x0) (fun result -> f_f_post x0 result) +} + +val impl__S1__f_s1: Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) + +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_2 (#v_U: Type0) {| i1: Core.Clone.t_Clone v_U |} : t_TrGeneric i32 v_U + +val impl__S2__f_s2: Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True) + +val v_C:u8 + +val f (x y: bool) : Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True) + +val ff_generic (v_X: usize) (#v_T #v_U: Type0) (x: v_U) + : Prims.Pure (t_OpaqueEnum v_X v_T v_U) Prims.l_True (fun _ -> Prims.l_True) + +val ff_pre_post (x y: bool) + : Prims.Pure bool + (requires x) + (ensures + fun result -> + let result:bool = result in + result =. y) + +class t_T (v_Self: Type0) = { + f_U:Type0; + f_c:u8; + f_d_pre:Prims.unit -> Type0; + f_d_post:Prims.unit -> Prims.unit -> Type0; + f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result); + f_m_pre:self___: v_Self -> x: u8 -> pred: Type0{x =. 0uy ==> pred}; + f_m_post:v_Self -> u8 -> bool -> Type0; + f_m:x0: v_Self -> x1: u8 -> Prims.Pure bool (f_m_pre x0 x1) (fun result -> f_m_post x0 x1 result) +} -val t_OpaqueStruct (v_X: usize) (#v_T #v_U: Type0) : Type0 +[@@ FStar.Tactics.Typeclasses.tcinstance] +val impl_T_for_u8:t_T u8 ''' diff --git a/test-harness/src/snapshots/toolchain__attributes into-fstar.snap b/test-harness/src/snapshots/toolchain__attributes into-fstar.snap index e65784071..8705e5981 100644 --- a/test-harness/src/snapshots/toolchain__attributes into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__attributes into-fstar.snap @@ -92,6 +92,18 @@ open FStar.Mul unfold type t_Int = int +[@@ FStar.Tactics.Typeclasses.tcinstance] +assume +val impl_1': Core.Clone.t_Clone t_Int + +let impl_1 = impl_1' + +[@@ FStar.Tactics.Typeclasses.tcinstance] +assume +val impl': Core.Marker.t_Copy t_Int + +let impl = impl' + unfold instance impl: Core.Ops.Arith.t_Sub t_Int t_Int = { f_Output = t_Int; @@ -118,12 +130,12 @@ module Attributes.Newtype_pattern open Core open FStar.Mul +let v_MAX: usize = sz 10 + type t_SafeIndex = { f_i:f_i: usize{f_i <. v_MAX} } let impl__SafeIndex__as_usize (self: t_SafeIndex) : usize = self.f_i -let v_MAX: usize = sz 10 - [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T: Type0) : Core.Ops.Index.t_Index (t_Array v_T (sz 10)) t_SafeIndex = { @@ -144,6 +156,23 @@ module Attributes.Pre_post_on_traits_and_impls open Core open FStar.Mul +type t_ViaAdd = | ViaAdd : t_ViaAdd + +type t_ViaMul = | ViaMul : t_ViaMul + +class t_TraitWithRequiresAndEnsures (v_Self: Type0) = { + f_method_pre:self___: v_Self -> x: u8 -> pred: Type0{x <. 100uy ==> pred}; + f_method_post:self___: v_Self -> x: u8 -> r: u8 -> pred: Type0{pred ==> r >. 
88uy}; + f_method:x0: v_Self -> x1: u8 + -> Prims.Pure u8 (f_method_pre x0 x1) (fun result -> f_method_post x0 x1 result) +} + +let test + (#v_T: Type0) + (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_TraitWithRequiresAndEnsures v_T) + (x: v_T) + : u8 = (f_method #v_T #FStar.Tactics.Typeclasses.solve x 99uy <: u8) -! 88uy + class t_Operation (v_Self: Type0) = { f_double_pre:x: u8 -> pred: @@ -162,17 +191,6 @@ class t_Operation (v_Self: Type0) = { f_double:x0: u8 -> Prims.Pure u8 (f_double_pre x0) (fun result -> f_double_post x0 result) } -class t_TraitWithRequiresAndEnsures (v_Self: Type0) = { - f_method_pre:self___: v_Self -> x: u8 -> pred: Type0{x <. 100uy ==> pred}; - f_method_post:self___: v_Self -> x: u8 -> r: u8 -> pred: Type0{pred ==> r >. 88uy}; - f_method:x0: v_Self -> x1: u8 - -> Prims.Pure u8 (f_method_pre x0 x1) (fun result -> f_method_post x0 x1 result) -} - -type t_ViaAdd = | ViaAdd : t_ViaAdd - -type t_ViaMul = | ViaMul : t_ViaMul - [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_Operation t_ViaAdd = { @@ -206,12 +224,6 @@ let impl_1: t_Operation t_ViaMul = (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int)); f_double = fun (x: u8) -> x *! 2uy } - -let test - (#v_T: Type0) - (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_TraitWithRequiresAndEnsures v_T) - (x: v_T) - : u8 = (f_method #v_T #FStar.Tactics.Typeclasses.solve x 99uy <: u8) -! 88uy ''' "Attributes.Refined_arithmetic.fst" = ''' module Attributes.Refined_arithmetic @@ -454,12 +466,6 @@ module Attributes open Core open FStar.Mul -type t_Foo = { - f_x:u32; - f_y:f_y: u32{f_y >. 3ul}; - f_z:f_z: u32{((f_y +! f_x <: u32) +! f_z <: u32) >. 3ul} -} - let inlined_code__V: u8 = 12uy let issue_844_ (v__x: u8) @@ -472,6 +478,12 @@ let issue_844_ (v__x: u8) let u32_max: u32 = 90000ul +type t_Foo = { + f_x:u32; + f_y:f_y: u32{f_y >. 3ul}; + f_z:f_z: u32{((f_y +! f_x <: u32) +! f_z <: u32) >. 3ul} +} + let swap_and_mut_req_ens (x y: u32) : Prims.Pure (u32 & u32 & u32) (requires x <. 40ul && y <. 
300ul) diff --git a/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap b/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap index af07fc045..d964ad67b 100644 --- a/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap @@ -32,7 +32,7 @@ module Cyclic_modules.B open Core open FStar.Mul -include Cyclic_modules.Rec_bundle_318256792 {g as g} +include Cyclic_modules.Cyclic_bundle_367033742 {g as g} ''' "Cyclic_modules.C.fst" = ''' module Cyclic_modules.C @@ -42,8 +42,24 @@ open FStar.Mul let i (_: Prims.unit) : Prims.unit = () ''' -"Cyclic_modules.D.Rec_bundle_773034964.fst" = ''' -module Cyclic_modules.D.Rec_bundle_773034964 +"Cyclic_modules.Cyclic_bundle_367033742.fst" = ''' +module Cyclic_modules.Cyclic_bundle_367033742 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +let f (_: Prims.unit) : Prims.unit = () + +let g (_: Prims.unit) : Prims.unit = f () + +let h (_: Prims.unit) : Prims.unit = + let _:Prims.unit = g () in + Cyclic_modules.C.i () + +let h2 (_: Prims.unit) : Prims.unit = Cyclic_modules.C.i () +''' +"Cyclic_modules.D.Cyclic_bundle_81544935.fst" = ''' +module Cyclic_modules.D.Cyclic_bundle_81544935 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul @@ -62,9 +78,9 @@ module Cyclic_modules.D open Core open FStar.Mul -include Cyclic_modules.D.Rec_bundle_773034964 {d1 as d1} +include Cyclic_modules.D.Cyclic_bundle_81544935 {d1 as d1} -include Cyclic_modules.D.Rec_bundle_773034964 {d2 as d2} +include Cyclic_modules.D.Cyclic_bundle_81544935 {d2 as d2} ''' "Cyclic_modules.De.fst" = ''' module Cyclic_modules.De @@ -72,10 +88,10 @@ module Cyclic_modules.De open Core open FStar.Mul -include Cyclic_modules.D.Rec_bundle_773034964 {de1 as de1} +include Cyclic_modules.D.Cyclic_bundle_81544935 {de1 as de1} ''' -"Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688.fst" = ''' -module Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 +"Cyclic_modules.Disjoint_cycle_a.Cyclic_bundle_177270903.fst" = ''' +module Cyclic_modules.Disjoint_cycle_a.Cyclic_bundle_177270903 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul @@ -94,9 +110,9 @@ module Cyclic_modules.Disjoint_cycle_a open Core open FStar.Mul -include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {g as g} +include Cyclic_modules.Disjoint_cycle_a.Cyclic_bundle_177270903 {g as g} -include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {f as f} +include Cyclic_modules.Disjoint_cycle_a.Cyclic_bundle_177270903 {f as f} ''' "Cyclic_modules.Disjoint_cycle_b.fst" = ''' module Cyclic_modules.Disjoint_cycle_b @@ -104,9 +120,9 @@ module Cyclic_modules.Disjoint_cycle_b open Core open FStar.Mul -include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {h as h} +include Cyclic_modules.Disjoint_cycle_a.Cyclic_bundle_177270903 {h as h} -include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {i as i} +include Cyclic_modules.Disjoint_cycle_a.Cyclic_bundle_177270903 {i as i} ''' "Cyclic_modules.E.fst" = ''' module Cyclic_modules.E @@ -114,47 +130,47 @@ module Cyclic_modules.E open Core open FStar.Mul -include Cyclic_modules.D.Rec_bundle_773034964 {e1 as e1} +include Cyclic_modules.D.Cyclic_bundle_81544935 {e1 as e1} ''' -"Cyclic_modules.Enums_a.fst" = ''' -module Cyclic_modules.Enums_a +"Cyclic_modules.Enums_a.Cyclic_bundle_1009707801.fst" = ''' +module Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 #set-options "--fuel 0 --ifuel 1 --z3rlimit 
15" open Core open FStar.Mul -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {t_T240131830 as t_T} - -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_A as T_A} +type t_T_366415196 = + | T_366415196_A : t_T_366415196 + | T_366415196_B : t_T_366415196 + | T_366415196_C : Alloc.Vec.t_Vec t_T_240131830 Alloc.Alloc.t_Global -> t_T_366415196 -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_B as T_B} +and t_U = + | U_A : t_U + | U_B : t_U + | U_C : Alloc.Vec.t_Vec t_T_240131830 Alloc.Alloc.t_Global -> t_U -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_C as T_C} +and t_T_240131830 = + | T_240131830_A : t_T_240131830 + | T_240131830_B : t_T_240131830 + | T_240131830_C : Alloc.Vec.t_Vec t_U Alloc.Alloc.t_Global -> t_T_240131830 + | T_240131830_D : Alloc.Vec.t_Vec t_T_366415196 Alloc.Alloc.t_Global -> t_T_240131830 -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_D as T_D} +let f (_: Prims.unit) : t_T_366415196 = T_366415196_A <: t_T_366415196 ''' -"Cyclic_modules.Enums_b.Rec_bundle_994866580.fst" = ''' -module Cyclic_modules.Enums_b.Rec_bundle_994866580 +"Cyclic_modules.Enums_a.fst" = ''' +module Cyclic_modules.Enums_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -type t_T366415196 = - | T366415196_A : t_T366415196 - | T366415196_B : t_T366415196 - | T366415196_C : Alloc.Vec.t_Vec t_T240131830 Alloc.Alloc.t_Global -> t_T366415196 +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {t_T_240131830 as t_T} -and t_U = - | U_A : t_U - | U_B : t_U - | U_C : Alloc.Vec.t_Vec t_T240131830 Alloc.Alloc.t_Global -> t_U +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {T_240131830_A as T_A} -and t_T240131830 = - | T240131830_A : t_T240131830 - | T240131830_B : t_T240131830 - | T240131830_C : Alloc.Vec.t_Vec t_U Alloc.Alloc.t_Global -> t_T240131830 - | T240131830_D : Alloc.Vec.t_Vec t_T366415196 Alloc.Alloc.t_Global -> t_T240131830 +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {T_240131830_B as T_B} -let f (_: Prims.unit) : t_T366415196 = T366415196_A <: t_T366415196 +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {T_240131830_C as T_C} + +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {T_240131830_D as T_D} ''' "Cyclic_modules.Enums_b.fst" = ''' module Cyclic_modules.Enums_b @@ -162,42 +178,42 @@ module Cyclic_modules.Enums_b open Core open FStar.Mul -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {t_T366415196 as t_T} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {t_T_366415196 as t_T} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T366415196_A as T_A} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {T_366415196_A as T_A} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T366415196_B as T_B} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {T_366415196_B as T_B} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T366415196_C as T_C} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {T_366415196_C as T_C} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {t_U as t_U} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {t_U as t_U} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {U_A as U_A} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {U_A as U_A} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {U_B as U_B} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {U_B as U_B} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {U_C as U_C} +include 
Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {U_C as U_C} -include Cyclic_modules.Enums_b.Rec_bundle_994866580 {f as f} +include Cyclic_modules.Enums_a.Cyclic_bundle_1009707801 {f as f} ''' -"Cyclic_modules.Late_skip_a.fst" = ''' -module Cyclic_modules.Late_skip_a +"Cyclic_modules.Late_skip_a.Cyclic_bundle_658016071.fst" = ''' +module Cyclic_modules.Late_skip_a.Cyclic_bundle_658016071 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -include Cyclic_modules.Late_skip_b.Rec_bundle_447022631 {f749016415 as f} +let rec ff_749016415 (_: Prims.unit) : Prims.unit = ff_377825240 () + +and ff_377825240 (_: Prims.unit) : Prims.Pure Prims.unit (requires true) (fun _ -> Prims.l_True) = + ff_749016415 () ''' -"Cyclic_modules.Late_skip_b.Rec_bundle_447022631.fst" = ''' -module Cyclic_modules.Late_skip_b.Rec_bundle_447022631 +"Cyclic_modules.Late_skip_a.fst" = ''' +module Cyclic_modules.Late_skip_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -let rec f749016415 (_: Prims.unit) : Prims.unit = f377825240 () - -and f377825240 (_: Prims.unit) : Prims.Pure Prims.unit (requires true) (fun _ -> Prims.l_True) = - f749016415 () +include Cyclic_modules.Late_skip_a.Cyclic_bundle_658016071 {ff_749016415 as f} ''' "Cyclic_modules.Late_skip_b.fst" = ''' module Cyclic_modules.Late_skip_b @@ -205,18 +221,10 @@ module Cyclic_modules.Late_skip_b open Core open FStar.Mul -include Cyclic_modules.Late_skip_b.Rec_bundle_447022631 {f377825240 as f} -''' -"Cyclic_modules.M1.fst" = ''' -module Cyclic_modules.M1 -#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" -open Core -open FStar.Mul - -include Cyclic_modules.M2.Rec_bundle_489499412 {a as a} +include Cyclic_modules.Late_skip_a.Cyclic_bundle_658016071 {ff_377825240 as f} ''' -"Cyclic_modules.M2.Rec_bundle_489499412.fst" = ''' -module Cyclic_modules.M2.Rec_bundle_489499412 +"Cyclic_modules.M1.Cyclic_bundle_892895908.fst" = ''' +module Cyclic_modules.M1.Cyclic_bundle_892895908 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul @@ -231,17 +239,25 @@ let b (_: Prims.unit) : Prims.unit = let _:Prims.unit = a () in d () ''' +"Cyclic_modules.M1.fst" = ''' +module Cyclic_modules.M1 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +include Cyclic_modules.M1.Cyclic_bundle_892895908 {a as a} +''' "Cyclic_modules.M2.fst" = ''' module Cyclic_modules.M2 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -include Cyclic_modules.M2.Rec_bundle_489499412 {c as c} +include Cyclic_modules.M1.Cyclic_bundle_892895908 {c as c} -include Cyclic_modules.M2.Rec_bundle_489499412 {d as d} +include Cyclic_modules.M1.Cyclic_bundle_892895908 {d as d} -include Cyclic_modules.M2.Rec_bundle_489499412 {b as b} +include Cyclic_modules.M1.Cyclic_bundle_892895908 {b as b} ''' "Cyclic_modules.Rec.fst" = ''' module Cyclic_modules.Rec @@ -254,34 +270,34 @@ type t_T = | T_t2 : t_T let t_T_cast_to_repr (x: t_T) : isize = - match x with + match x <: t_T with | T_t1 -> isz 0 | T_t2 -> isz 1 let rec hf (x: t_T) : t_T = - match x with + match x <: t_T with | T_t1 -> hf (T_t2 <: t_T) | T_t2 -> x let rec g1 (x: t_T) : t_T = - match x with + match x <: t_T with | T_t1 -> g2 x | T_t2 -> T_t1 <: t_T and g2 (x: t_T) : t_T = - match x with + match x <: t_T with | T_t1 -> g1 x | T_t2 -> hf x ''' -"Cyclic_modules.Rec1_same_name.Rec_bundle_213192514.fst" = ''' -module Cyclic_modules.Rec1_same_name.Rec_bundle_213192514 +"Cyclic_modules.Rec1_same_name.Cyclic_bundle_563905053.fst" = ''' +module 
Cyclic_modules.Rec1_same_name.Cyclic_bundle_563905053 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -let rec f533409751 (x: i32) : i32 = f91805216 x +let rec ff_533409751 (x: i32) : i32 = ff_91805216 x -and f91805216 (x: i32) : i32 = if x >. 0l then f533409751 (x -! 1l <: i32) else 0l +and ff_91805216 (x: i32) : i32 = if x >. 0l then ff_533409751 (x -! 1l <: i32) else 0l ''' "Cyclic_modules.Rec1_same_name.fst" = ''' module Cyclic_modules.Rec1_same_name @@ -289,7 +305,7 @@ module Cyclic_modules.Rec1_same_name open Core open FStar.Mul -include Cyclic_modules.Rec1_same_name.Rec_bundle_213192514 {f533409751 as f} +include Cyclic_modules.Rec1_same_name.Cyclic_bundle_563905053 {ff_533409751 as f} ''' "Cyclic_modules.Rec2_same_name.fst" = ''' module Cyclic_modules.Rec2_same_name @@ -297,61 +313,45 @@ module Cyclic_modules.Rec2_same_name open Core open FStar.Mul -include Cyclic_modules.Rec1_same_name.Rec_bundle_213192514 {f91805216 as f} +include Cyclic_modules.Rec1_same_name.Cyclic_bundle_563905053 {ff_91805216 as f} ''' -"Cyclic_modules.Rec_bundle_318256792.fst" = ''' -module Cyclic_modules.Rec_bundle_318256792 +"Cyclic_modules.Typ_a.Cyclic_bundle_830459646.fst" = ''' +module Cyclic_modules.Typ_a.Cyclic_bundle_830459646 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -let f (_: Prims.unit) : Prims.unit = () - -let g (_: Prims.unit) : Prims.unit = f () - -let h (_: Prims.unit) : Prims.unit = - let _:Prims.unit = g () in - Cyclic_modules.C.i () +type t_T1 = | T1_T1 : t_T1 -let h2 (_: Prims.unit) : Prims.unit = Cyclic_modules.C.i () -''' -"Cyclic_modules.Typ_a.fst" = ''' -module Cyclic_modules.Typ_a -#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" -open Core -open FStar.Mul +type t_T = | T_T : t_T1 -> t_T -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T as t_T} +let t_T1_cast_to_repr (x: t_T1) : isize = match x <: t_T1 with | T1_T1 -> isz 0 -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T_T as T_T} +type t_T2 = | T2_T2 : t_T -> t_T2 -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_TRec as t_TRec} +type t_TRec = + | TRec_T : t_T1Rec -> t_TRec + | TRec_Empty : t_TRec -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {TRec_T as TRec_T} +and t_T1Rec = | T1Rec_T1 : Alloc.Boxed.t_Box t_T2Rec Alloc.Alloc.t_Global -> t_T1Rec -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {TRec_Empty as TRec_Empty} +and t_T2Rec = | T2Rec_T2 : t_TRec -> t_T2Rec ''' -"Cyclic_modules.Typ_b.Rec_bundle_546955701.fst" = ''' -module Cyclic_modules.Typ_b.Rec_bundle_546955701 +"Cyclic_modules.Typ_a.fst" = ''' +module Cyclic_modules.Typ_a #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -type t_T1 = | T1_T1 : t_T1 - -type t_T = | T_T : t_T1 -> t_T +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {t_T as t_T} -let t_T1_cast_to_repr (x: t_T1) : isize = match x with | T1_T1 -> isz 0 - -type t_T2 = | T2_T2 : t_T -> t_T2 +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {T_T as T_T} -type t_TRec = - | TRec_T : t_T1Rec -> t_TRec - | TRec_Empty : t_TRec +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {t_TRec as t_TRec} -and t_T1Rec = | T1Rec_T1 : Alloc.Boxed.t_Box t_T2Rec Alloc.Alloc.t_Global -> t_T1Rec +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {TRec_T as TRec_T} -and t_T2Rec = | T2Rec_T2 : t_TRec -> t_T2Rec +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {TRec_Empty as TRec_Empty} ''' "Cyclic_modules.Typ_b.fst" = ''' module Cyclic_modules.Typ_b @@ -359,26 +359,26 @@ module Cyclic_modules.Typ_b open Core open 
FStar.Mul -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T1 as t_T1} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {t_T1 as t_T1} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T1_T1 as T1_T1} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {T1_T1 as T1_T1} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T1_cast_to_repr as t_T1_cast_to_repr} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {t_T1_cast_to_repr as t_T1_cast_to_repr} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T2 as t_T2} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {t_T2 as t_T2} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T2_T2 as T2_T2} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {T2_T2 as T2_T2} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T1Rec as t_T1Rec} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {t_T1Rec as t_T1Rec} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T1Rec_T1 as T1Rec_T1} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {T1Rec_T1 as T1Rec_T1} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T2Rec as t_T2Rec} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {t_T2Rec as t_T2Rec} -include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T2Rec_T2 as T2Rec_T2} +include Cyclic_modules.Typ_a.Cyclic_bundle_830459646 {T2Rec_T2 as T2Rec_T2} ''' -"Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539.fst" = ''' -module Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 +"Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522.fst" = ''' +module Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul @@ -400,15 +400,15 @@ module Cyclic_modules.Variant_constructor_a open Core open FStar.Mul -include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {t_Context as t_Context} +include Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522 {t_Context as t_Context} -include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {Context_A as Context_A} +include Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522 {Context_A as Context_A} -include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {Context_B as Context_B} +include Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522 {Context_B as Context_B} -include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {test as impl__Context__test} +include Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522 {test as impl__Context__test} -include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {f as f} +include Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522 {f as f} ''' "Cyclic_modules.Variant_constructor_b.fst" = ''' module Cyclic_modules.Variant_constructor_b @@ -416,7 +416,7 @@ module Cyclic_modules.Variant_constructor_b open Core open FStar.Mul -include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {h as h} +include Cyclic_modules.Variant_constructor_a.Cyclic_bundle_748213522 {h as h} ''' "Cyclic_modules.fst" = ''' module Cyclic_modules @@ -424,9 +424,9 @@ module Cyclic_modules open Core open FStar.Mul -include Cyclic_modules.Rec_bundle_318256792 {f as f} +include Cyclic_modules.Cyclic_bundle_367033742 {f as f} -include Cyclic_modules.Rec_bundle_318256792 {h as h} +include Cyclic_modules.Cyclic_bundle_367033742 {h as h} -include Cyclic_modules.Rec_bundle_318256792 {h2 as h2} +include Cyclic_modules.Cyclic_bundle_367033742 {h2 as h2} ''' diff --git 
a/test-harness/src/snapshots/toolchain__enum-repr into-fstar.snap b/test-harness/src/snapshots/toolchain__enum-repr into-fstar.snap index 600085987..30788080c 100644 --- a/test-harness/src/snapshots/toolchain__enum-repr into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__enum-repr into-fstar.snap @@ -44,7 +44,7 @@ type t_EnumWithRepr = | EnumWithRepr_ImplicitDiscrEmptyStruct : t_EnumWithRepr let t_EnumWithRepr_cast_to_repr (x: t_EnumWithRepr) : u16 = - match x with + match x <: t_EnumWithRepr with | EnumWithRepr_ExplicitDiscr1 -> discriminant_EnumWithRepr_ExplicitDiscr1 | EnumWithRepr_ExplicitDiscr2 -> discriminant_EnumWithRepr_ExplicitDiscr2 | EnumWithRepr_ImplicitDiscrEmptyTuple -> discriminant_EnumWithRepr_ExplicitDiscr2 +! 1us diff --git a/test-harness/src/snapshots/toolchain__generics into-fstar.snap b/test-harness/src/snapshots/toolchain__generics into-fstar.snap index fa558e443..26fe6fce5 100644 --- a/test-harness/src/snapshots/toolchain__generics into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__generics into-fstar.snap @@ -37,6 +37,36 @@ type t_Defaults (v_T: Type0) (v_N: usize) = | Defaults : t_Array v_T v_N -> t_De let f (_: t_Defaults Prims.unit (sz 2)) : Prims.unit = () ''' +"Generics.Impl_generics.fst" = ''' +module Generics.Impl_generics +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +type t_Test = | Test : t_Test + +let impl__Test__set_alpn_protocols + (#v_S #impl_995885649_: Type0) + (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core.Convert.t_AsRef v_S string) + (#[FStar.Tactics.Typeclasses.tcresolve ()] + i3: + Core.Iter.Traits.Collect.t_IntoIterator impl_995885649_) + (self: t_Test) + (v__protocols: impl_995885649_) + : Core.Result.t_Result Prims.unit Prims.unit = + Core.Result.Result_Ok (() <: Prims.unit) <: Core.Result.t_Result Prims.unit Prims.unit + +let impl__Test__set_ciphersuites + (#v_S #impl_995885649_: Type0) + (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core.Convert.t_AsRef v_S string) + (#[FStar.Tactics.Typeclasses.tcresolve ()] + i3: + Core.Iter.Traits.Collect.t_IntoIterator impl_995885649_) + (self: t_Test) + (ciphers: impl_995885649_) + : Core.Result.t_Result Prims.unit Prims.unit = + Core.Result.Result_Ok (() <: Prims.unit) <: Core.Result.t_Result Prims.unit Prims.unit +''' "Generics.fst" = ''' module Generics #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" diff --git a/test-harness/src/snapshots/toolchain__guards into-fstar.snap b/test-harness/src/snapshots/toolchain__guards into-fstar.snap index ff12f664a..cf6aebabc 100644 --- a/test-harness/src/snapshots/toolchain__guards into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__guards into-fstar.snap @@ -33,78 +33,88 @@ open Core open FStar.Mul let equivalent (x: Core.Option.t_Option (Core.Result.t_Result i32 i32)) : i32 = - match x with + match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with | Core.Option.Option_None -> 0l | _ -> match - match x with - | Core.Option.Option_Some v -> - (match v with - | Core.Result.Result_Ok y -> Core.Option.Option_Some y <: Core.Option.t_Option i32 - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32 + (match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with + | Core.Option.Option_Some v -> + (match v <: Core.Result.t_Result i32 i32 with + | Core.Result.Result_Ok y -> Core.Option.Option_Some y <: Core.Option.t_Option i32 + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + | _ -> Core.Option.Option_None <: 
Core.Option.t_Option i32) + <: + Core.Option.t_Option i32 with | Core.Option.Option_Some y -> y | Core.Option.Option_None -> - match x with + match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with | Core.Option.Option_Some (Core.Result.Result_Err y) -> y | _ -> 1l let if_guard (x: Core.Option.t_Option i32) : i32 = match - match x with - | Core.Option.Option_Some v -> - (match v >. 0l with - | true -> Core.Option.Option_Some v <: Core.Option.t_Option i32 - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32 + (match x <: Core.Option.t_Option i32 with + | Core.Option.Option_Some v -> + (match v >. 0l <: bool with + | true -> Core.Option.Option_Some v <: Core.Option.t_Option i32 + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + <: + Core.Option.t_Option i32 with | Core.Option.Option_Some x -> x | Core.Option.Option_None -> 0l let if_let_guard (x: Core.Option.t_Option (Core.Result.t_Result i32 i32)) : i32 = - match x with + match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with | Core.Option.Option_None -> 0l | _ -> match - match x with - | Core.Option.Option_Some v -> - (match v with - | Core.Result.Result_Ok y -> Core.Option.Option_Some y <: Core.Option.t_Option i32 - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32 + (match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with + | Core.Option.Option_Some v -> + (match v <: Core.Result.t_Result i32 i32 with + | Core.Result.Result_Ok y -> Core.Option.Option_Some y <: Core.Option.t_Option i32 + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + <: + Core.Option.t_Option i32 with | Core.Option.Option_Some x -> x | Core.Option.Option_None -> - match x with + match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with | Core.Option.Option_Some (Core.Result.Result_Err y) -> y | _ -> 1l let multiple_guards (x: Core.Option.t_Option (Core.Result.t_Result i32 i32)) : i32 = - match x with + match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with | Core.Option.Option_None -> 0l | _ -> match - match x with - | Core.Option.Option_Some (Core.Result.Result_Ok v) -> - (match Core.Option.Option_Some (v +! 1l) <: Core.Option.t_Option i32 with - | Core.Option.Option_Some 1l -> Core.Option.Option_Some 0l <: Core.Option.t_Option i32 - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32 + (match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with + | Core.Option.Option_Some (Core.Result.Result_Ok v) -> + (match Core.Option.Option_Some (v +! 
1l) <: Core.Option.t_Option i32 with + | Core.Option.Option_Some 1l -> Core.Option.Option_Some 0l <: Core.Option.t_Option i32 + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + <: + Core.Option.t_Option i32 with | Core.Option.Option_Some x -> x | Core.Option.Option_None -> match - match x with - | Core.Option.Option_Some v -> - (match v with - | Core.Result.Result_Ok y -> Core.Option.Option_Some y <: Core.Option.t_Option i32 - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) - | _ -> Core.Option.Option_None <: Core.Option.t_Option i32 + (match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with + | Core.Option.Option_Some v -> + (match v <: Core.Result.t_Result i32 i32 with + | Core.Result.Result_Ok y -> Core.Option.Option_Some y <: Core.Option.t_Option i32 + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + | _ -> Core.Option.Option_None <: Core.Option.t_Option i32) + <: + Core.Option.t_Option i32 with | Core.Option.Option_Some x -> x | Core.Option.Option_None -> - match x with + match x <: Core.Option.t_Option (Core.Result.t_Result i32 i32) with | Core.Option.Option_Some (Core.Result.Result_Err y) -> y | _ -> 1l ''' diff --git a/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap b/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap index 9aaa938a6..dcdee90a4 100644 --- a/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap @@ -27,7 +27,7 @@ stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' diagnostics = [] [stdout.files] -"Interface_only.fsti" = ''' +"Interface_only.fst" = ''' module Interface_only #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core @@ -39,34 +39,89 @@ type t_Holder (v_T: Type0) = { f_value:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global type t_Param (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE } +class t_T2 (v_Self: Type0) = { + f_d_pre:Prims.unit -> Type0; + f_d_post:Prims.unit -> Prims.unit -> Type0; + f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result) +} + +/// Items can be forced to be transparent +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl_T2_for_u8: t_T2 u8 = + { + f_d_pre = (fun (_: Prims.unit) -> false); + f_d_post = (fun (_: Prims.unit) (out: Prims.unit) -> true); + f_d = fun (_: Prims.unit) -> () + } + /// Non-inherent implementations are extracted, their bodies are not /// dropped. This might be a bit surprising: see /// https://github.com/hacspec/hax/issues/616. 
[@@ FStar.Tactics.Typeclasses.tcinstance] -val impl:Core.Convert.t_From t_Bar Prims.unit +assume +val impl': Core.Convert.t_From t_Bar Prims.unit + +let impl = impl' /// If you need to drop the body of a method, please hoist it: [@@ FStar.Tactics.Typeclasses.tcinstance] -val impl_1:Core.Convert.t_From t_Bar u8 +assume +val impl_1': Core.Convert.t_From t_Bar u8 -val from__from: u8 -> Prims.Pure t_Bar Prims.l_True (fun _ -> Prims.l_True) +let impl_1 = impl_1' + +assume +val from__from': u8 -> t_Bar + +let from__from = from__from' [@@ FStar.Tactics.Typeclasses.tcinstance] -val impl_2 (#v_T: Type0) : Core.Convert.t_From (t_Holder v_T) Prims.unit +assume +val impl_2': #v_T: Type0 -> Core.Convert.t_From (t_Holder v_T) Prims.unit + +let impl_2 (#v_T: Type0) = impl_2' #v_T [@@ FStar.Tactics.Typeclasses.tcinstance] -val impl_3 (v_SIZE: usize) : Core.Convert.t_From (t_Param v_SIZE) Prims.unit +assume +val impl_3': v_SIZE: usize -> Core.Convert.t_From (t_Param v_SIZE) Prims.unit + +let impl_3 (v_SIZE: usize) = impl_3' v_SIZE /// This item contains unsafe blocks and raw references, two features /// not supported by hax. Thanks to the `-i` flag and the `+:` /// modifier, `f` is still extractable as an interface. /// Expressions within type are still extracted, as well as pre- and /// post-conditions. -val f (x: u8) - : Prims.Pure (t_Array u8 (sz 4)) +assume +val f': x: u8 + -> Prims.Pure (t_Array u8 (sz 4)) (requires x <. 254uy) (ensures fun r -> let r:t_Array u8 (sz 4) = r in (r.[ sz 0 ] <: u8) >. x) + +let f = f' + +assume +val ff_generic': v_X: usize -> #v_U: Type0 -> v__x: v_U -> t_Param v_X + +let ff_generic (v_X: usize) (#v_U: Type0) = ff_generic' v_X #v_U + +class t_T (v_Self: Type0) = { + f_Assoc:Type0; + f_d_pre:Prims.unit -> Type0; + f_d_post:Prims.unit -> Prims.unit -> Type0; + f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result) +} + +/// Impls with associated types are not erased +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl_T_for_u8: t_T u8 = + { + f_Assoc = u8; + f_d_pre = (fun (_: Prims.unit) -> true); + f_d_post = (fun (_: Prims.unit) (out: Prims.unit) -> true); + f_d = fun (_: Prims.unit) -> () + } ''' diff --git a/test-harness/src/snapshots/toolchain__let-else into-fstar.snap b/test-harness/src/snapshots/toolchain__let-else into-fstar.snap index 44ed74878..856375311 100644 --- a/test-harness/src/snapshots/toolchain__let-else into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__let-else into-fstar.snap @@ -33,12 +33,12 @@ open Core open FStar.Mul let let_else (opt: Core.Option.t_Option u32) : bool = - match opt with + match opt <: Core.Option.t_Option u32 with | Core.Option.Option_Some x -> true | _ -> false let let_else_different_type (opt: Core.Option.t_Option u32) : bool = - match opt with + match opt <: Core.Option.t_Option u32 with | Core.Option.Option_Some x -> let_else (Core.Option.Option_Some (x +! 1ul <: u32) <: Core.Option.t_Option u32) | _ -> false diff --git a/test-harness/src/snapshots/toolchain__literals into-coq.snap b/test-harness/src/snapshots/toolchain__literals into-coq.snap index f2da7be00..4ef4fb981 100644 --- a/test-harness/src/snapshots/toolchain__literals into-coq.snap +++ b/test-harness/src/snapshots/toolchain__literals into-coq.snap @@ -132,6 +132,12 @@ Definition patterns (_ : unit) : unit := end in tt. + + + + + + Definition panic_with_msg (_ : unit) : unit := never_to_any (panic_fmt (impl_2__new_const (["with msg"%string]))). 
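The snapshots above exercise the new erasure controls. As an illustrative sketch (the item names and bodies below are invented, not taken from this diff), user code driving such output would annotate items with the `opaque` and `transparent` attributes introduced in `hax-lib/macros`:

// Assumes a crate that depends on `hax-lib`; names and bodies are placeholders.

// `opaque` erases the body at extraction time (it emits `AttrPayload::Erased`),
// so backends only see the signature.
#[hax_lib::opaque]
fn permute(state: [u8; 16]) -> [u8; 16] {
    state
}

// `transparent` keeps the body (it emits `AttrPayload::NeverErased`), even when an
// interface-only `-i` clause using the `+:` modifier would otherwise turn the item
// into a signature-only declaration.
#[hax_lib::transparent]
fn round_constant() -> u8 {
    0x1b
}

The pre-existing `opaque_type` attribute is kept as a deprecated alias that forwards to `opaque`.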
diff --git a/test-harness/src/snapshots/toolchain__literals into-fstar.snap b/test-harness/src/snapshots/toolchain__literals into-fstar.snap index efe3f1ef0..355ca6097 100644 --- a/test-harness/src/snapshots/toolchain__literals into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__literals into-fstar.snap @@ -38,7 +38,7 @@ type t_Foo = { f_field:u8 } let v_CONSTANT: t_Foo = { f_field = 3uy } <: t_Foo let casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit = - let (_: u64):u64 = + let _:u64 = ((((cast (x8 <: u8) <: u64) +! (cast (x16 <: u16) <: u64) <: u64) +! (cast (x32 <: u32) <: u64) <: u64) +! @@ -47,28 +47,28 @@ let casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit = u64) +! (cast (xs <: usize) <: u64) in - let (_: u32):u32 = + let _:u32 = ((((cast (x8 <: u8) <: u32) +! (cast (x16 <: u16) <: u32) <: u32) +! x32 <: u32) +! (cast (x64 <: u64) <: u32) <: u32) +! (cast (xs <: usize) <: u32) in - let (_: u16):u16 = + let _:u16 = ((((cast (x8 <: u8) <: u16) +! x16 <: u16) +! (cast (x32 <: u32) <: u16) <: u16) +! (cast (x64 <: u64) <: u16) <: u16) +! (cast (xs <: usize) <: u16) in - let (_: u8):u8 = + let _:u8 = (((x8 +! (cast (x16 <: u16) <: u8) <: u8) +! (cast (x32 <: u32) <: u8) <: u8) +! (cast (x64 <: u64) <: u8) <: u8) +! (cast (xs <: usize) <: u8) in - let (_: i64):i64 = + let _:i64 = ((((cast (x8 <: u8) <: i64) +! (cast (x16 <: u16) <: i64) <: i64) +! (cast (x32 <: u32) <: i64) <: i64) +! @@ -77,7 +77,7 @@ let casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit = i64) +! (cast (xs <: usize) <: i64) in - let (_: i32):i32 = + let _:i32 = ((((cast (x8 <: u8) <: i32) +! (cast (x16 <: u16) <: i32) <: i32) +! (cast (x32 <: u32) <: i32) <: i32) +! @@ -86,7 +86,7 @@ let casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit = i32) +! (cast (xs <: usize) <: i32) in - let (_: i16):i16 = + let _:i16 = ((((cast (x8 <: u8) <: i16) +! (cast (x16 <: u16) <: i16) <: i16) +! (cast (x32 <: u32) <: i16) <: i16) +! @@ -95,7 +95,7 @@ let casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit = i16) +! (cast (xs <: usize) <: i16) in - let (_: i8):i8 = + let _:i8 = ((((cast (x8 <: u8) <: i8) +! (cast (x16 <: u16) <: i8) <: i8) +! (cast (x32 <: u32) <: i8) <: i8) +! 
@@ -115,7 +115,7 @@ let math_integers (x: Hax_lib.Int.t_Int) : Prims.Pure u8 (requires x > (0 <: Hax_lib.Int.t_Int) && x < (16 <: Hax_lib.Int.t_Int)) (fun _ -> Prims.l_True) = - let (_: Hax_lib.Int.t_Int):Hax_lib.Int.t_Int = Rust_primitives.Hax.Int.from_machine (sz 3) in + let _:Hax_lib.Int.t_Int = Rust_primitives.Hax.Int.from_machine (sz 3) in let _:bool = ((-340282366920938463463374607431768211455000) <: Hax_lib.Int.t_Int) > (340282366920938463463374607431768211455000 <: Hax_lib.Int.t_Int) @@ -129,31 +129,31 @@ let math_integers (x: Hax_lib.Int.t_Int) let _:Hax_lib.Int.t_Int = x - x in let _:Hax_lib.Int.t_Int = x * x in let _:Hax_lib.Int.t_Int = x / x in - let (_: i16):i16 = Hax_lib.Int.impl__Int__to_i16 x in - let (_: i32):i32 = Hax_lib.Int.impl__Int__to_i32 x in - let (_: i64):i64 = Hax_lib.Int.impl__Int__to_i64 x in - let (_: i128):i128 = Hax_lib.Int.impl__Int__to_i128 x in - let (_: isize):isize = Hax_lib.Int.impl__Int__to_isize x in - let (_: u16):u16 = Hax_lib.Int.impl__Int__to_u16 x in - let (_: u32):u32 = Hax_lib.Int.impl__Int__to_u32 x in - let (_: u64):u64 = Hax_lib.Int.impl__Int__to_u64 x in - let (_: u128):u128 = Hax_lib.Int.impl__Int__to_u128 x in - let (_: usize):usize = Hax_lib.Int.impl__Int__to_usize x in + let _:i16 = Hax_lib.Int.impl__Int__to_i16 x in + let _:i32 = Hax_lib.Int.impl__Int__to_i32 x in + let _:i64 = Hax_lib.Int.impl__Int__to_i64 x in + let _:i128 = Hax_lib.Int.impl__Int__to_i128 x in + let _:isize = Hax_lib.Int.impl__Int__to_isize x in + let _:u16 = Hax_lib.Int.impl__Int__to_u16 x in + let _:u32 = Hax_lib.Int.impl__Int__to_u32 x in + let _:u64 = Hax_lib.Int.impl__Int__to_u64 x in + let _:u128 = Hax_lib.Int.impl__Int__to_u128 x in + let _:usize = Hax_lib.Int.impl__Int__to_usize x in Hax_lib.Int.impl__Int__to_u8 (x + (x * x <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) let null: char = '\0' let numeric (_: Prims.unit) : Prims.unit = - let (_: usize):usize = sz 123 in - let (_: isize):isize = isz (-42) in - let (_: isize):isize = isz 42 in - let (_: i32):i32 = (-42l) in - let (_: u128):u128 = pub_u128 22222222222222222222 in + let _:usize = sz 123 in + let _:isize = isz (-42) in + let _:isize = isz 42 in + let _:i32 = (-42l) in + let _:u128 = pub_u128 22222222222222222222 in () let patterns (_: Prims.unit) : Prims.unit = let _:Prims.unit = - match 1uy with + match 1uy <: u8 with | 2uy -> () <: Prims.unit | _ -> () <: Prims.unit in @@ -179,6 +179,24 @@ let patterns (_: Prims.unit) : Prims.unit = in () +[@@ FStar.Tactics.Typeclasses.tcinstance] +assume +val impl': Core.Marker.t_StructuralPartialEq t_Foo + +let impl = impl' + +[@@ FStar.Tactics.Typeclasses.tcinstance] +assume +val impl_1': Core.Cmp.t_PartialEq t_Foo t_Foo + +let impl_1 = impl_1' + +[@@ FStar.Tactics.Typeclasses.tcinstance] +assume +val impl_2': Core.Cmp.t_Eq t_Foo + +let impl_2 = impl_2' + let panic_with_msg (_: Prims.unit) : Prims.unit = Rust_primitives.Hax.never_to_any (Core.Panicking.panic_fmt (Core.Fmt.impl_2__new_const (sz 1) (let list = ["with msg"] in @@ -190,7 +208,7 @@ let panic_with_msg (_: Prims.unit) : Prims.unit = Rust_primitives.Hax.t_Never) let empty_array (_: Prims.unit) : Prims.unit = - let (_: t_Slice u8):t_Slice u8 = + let _:t_Slice u8 = (let list:Prims.list u8 = [] in FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 0); Rust_primitives.Hax.array_of_list 0 list) diff --git a/test-harness/src/snapshots/toolchain__loops into-fstar.snap b/test-harness/src/snapshots/toolchain__loops into-fstar.snap index 2c35945ca..75dc6849a 100644 --- 
a/test-harness/src/snapshots/toolchain__loops into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__loops into-fstar.snap @@ -71,6 +71,9 @@ let impl__M__decoded_message (self: t_M) (Core.Ops.Control_flow.t_ControlFlow (Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) (Prims.unit & Prims.unit)) Prims.unit) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) Prims.unit with | Core.Ops.Control_flow.ControlFlow_Break ret -> ret | Core.Ops.Control_flow.ControlFlow_Continue _ -> @@ -188,6 +191,8 @@ let double_sum2_return (v: t_Slice i32) : i32 = <: Core.Ops.Control_flow.t_ControlFlow (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32))) (i32 & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow i32 (i32 & i32) with | Core.Ops.Control_flow.ControlFlow_Break ret -> ret | Core.Ops.Control_flow.ControlFlow_Continue (sum, sum2) -> sum +! sum2 @@ -218,6 +223,8 @@ let double_sum_return (v: t_Slice i32) : i32 = <: Core.Ops.Control_flow.t_ControlFlow (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) + <: + Core.Ops.Control_flow.t_ControlFlow i32 i32 with | Core.Ops.Control_flow.ControlFlow_Break ret -> ret | Core.Ops.Control_flow.ControlFlow_Continue sum -> sum *! 2l @@ -314,6 +321,8 @@ let nested_return (_: Prims.unit) : i32 = <: Core.Ops.Control_flow.t_ControlFlow (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) + <: + Core.Ops.Control_flow.t_ControlFlow i32 i32 with | Core.Ops.Control_flow.ControlFlow_Break ret -> ret | Core.Ops.Control_flow.ControlFlow_Continue sum -> sum *! 2l @@ -651,7 +660,7 @@ let enumerated_chunked_slice (#v_T: Type0) (slice: t_Slice v_T) : u64 = (fun count i -> let count:u64 = count in let i:usize = i in - i <= Core.Slice.impl__len #v_T slice) + i <= Core.Slice.impl__len #v_T slice <: usize) count (fun count i -> let count:u64 = count in diff --git a/test-harness/src/snapshots/toolchain__pattern-or into-fstar.snap b/test-harness/src/snapshots/toolchain__pattern-or into-fstar.snap index 9fdc477aa..84708e9bd 100644 --- a/test-harness/src/snapshots/toolchain__pattern-or into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__pattern-or into-fstar.snap @@ -38,14 +38,14 @@ type t_E = | E_B : t_E let t_E_cast_to_repr (x: t_E) : isize = - match x with + match x <: t_E with | E_A -> isz 0 | E_B -> isz 1 -let bar (x: t_E) : Prims.unit = match x with | E_A | E_B -> () <: Prims.unit +let bar (x: t_E) : Prims.unit = match x <: t_E with | E_A | E_B -> () <: Prims.unit let deep (x: (i32 & Core.Option.t_Option i32)) : i32 = - match x with + match x <: (i32 & Core.Option.t_Option i32) with | 1l, Core.Option.Option_Some 3l | 1l, Core.Option.Option_Some 4l | 2l, Core.Option.Option_Some 3l @@ -53,7 +53,7 @@ let deep (x: (i32 & Core.Option.t_Option i32)) : i32 = | x, _ -> x let deep_capture (x: Core.Result.t_Result (i32 & i32) (i32 & i32)) : i32 = - match x with + match x <: Core.Result.t_Result (i32 & i32) (i32 & i32) with | Core.Result.Result_Ok (1l, x) | Core.Result.Result_Ok (2l, x) | Core.Result.Result_Err (3l, x) @@ -61,7 +61,7 @@ let deep_capture (x: Core.Result.t_Result (i32 & i32) (i32 & i32)) : i32 = | Core.Result.Result_Ok (x, _) | Core.Result.Result_Err (x, _) -> x let equivalent (x: (i32 & Core.Option.t_Option i32)) : i32 = - match x with + match x <: (i32 & Core.Option.t_Option i32) with | 1l, Core.Option.Option_Some 3l | 1l, Core.Option.Option_Some 4l | 2l, Core.Option.Option_Some 3l @@ -69,7 +69,7 @@ let equivalent (x: (i32 & Core.Option.t_Option i32)) : 
i32 = | x, _ -> x let nested (x: Core.Option.t_Option i32) : i32 = - match x with + match x <: Core.Option.t_Option i32 with | Core.Option.Option_Some 1l | Core.Option.Option_Some 2l -> 1l | Core.Option.Option_Some x -> x | Core.Option.Option_None -> 0l diff --git a/test-harness/src/snapshots/toolchain__patterns into-fstar.snap b/test-harness/src/snapshots/toolchain__patterns into-fstar.snap new file mode 100644 index 000000000..d3e3cf0f5 --- /dev/null +++ b/test-harness/src/snapshots/toolchain__patterns into-fstar.snap @@ -0,0 +1,41 @@ +--- +source: test-harness/src/harness.rs +expression: snapshot +info: + kind: + Translate: + backend: fstar + info: + name: patterns + manifest: patterns/Cargo.toml + description: ~ + spec: + optional: false + broken: false + issue_id: ~ + positive: true + snapshot: + stderr: true + stdout: true + include_flag: ~ + backend_options: ~ +--- +exit = 0 +stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' + +[stdout] +diagnostics = [] + +[stdout.files] +"Patterns.fst" = ''' +module Patterns +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +type t_Other = | Other : i32 -> t_Other + +type t_Test = | Test_C1 : t_Other -> t_Test + +let impl__test (self: t_Test) : i32 = match self <: t_Test with | Test_C1 c -> c._0 +''' diff --git a/test-harness/src/snapshots/toolchain__reordering into-fstar.snap b/test-harness/src/snapshots/toolchain__reordering into-fstar.snap index 385642dd2..50d0e39c8 100644 --- a/test-harness/src/snapshots/toolchain__reordering into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__reordering into-fstar.snap @@ -39,7 +39,7 @@ type t_Foo = type t_Bar = | Bar : t_Foo -> t_Bar let t_Foo_cast_to_repr (x: t_Foo) : isize = - match x with + match x <: t_Foo with | Foo_A -> isz 0 | Foo_B -> isz 1 diff --git a/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap b/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap index a2038278d..66c41f5fa 100644 --- a/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap @@ -71,10 +71,12 @@ let test (x y: Core.Option.t_Option i32) : Core.Option.t_Option i32 = x (fun i -> let i:i32 = i in - match y with + match y <: Core.Option.t_Option i32 with | Core.Option.Option_Some hoist1 -> Core.Option.Option_Some (i +! 
hoist1 <: i32) <: Core.Option.t_Option i32 | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option i32) + <: + Core.Option.t_Option (Core.Option.t_Option i32) with | Core.Option.Option_Some some -> some | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option i32 @@ -198,14 +200,14 @@ let assign_non_trivial_lhs (foo: t_Foo) : t_Foo = /// Question mark without error coercion let direct_result_question_mark (y: Core.Result.t_Result Prims.unit u32) : Core.Result.t_Result i8 u32 = - match y with + match y <: Core.Result.t_Result Prims.unit u32 with | Core.Result.Result_Ok _ -> Core.Result.Result_Ok 0y <: Core.Result.t_Result i8 u32 | Core.Result.Result_Err err -> Core.Result.Result_Err err <: Core.Result.t_Result i8 u32 /// Question mark with an error coercion let direct_result_question_mark_coercion (y: Core.Result.t_Result i8 u16) : Core.Result.t_Result i8 u32 = - match y with + match y <: Core.Result.t_Result i8 u16 with | Core.Result.Result_Ok hoist5 -> Core.Result.Result_Ok hoist5 <: Core.Result.t_Result i8 u32 | Core.Result.Result_Err err -> Core.Result.Result_Err (Core.Convert.f_from #u32 #u16 #FStar.Tactics.Typeclasses.solve err) @@ -219,7 +221,7 @@ let early_returns (x: u32) : u32 = else if x >. 30ul then - match true with + match true <: bool with | true -> 34ul | _ -> let x, hoist9:(u32 & u32) = x, 3ul <: (u32 & u32) in @@ -256,7 +258,7 @@ let local_mutation (x: u32) : u32 = Core.Num.impl__u32__wrapping_add x y else let (x, y), hoist19:((u32 & u32) & u32) = - match x with + match x <: u32 with | 12ul -> let y:u32 = Core.Num.impl__u32__wrapping_add x y in (x, y <: (u32 & u32)), 3ul <: ((u32 & u32) & u32) @@ -281,11 +283,11 @@ let monad_lifting (x: u8) : Core.Result.t_Result t_A t_B = /// Test question mark on `Option`s with some control flow let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core.Option.t_Option u8 = - match x with + match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist26 -> if hoist26 >. 10uy then - match x with + match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist28 -> (match Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add hoist28 3uy) @@ -293,14 +295,14 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. Core.Option.t_Option u8 with | Core.Option.Option_Some hoist34 -> - (match hoist34 with + (match hoist34 <: u8 with | 3uy -> (match Core.Option.Option_None <: Core.Option.t_Option u8 with | Core.Option.Option_Some some -> let v:u8 = some in - (match x with + (match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist35 -> - (match y with + (match y <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v @@ -316,12 +318,12 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. Core.Option.Option_None <: Core.Option.t_Option u8) | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8) | 4uy -> - (match z with + (match z <: Core.Option.t_Option u64 with | Core.Option.Option_Some hoist23 -> let v:u8 = 4uy +! (if hoist23 >. 
4uL <: bool then 0uy else 3uy) in - (match x with + (match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist35 -> - (match y with + (match y <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v @@ -338,9 +340,9 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8) | _ -> let v:u8 = 12uy in - match x with + match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist35 -> - (match y with + (match y <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v hoist35 @@ -355,9 +357,9 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8) | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8 else - (match x with + (match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist31 -> - (match y with + (match y <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist30 -> (match Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add hoist31 hoist30) @@ -365,14 +367,14 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. Core.Option.t_Option u8 with | Core.Option.Option_Some hoist34 -> - (match hoist34 with + (match hoist34 <: u8 with | 3uy -> (match Core.Option.Option_None <: Core.Option.t_Option u8 with | Core.Option.Option_Some some -> let v:u8 = some in - (match x with + (match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist35 -> - (match y with + (match y <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v @@ -389,12 +391,12 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8) | 4uy -> - (match z with + (match z <: Core.Option.t_Option u64 with | Core.Option.Option_Some hoist23 -> let v:u8 = 4uy +! (if hoist23 >. 4uL <: bool then 0uy else 3uy) in - (match x with + (match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist35 -> - (match y with + (match y <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v @@ -412,9 +414,9 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. Core.Option.Option_None <: Core.Option.t_Option u8) | _ -> let v:u8 = 12uy in - match x with + match x <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist35 -> - (match y with + (match y <: Core.Option.t_Option u8 with | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v @@ -460,7 +462,7 @@ let question_mark (x: u32) : Core.Result.t_Result u32 u32 = let simplifiable_question_mark (c: bool) (x: Core.Option.t_Option i32) : Core.Option.t_Option i32 = if c then - match x with + match x <: Core.Option.t_Option i32 with | Core.Option.Option_Some hoist40 -> let a:i32 = hoist40 +! 
10l in let b:i32 = 20l in diff --git a/test-harness/src/snapshots/toolchain__traits into-fstar.snap b/test-harness/src/snapshots/toolchain__traits into-fstar.snap index fcc166dc9..9e505d3a1 100644 --- a/test-harness/src/snapshots/toolchain__traits into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__traits into-fstar.snap @@ -35,11 +35,11 @@ open FStar.Mul class t_BlockSizeUser (v_Self: Type0) = { f_BlockSize:Type0 } class t_ParBlocksSizeUser (v_Self: Type0) = { - [@@@ FStar.Tactics.Typeclasses.no_method]_super_10960599340086055385:t_BlockSizeUser v_Self + [@@@ FStar.Tactics.Typeclasses.no_method]_super_17750095326162477464:t_BlockSizeUser v_Self } class t_BlockBackend (v_Self: Type0) = { - [@@@ FStar.Tactics.Typeclasses.no_method]_super_15949286759387124191:t_ParBlocksSizeUser v_Self; + [@@@ FStar.Tactics.Typeclasses.no_method]_super_7661532914804666209:t_ParBlocksSizeUser v_Self; f_proc_block_pre:Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global -> Type0; f_proc_block_post:Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global -> Prims.unit -> Type0; f_proc_block:x0: Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global @@ -55,7 +55,7 @@ open FStar.Mul class t_Bar (v_Self: Type0) (v_T: Type0) = { __marker_trait_t_Bar:Prims.unit } class t_Foo (v_Self: Type0) = { - [@@@ FStar.Tactics.Typeclasses.no_method]_super_5461126672499050919:t_Bar v_Self f_U; + [@@@ FStar.Tactics.Typeclasses.no_method]_super_16210070100893052778:t_Bar v_Self f_U; f_U:Type0 } ''' @@ -157,6 +157,23 @@ let v__f (#v_X: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Foo v_X let _:u8 = f_to_t #v_X #u8 #FStar.Tactics.Typeclasses.solve x in () ''' +"Traits.Impl_expr_in_goal.fst" = ''' +module Traits.Impl_expr_in_goal +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +class t_T2 (v_Self: Type0) = { __marker_trait_t_T2:Prims.unit } + +class t_T1 (v_Self: Type0) = { f_Assoc:Type0 } + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl + (#v_U: Type0) + (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_T1 v_U) + (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: t_T2 i1.f_Assoc) + : t_T2 v_U = { __marker_trait = () } +''' "Traits.Implicit_dependencies_issue_667_.Define_type.fst" = ''' module Traits.Implicit_dependencies_issue_667_.Define_type #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" @@ -387,11 +404,11 @@ let method_caller () class t_SubTrait (v_Self: Type0) (v_TypeArg: Type0) (v_ConstArg: usize) = { - [@@@ FStar.Tactics.Typeclasses.no_method]_super_11748868061750783190:t_Trait v_Self + [@@@ FStar.Tactics.Typeclasses.no_method]_super_1779568480311729828:t_Trait v_Self v_TypeArg v_ConstArg; f_AssocType:Type0; - f_AssocType_5566993444404141271:t_Trait f_AssocType v_TypeArg v_ConstArg + f_AssocType_7414800425644916102:t_Trait f_AssocType v_TypeArg v_ConstArg } ''' "Traits.Interlaced_consts_types.fst" = ''' @@ -468,11 +485,11 @@ open FStar.Mul class t_Trait1 (v_Self: Type0) = { f_T:Type0; - f_T_7805326132379548775:t_Trait1 f_T + f_T_2328060197809802853:t_Trait1 f_T } class t_Trait2 (v_Self: Type0) = { - [@@@ FStar.Tactics.Typeclasses.no_method]_super_4567617955834163411:t_Trait1 v_Self; + [@@@ FStar.Tactics.Typeclasses.no_method]_super_4351024728553910126:t_Trait1 v_Self; f_U:Type0 } ''' @@ -544,12 +561,12 @@ let impl__Error__for_application_callback (_: Prims.unit) : Prims.unit -> t_Err let _:Prims.unit = temp_0_ in Error_Fail <: t_Error -let t_Error_cast_to_repr (x: t_Error) : isize = match x with | Error_Fail -> isz 0 +let t_Error_cast_to_repr (x: t_Error) : isize = match x <: t_Error with | Error_Fail -> isz 0 type t_Struct 
= | Struct : t_Struct class t_SuperTrait (v_Self: Type0) = { - [@@@ FStar.Tactics.Typeclasses.no_method]_super_9442900250278684536:Core.Clone.t_Clone v_Self; + [@@@ FStar.Tactics.Typeclasses.no_method]_super_9529721400157967266:Core.Clone.t_Clone v_Self; f_function_of_super_trait_pre:v_Self -> Type0; f_function_of_super_trait_post:v_Self -> u32 -> Type0; f_function_of_super_trait:x0: v_Self @@ -561,7 +578,7 @@ class t_SuperTrait (v_Self: Type0) = { [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_SuperTrait_for_i32: t_SuperTrait i32 = { - _super_9442900250278684536 = FStar.Tactics.Typeclasses.solve; + _super_9529721400157967266 = FStar.Tactics.Typeclasses.solve; f_function_of_super_trait_pre = (fun (self: i32) -> true); f_function_of_super_trait_post = (fun (self: i32) (out: u32) -> true); f_function_of_super_trait = fun (self: i32) -> cast (Core.Num.impl__i32__abs self <: i32) <: u32 @@ -613,8 +630,7 @@ let use_impl_trait (_: Prims.unit) : Prims.unit = class t_Foo (v_Self: Type0) = { f_AssocType:Type0; - f_AssocType_15012754260415912210:t_SuperTrait f_AssocType; - f_AssocType_3242921639065184873:Core.Clone.t_Clone f_AssocType; + f_AssocType_12248650268031145847:t_SuperTrait f_AssocType; f_N:usize; f_assoc_f_pre:Prims.unit -> Type0; f_assoc_f_post:Prims.unit -> Prims.unit -> Type0; @@ -624,12 +640,12 @@ class t_Foo (v_Self: Type0) = { f_method_f_post:v_Self -> Prims.unit -> Type0; f_method_f:x0: v_Self -> Prims.Pure Prims.unit (f_method_f_pre x0) (fun result -> f_method_f_post x0 result); - f_assoc_type_pre:{| i3: Core.Marker.t_Copy f_AssocType |} -> f_AssocType -> Type0; - f_assoc_type_post:{| i3: Core.Marker.t_Copy f_AssocType |} -> f_AssocType -> Prims.unit -> Type0; - f_assoc_type:{| i3: Core.Marker.t_Copy f_AssocType |} -> x0: f_AssocType + f_assoc_type_pre:{| i2: Core.Marker.t_Copy f_AssocType |} -> f_AssocType -> Type0; + f_assoc_type_post:{| i2: Core.Marker.t_Copy f_AssocType |} -> f_AssocType -> Prims.unit -> Type0; + f_assoc_type:{| i2: Core.Marker.t_Copy f_AssocType |} -> x0: f_AssocType -> Prims.Pure Prims.unit - (f_assoc_type_pre #i3 x0) - (fun result -> f_assoc_type_post #i3 x0 result) + (f_assoc_type_pre #i2 x0) + (fun result -> f_assoc_type_post #i2 x0 result) } class t_Lang (v_Self: Type0) = { @@ -651,7 +667,7 @@ let g (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Foo v_T) (x let impl_Foo_for_tuple_: t_Foo Prims.unit = { f_AssocType = i32; - f_AssocType_15012754260415912210 = FStar.Tactics.Typeclasses.solve; + f_AssocType_12248650268031145847 = FStar.Tactics.Typeclasses.solve; f_N = sz 32; f_assoc_f_pre = (fun (_: Prims.unit) -> true); f_assoc_f_post = (fun (_: Prims.unit) (out: Prims.unit) -> true); diff --git a/test-harness/src/snapshots/toolchain__unsafe into-fstar.snap b/test-harness/src/snapshots/toolchain__unsafe into-fstar.snap index 310e2d094..4991e948c 100644 --- a/test-harness/src/snapshots/toolchain__unsafe into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__unsafe into-fstar.snap @@ -35,7 +35,8 @@ open FStar.Mul type t_Impossible = -let t_Impossible_cast_to_repr (x: t_Impossible) : Rust_primitives.Hax.t_Never = match x with +let t_Impossible_cast_to_repr (x: t_Impossible) : Rust_primitives.Hax.t_Never = + match x <: t_Impossible with let get_unchecked_example (slice: t_Slice u8) : Prims.Pure u8 diff --git a/tests/Cargo.lock b/tests/Cargo.lock index 5fcbe4dc7..1a2c1f3b8 100644 --- a/tests/Cargo.lock +++ b/tests/Cargo.lock @@ -320,7 +320,7 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name 
= "hax-lib" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "hax-lib-macros", "num-bigint", @@ -329,7 +329,7 @@ dependencies = [ [[package]] name = "hax-lib-macros" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "hax-lib-macros-types", "paste", @@ -341,7 +341,7 @@ dependencies = [ [[package]] name = "hax-lib-macros-types" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "proc-macro2", "quote", @@ -352,14 +352,14 @@ dependencies = [ [[package]] name = "hax-lib-protocol" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "libcrux", ] [[package]] name = "hax-lib-protocol-macros" -version = "0.1.0-alpha.1" +version = "0.1.0-rc.1" dependencies = [ "proc-macro-error", "proc-macro2", @@ -664,6 +664,10 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" name = "pattern-or" version = "0.1.0" +[[package]] +name = "patterns" +version = "0.1.0" + [[package]] name = "ping-pong" version = "0.1.0" diff --git a/tests/Cargo.toml b/tests/Cargo.toml index df3b03ffa..5e73e647e 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -23,6 +23,7 @@ members = [ "dyn", "reordering", "nested-derefs", + "patterns", "proverif-minimal", "proverif-basic-structs", "proverif-ping-pong", diff --git a/tests/attribute-opaque/src/lib.rs b/tests/attribute-opaque/src/lib.rs index 7840b4fa8..28671f800 100644 --- a/tests/attribute-opaque/src/lib.rs +++ b/tests/attribute-opaque/src/lib.rs @@ -1,11 +1,85 @@ -#[hax_lib::opaque_type] +#[hax_lib::opaque] struct OpaqueStruct { field: [T; X], other_field: U, } -#[hax_lib::opaque_type] +#[hax_lib::opaque] enum OpaqueEnum { A([T; X]), B(U), } + +#[hax_lib::opaque] +fn f_generic(x: U) -> OpaqueEnum { + OpaqueEnum::B(x) +} + +#[hax_lib::opaque] +fn f(x: bool, y: bool) -> bool { + x && y +} + +#[hax_lib::opaque] +#[hax_lib::requires(x)] +#[hax_lib::ensures(|result| result == y)] +fn f_pre_post(x: bool, y: bool) -> bool { + x && y +} + +#[hax_lib::attributes] +trait T { + type U; + const c: u8; + fn d(); + #[hax_lib::requires(x == 0)] + fn m(&self, x: u8) -> bool; +} + +#[hax_lib::attributes] +#[hax_lib::opaque] +impl T for u8 { + type U = u8; + const c: u8 = 0; + fn d() { + unsafe { + let my_num: i32 = 10; + let _my_num_ptr: *const i32 = &my_num; + let mut my_speed: i32 = 88; + let _my_speed_ptr: *mut i32 = &mut my_speed; + } + } + #[hax_lib::requires(x == 0)] + #[hax_lib::ensures(|result| result)] + fn m(&self, x: u8) -> bool { + *self >= x + } +} + +trait TrGeneric { + fn f(x: U) -> Self; +} + +#[hax_lib::opaque] +impl TrGeneric for i32 { + fn f(_x: U) -> Self { + 0 + } +} + +#[hax_lib::opaque] +const C: u8 = 0 + 0; + +struct S1(); + +impl S1 { + #[hax_lib::opaque] + fn f_s1() {} +} + +struct S2(); + +#[hax_lib::opaque] +impl S2 { + fn f_s2() {} +} diff --git a/tests/cli/interface-only/src/lib.rs b/tests/cli/interface-only/src/lib.rs index 411c3de99..1e85ef7fe 100644 --- a/tests/cli/interface-only/src/lib.rs +++ b/tests/cli/interface-only/src/lib.rs @@ -65,3 +65,29 @@ impl From<()> for Param { Param { value: [0; SIZE] } } } + +fn f_generic(_x: U) -> Param { + Param { value: [0; X] } +} + +trait T { + type Assoc; + fn d(); +} + +/// Impls with associated types are not erased +impl T for u8 { + type Assoc = u8; + fn d() {} +} +trait T2 { + fn d(); +} + +/// Items can be forced to be transparent +#[hax_lib::transparent] +#[hax_lib::attributes] +impl T2 for u8 { + #[hax_lib::requires(false)] + fn d() {} +} diff --git a/tests/generics/src/lib.rs b/tests/generics/src/lib.rs index 
cb14662fd..131c8da89 100644 --- a/tests/generics/src/lib.rs +++ b/tests/generics/src/lib.rs @@ -51,3 +51,24 @@ mod defaults_generics { struct Defaults([T; N]); fn f(_: Defaults) {} } + +/// See https://github.com/hacspec/hax/issues/1176 +mod impl_generics { + struct Test(); + + impl Test { + fn set_ciphersuites<S>(&self, ciphers: impl IntoIterator<Item = S>) -> Result<(), ()> + where + S: AsRef<str>, + { + Ok(()) + } + + fn set_alpn_protocols<S>(&self, _protocols: impl IntoIterator<Item = S>) -> Result<(), ()> + where + S: AsRef<str>, + { + Ok(()) + } + } +} diff --git a/tests/patterns/Cargo.toml b/tests/patterns/Cargo.toml new file mode 100644 index 000000000..f6b4e63d6 --- /dev/null +++ b/tests/patterns/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "patterns" +version = "0.1.0" +edition = "2021" + +[dependencies] + +[package.metadata.hax-tests] +into."fstar" = { issue_id = "1170" } \ No newline at end of file diff --git a/tests/patterns/src/lib.rs b/tests/patterns/src/lib.rs new file mode 100644 index 000000000..058e37b70 --- /dev/null +++ b/tests/patterns/src/lib.rs @@ -0,0 +1,15 @@ +#![allow(dead_code)] + +struct Other<'a>(&'a i32); + +enum Test<'a> { + C1(Other<'a>), +} + +impl<'a> Test<'a> { + fn test(&self) -> i32 { + match self { + Self::C1(c) => *c.0, + } + } +} diff --git a/tests/traits/src/lib.rs b/tests/traits/src/lib.rs index 9fea11184..1e38b06f0 100644 --- a/tests/traits/src/lib.rs +++ b/tests/traits/src/lib.rs @@ -295,3 +295,14 @@ mod default_traits_parameters { } trait Bar::U> {} } + +// issue 1218 +mod impl_expr_in_goal { + trait T1 { + type Assoc; + } + + trait T2 {} + + impl<U: T1> T2 for U where U::Assoc: T2 {} +}