diff --git a/cli/driver/src/exporter.rs b/cli/driver/src/exporter.rs index 96adfabb2..ff3239cf3 100644 --- a/cli/driver/src/exporter.rs +++ b/cli/driver/src/exporter.rs @@ -286,6 +286,7 @@ impl Callbacks for ExtractionCallbacks { .flatten() .collect(), def_ids, + hax_version: hax_types::HAX_VERSION.into(), }; haxmeta.write(&mut file, cache_map); } diff --git a/cli/subcommands/build.rs b/cli/subcommands/build.rs index b1a0402c2..68c88cd2e 100644 --- a/cli/subcommands/build.rs +++ b/cli/subcommands/build.rs @@ -14,7 +14,7 @@ fn rustc_version_env_var() { } fn json_schema_static_asset() { - let schema = schemars::schema_for!(( + let mut schema = schemars::schema_for!(( hax_frontend_exporter::Item, hax_types::cli_options::Options, hax_types::diagnostics::Diagnostics, @@ -25,6 +25,7 @@ fn json_schema_static_asset() { hax_types::engine_api::protocol::ToEngine, hax_lib_macros_types::AttrPayload, )); + schema.schema.metadata.get_or_insert_default().id = Some(hax_types::HAX_VERSION.into()); serde_json::to_writer( std::fs::File::create(format!("{}/schema.json", std::env::var("OUT_DIR").unwrap())) .unwrap(), diff --git a/cli/subcommands/src/cargo_hax.rs b/cli/subcommands/src/cargo_hax.rs index 2d2f91749..9a23c5dbd 100644 --- a/cli/subcommands/src/cargo_hax.rs +++ b/cli/subcommands/src/cargo_hax.rs @@ -240,6 +240,7 @@ fn run_engine( message_format: MessageFormat, ) -> bool { let engine_options = EngineOptions { + hax_version: haxmeta.hax_version, backend: backend.clone(), input: haxmeta.items, impl_infos: haxmeta.impl_infos, @@ -532,6 +533,7 @@ fn compute_haxmeta_files(options: &Options) -> (Vec, i32) { } else { 0 }; + (haxmeta_files, exit_code) } diff --git a/engine/backends/fstar/fstar_backend.ml b/engine/backends/fstar/fstar_backend.ml index 714760ff5..4dc350d1f 100644 --- a/engine/backends/fstar/fstar_backend.ml +++ b/engine/backends/fstar/fstar_backend.ml @@ -111,6 +111,24 @@ module Make struct open Ctx + module StringToFStar = struct + let catch_parsing_error (type a b) 
kind span (f : a -> b) x = + try f x + with e -> + let kind = + Types.FStarParseError + { + fstar_snippet = ""; + details = + "While parsing a " ^ kind ^ ", error: " + ^ Base.Error.to_string_hum (Base.Error.of_exn e); + } + in + Error.raise { span; kind } + + let term span = catch_parsing_error "term" span F.term_of_string + end + let doc_to_string : PPrint.document -> string = FStar_Pprint.pretty_string 1.0 (Z.of_int ctx.line_width) @@ -667,7 +685,7 @@ struct kind = UnsupportedMacro { id = [%show: global_ident] macro }; span = e.span; } - | Quote quote -> pquote e.span quote |> F.term_of_string + | Quote quote -> pquote e.span quote |> StringToFStar.term e.span | _ -> . (** Prints a `quote` to a string *) diff --git a/engine/bin/lib.ml b/engine/bin/lib.ml index 5695dd087..71f302bf8 100644 --- a/engine/bin/lib.ml +++ b/engine/bin/lib.ml @@ -186,10 +186,25 @@ let parse_options () = let table, json = Hax_io.read_json () |> Option.value_exn |> parse_id_table_node in + let version = + try Yojson.Safe.Util.(member "hax_version" json |> to_string) + with _ -> "unknown" + in + if String.equal version Types.hax_version |> not then ( + prerr_endline + [%string + {| +The versions of `hax-engine` and of `cargo-hax` are different: + - `hax-engine` version: %{Types.hax_version} + - `cargo-hax` version: %{version} + +Please reinstall hax. 
+|}]; + Stdlib.exit 1); table |> List.iter ~f:(fun (id, json) -> Hashtbl.add_exn Types.cache_map ~key:id ~data:(`JSON json)); - let options = Types.parse_engine_options json in + let options = [%of_yojson: Types.engine_options] json in Profiling.enabled := options.backend.profile; options diff --git a/engine/lib/ast_builder.ml b/engine/lib/ast_builder.ml index a092f9225..926fe262b 100644 --- a/engine/lib/ast_builder.ml +++ b/engine/lib/ast_builder.ml @@ -210,7 +210,8 @@ module Make (F : Features.T) = struct end module type S = module type of Make0 (struct - let span = failwith "dummy" + (* This [failwith] is OK: this module is never actually used for computation. It is useful only for typing. *) + let span = failwith "type only module: this will never be computed" end) module Make (Span : sig diff --git a/engine/lib/ast_utils.ml b/engine/lib/ast_utils.ml index 34c37f867..2aa8b2956 100644 --- a/engine/lib/ast_utils.ml +++ b/engine/lib/ast_utils.ml @@ -952,8 +952,11 @@ module Make (F : Features.T) = struct module Debug : sig val expr : ?label:string -> AST.expr -> unit (** Prints an expression pretty-printed as Rust, with its full - AST encoded as JSON, available as a file, so that one can - `jless` or `jq` into it. *) + AST encoded as JSON, available as a file, so that one can + `jless` or `jq` into it. 
*) + + val item' : ?label:string -> AST.item -> string + val item : ?label:string -> AST.item -> unit end = struct let expr ?(label = "") (e : AST.expr) : unit = let path = tempfile_path ~suffix:".json" in @@ -964,6 +967,18 @@ module Make (F : Features.T) = struct ^ "\n```\x1b[34m JSON-encoded AST available at \x1b[1m" ^ path ^ "\x1b[0m (hint: use `jless " ^ path ^ "`)" |> Stdio.prerr_endline + + let item' ?(label = "") (e : AST.item) : string = + let path = tempfile_path ~suffix:".json" in + Core.Out_channel.write_all path + ~data:([%yojson_of: AST.item] e |> Yojson.Safe.pretty_to_string); + let e = LiftToFullAst.item e in + "```rust " ^ label ^ "\n" ^ Print_rust.pitem_str e + ^ "\n```\x1b[34m JSON-encoded AST available at \x1b[1m" ^ path + ^ "\x1b[0m (hint: use `jless " ^ path ^ "`)" + + let item ?(label = "") (e : AST.item) = + item' ~label e |> Stdio.prerr_endline end let unbox_expr' (next : expr -> expr) (e : expr) : expr = diff --git a/engine/lib/attr_payloads.ml b/engine/lib/attr_payloads.ml index 5b8312274..c3c330c64 100644 --- a/engine/lib/attr_payloads.ml +++ b/engine/lib/attr_payloads.ml @@ -6,7 +6,18 @@ let payloads : attrs -> (Types.ha_payload * span) list = let parse = (* we have to parse ["JSON"]: first a string, then a ha_payload *) function - | `String s -> Yojson.Safe.from_string s |> Types.parse_ha_payload + | `String s -> ( + match Yojson.Safe.from_string s |> Types.safe_ha_payload_of_yojson with + | Error _ -> + Stdlib.prerr_endline + [%string + {| +The hax engine could not parse a hax attribute. +This means that the crate being extracted and the version of hax engine are incompatible. +Please make sure the `hax-lib` dependency of the extracted crate matches hax-engine's version (%{Types.hax_version}). 
+|}]; + Stdlib.exit 1 + | Ok value -> value) | x -> Stdlib.failwith @@ "Attr_payloads: payloads: expected a string while parsing JSON, got " @@ -23,7 +34,7 @@ let payloads : attrs -> (Types.ha_payload * span) list = (** Create a attribute out of a [payload] *) let to_attr (payload : Types.ha_payload) (span : span) : attr = let json = - `String (Yojson.Safe.to_string (Types.to_json_ha_payload payload)) + `String (Yojson.Safe.to_string ([%yojson_of: Types.ha_payload] payload)) in let kind : attr_kind = Tool { path = "_hax::json"; tokens = Yojson.Safe.to_string json } diff --git a/engine/lib/dependencies.ml b/engine/lib/dependencies.ml index e46fcb555..6637e60c8 100644 --- a/engine/lib/dependencies.ml +++ b/engine/lib/dependencies.ml @@ -465,9 +465,19 @@ module Make (F : Features.T) = struct ]) in let renamings = - Map.of_alist_exn - (module Concrete_ident) - (renamings @ variant_and_constructors_renamings) + match + Map.of_alist + (module Concrete_ident) + (renamings @ variant_and_constructors_renamings) + with + | `Duplicate_key dup -> + failwith + [%string + "Fatal error: in dependency analysis, we construct a renaming \ + key-value list with a guarantee of unicity in keys. However, \ + we found the following key twice:\n\ + %{[%show: concrete_ident] dup}"] + | `Ok value -> value in let rename = let renamer _lvl i = Map.find renamings i |> Option.value ~default:i in @@ -493,7 +503,15 @@ module Make (F : Features.T) = struct include Comparable.Make (T) end in let bundle_of_item = - Hashtbl.of_alist_exn (module ComparableItem) bundle_transforms + match Hashtbl.of_alist (module ComparableItem) bundle_transforms with + | `Duplicate_key dup -> + failwith + [%string + "Fatal error: in dependency analysis, [bundles_transforms] is \ + expected to be a key-value list with a guarantee of unicity in \ + keys. 
However, we found the following key (an item) twice:\n\ + %{U.Debug.item' dup}"] + | `Ok value -> value in let maybe_transform_item item = match Hashtbl.find bundle_of_item item with diff --git a/engine/lib/hax_io.ml b/engine/lib/hax_io.ml index 0038375be..d0e836e32 100644 --- a/engine/lib/hax_io.ml +++ b/engine/lib/hax_io.ml @@ -36,10 +36,10 @@ include ( end) let read () : Types.to_engine = - read_json () |> Option.value_exn |> Types.parse_to_engine + read_json () |> Option.value_exn |> [%of_yojson: Types.to_engine] let write (msg : Types.from_engine) : unit = - Types.to_json_from_engine msg |> write_json + [%yojson_of: Types.from_engine] msg |> write_json let close () : unit = write Exit; diff --git a/engine/lib/import_thir.ml b/engine/lib/import_thir.ml index 3df61d156..cea5112ac 100644 --- a/engine/lib/import_thir.ml +++ b/engine/lib/import_thir.ml @@ -24,7 +24,7 @@ let unimplemented ~issue_id (span : Thir.span list) (details : string) = let kind = T.Unimplemented { - issue_id = Some (MyInt64.of_int_exn issue_id); + issue_id = Some (MyInt64.of_int issue_id); details = String.(if details = "" then None else Some details); } in @@ -676,7 +676,12 @@ end) : EXPR = struct LocalVar { name = id.name; - id = Local_ident.mk_id Cnst (MyInt64.to_int_exn id.index); + id = + Local_ident.mk_id Cnst + (MyInt64.to_int id.index + |> Option.value_or_thunk ~default:(fun _ -> + assertion_failure [ e.span ] + "Expected const id to fit in an OCaml native int")); } | Repeat { value; count } -> let value = c_expr value in @@ -1038,7 +1043,16 @@ end) : EXPR = struct assertion_failure [ span ] "Ty::Alias with AliasTyKind::Weak" | Param { index; name } -> (* TODO: [id] might not unique *) - TParam { name; id = Local_ident.mk_id Typ (MyInt64.to_int_exn index) } + TParam + { + name; + id = + Local_ident.mk_id Typ + (MyInt64.to_int index + |> Option.value_or_thunk ~default:(fun _ -> + assertion_failure [ span ] + "Expected param id to fit in an OCaml native int")); + } | Error -> 
assertion_failure [ span ] "got type `Error`: Rust compilation probably failed." @@ -1172,7 +1186,11 @@ end) : EXPR = struct { typ_span = Option.map ~f:Span.of_thir param.ty_span; typ = c_ty (Option.value ~default:span param.ty_span) param.ty; - pat = c_pat (Option.value_exn param.pat); + pat = + c_pat + (Option.value_or_thunk param.pat ~default:(fun _ -> + assertion_failure [ span ] + "c_param: expected param.pat to be non-empty")); attrs = c_attrs param.attributes; } @@ -1492,14 +1510,17 @@ and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list = |> not in let c_body = if drop_body then c_expr_drop_body else c_expr in + let assert_item_def_id () = + Option.value_or_thunk item.def_id ~default:(fun _ -> + assertion_failure [ item.span ] "Expected this item to have a `def_id`") + in (* TODO: things might be unnamed (e.g. constants) *) match (item.kind : Thir.item_kind) with | Const (_, generics, body) -> mk @@ Fn { - name = - Concrete_ident.of_def_id Value (Option.value_exn item.def_id); + name = Concrete_ident.of_def_id Value (assert_item_def_id ()); generics = c_generics generics; body = c_body body; params = []; @@ -1509,7 +1530,7 @@ and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list = mk @@ TyAlias { - name = Concrete_ident.of_def_id Type (Option.value_exn item.def_id); + name = Concrete_ident.of_def_id Type (assert_item_def_id ()); generics = c_generics generics; ty = c_ty item.span ty; } @@ -1517,8 +1538,7 @@ and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list = mk @@ Fn { - name = - Concrete_ident.of_def_id Value (Option.value_exn item.def_id); + name = Concrete_ident.of_def_id Value (assert_item_def_id ()); generics = c_generics generics; body = c_body body; params = c_fn_params item.span params; @@ -1527,11 +1547,11 @@ and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list = | (Enum (_, generics, _) | Struct (_, generics)) when erased -> let generics = c_generics generics in let is_struct = 
match item.kind with Struct _ -> true | _ -> false in - let def_id = Option.value_exn item.def_id in + let def_id = assert_item_def_id () in let name = Concrete_ident.of_def_id Type def_id in mk @@ Type { name; generics; variants = []; is_struct } | Enum (variants, generics, repr) -> - let def_id = Option.value_exn item.def_id in + let def_id = assert_item_def_id () in let generics = c_generics generics in let is_struct = false in let kind = Concrete_ident.Kind.Constructor { is_struct } in @@ -1591,7 +1611,7 @@ and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list = if is_primitive then cast_fun :: result else result | Struct (v, generics) -> let generics = c_generics generics in - let def_id = Option.value_exn item.def_id in + let def_id = assert_item_def_id () in let is_struct = true in (* repeating the attributes of the item in the variant: TODO is that ok? *) let v = @@ -1632,9 +1652,7 @@ and c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list = ~f:(fun { attributes; _ } -> not (should_skip attributes)) items in - let name = - Concrete_ident.of_def_id Trait (Option.value_exn item.def_id) - in + let name = Concrete_ident.of_def_id Trait (assert_item_def_id ()) in let { params; constraints } = c_generics generics in let self = let id = Local_ident.mk_id Typ 0 (* todo *) in diff --git a/engine/lib/phases/phase_cf_into_monads.ml b/engine/lib/phases/phase_cf_into_monads.ml index 373bc06ef..d5565c4fb 100644 --- a/engine/lib/phases/phase_cf_into_monads.ml +++ b/engine/lib/phases/phase_cf_into_monads.ml @@ -171,18 +171,19 @@ struct arms in let arms = - if List.is_empty arms then [] - else - let m = - List.map ~f:(fun ({ monad; _ }, _) -> monad) arms - |> List.reduce_exn ~f:(KnownMonads.lub span) - in - List.map - ~f:(fun (mself, (arm_pat, span, body, guard)) -> - let body = KnownMonads.lift "Match" body mself.monad m in - let arm_pat = { arm_pat with typ = body.typ } in - ({ arm = { arm_pat; body; guard }; span } : B.arm)) - arms + let m = 
+ List.map ~f:(fun ({ monad; _ }, _) -> monad) arms + |> List.reduce ~f:(KnownMonads.lub span) + in + match m with + | None -> [] (* [arms] is empty *) + | Some m -> + List.map + ~f:(fun (mself, (arm_pat, span, body, guard)) -> + let body = KnownMonads.lift "Match" body mself.monad m in + let arm_pat = { arm_pat with typ = body.typ } in + ({ arm = { arm_pat; body; guard }; span } : B.arm)) + arms in let typ = match arms with [] -> UB.never_typ | hd :: _ -> hd.arm.body.typ diff --git a/engine/lib/phases/phase_hoist_disjunctive_patterns.ml b/engine/lib/phases/phase_hoist_disjunctive_patterns.ml index 70e92d163..cc637e2c8 100644 --- a/engine/lib/phases/phase_hoist_disjunctive_patterns.ml +++ b/engine/lib/phases/phase_hoist_disjunctive_patterns.ml @@ -72,6 +72,7 @@ module Make (F : Features.T) = List.map (treat_args [ [] ] fields_as_pat) ~f:(fun fields_as_pat -> let fields = + (* exn justification: `rev_map fields` and `fields` have the same length *) List.map2_exn fields_as_pat fields ~f:(fun pat { field; _ } -> { field; pat }) in diff --git a/engine/lib/phases/phase_transform_hax_lib_inline.ml b/engine/lib/phases/phase_transform_hax_lib_inline.ml index 6d739ebaa..8ee92d0c3 100644 --- a/engine/lib/phases/phase_transform_hax_lib_inline.ml +++ b/engine/lib/phases/phase_transform_hax_lib_inline.ml @@ -106,11 +106,21 @@ module%inlined_contents Make (F : Features.T) = struct let id = extract_pattern e |> Option.bind ~f:first_global_ident - |> Option.value_exn + |> Option.value_or_thunk ~default:(fun _ -> + Error.assertion_failure span + "Could not extract pattern (case constructor): \ + this may be a bug in the quote macros in \ + hax-lib.") in `Expr { e with e = GlobalVar id } | Some "_pat" -> - let pat = extract_pattern e |> Option.value_exn in + let pat = + extract_pattern e + |> Option.value_or_thunk ~default:(fun _ -> + Error.assertion_failure span + "Could not extract pattern (case pat): this may \ + be a bug in the quote macros in hax-lib.") + in `Pat pat | Some 
"_ty" -> let typ = diff --git a/engine/names/extract/build.rs b/engine/names/extract/build.rs index d5e8f563f..1ffdc56b6 100644 --- a/engine/names/extract/build.rs +++ b/engine/names/extract/build.rs @@ -140,7 +140,7 @@ fn reader_to_str(s: String) -> String { result += "\n"; result += "module Values = struct\n"; for (json, name) in &def_ids { - result += &format!("{TAB}let parsed_{name} = Types.parse_def_id (Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{})\n", "{", json, "}"); + result += &format!("{TAB}let parsed_{name} = Types.def_id_of_yojson (Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{})\n", "{", json, "}"); } result += "end\n\n"; @@ -155,7 +155,7 @@ fn reader_to_str(s: String) -> String { result += &format!("let impl_infos_json_list = match Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{} with | `List l -> l | _ -> failwith \"Expected a list of `def_id * impl_infos`\"\n\n", "{", serde_json::to_string(&impl_infos).unwrap(), "}"); result += - &format!("let impl_infos = Base.List.map ~f:(function | `List [did; ii] -> (Types.parse_def_id did, Types.parse_impl_infos ii) | _ -> failwith \"Expected tuple\") impl_infos_json_list"); + &format!("let impl_infos = Base.List.map ~f:(function | `List [did; ii] -> (Types.def_id_of_yojson did, Types.impl_infos_of_yojson ii) | _ -> failwith \"Expected tuple\") impl_infos_json_list"); result } diff --git a/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js b/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js index 2b6bf3b5c..3a9b866b4 100644 --- a/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js +++ b/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js @@ -1,4 +1,4 @@ -const keys = p => +const keys = p => new Set( Object.keys(p) .filter(k => ![ @@ -7,13 +7,13 @@ const keys = p => .filter(k => p?.additionalProperties !== false || k != 'additionalProperties') ); const eq = (xs, ys) => - xs.size === ys.size && - [...xs].every((x) => ys.has(x)); + xs.size === ys.size && + 
[...xs].every((x) => ys.has(x)); let todo = (todo = "todo") => null; let assert = (fact, msg = "assert") => { - if(!fact) + if (!fact) throw msg; }; @@ -29,7 +29,7 @@ const clean = o => { if (o instanceof Object && exact_keys(o, 'allOf') && o.allOf.length == 1 - ) { + ) { let first = o.allOf[0]; delete o['allOf']; for (let k in first) @@ -40,13 +40,13 @@ const clean = o => { && o.type instanceof Array && o.type.length === 2 && o.type.includes('null') - ) { + ) { let type = o.type.filter(x => x != 'null')[0]; let other = JSON.parse(JSON.stringify(o)); other.type = type; for (let k in o) delete o[k]; - o.anyOf = [other, {type: 'null'}]; + o.anyOf = [other, { type: 'null' }]; } if (o instanceof Array) { return o @@ -74,17 +74,17 @@ let variantNameOf = s => { return v + "'"; return v; }; -let typeNameOf = s => s.replace(/[A-Z]/g, (l, i) => `${i?'_':''}${l.toLowerCase()}`); +let typeNameOf = s => s.replace(/[A-Z]/g, (l, i) => `${i ? '_' : ''}${l.toLowerCase()}`); let fieldNameOf = s => { - let ocaml_keywords = [ "and", "as", "assert", "asr", "begin", "class", "constraint", - "do", "done", "downto", "else", "end", "exception", "external", - "false", "for", "fun", "function", "functor", "if", "in", - "include", "inherit", "initializer", "land", "lazy", "let", - "lor", "lsl", "lsr", "lxor", "match", "method", "mod", "module", - "mutable", "new", "nonrec", "object", "of", "open", "or", - "private", "rec", "sig", "struct", "then", "to", "true", "try", - "type", "val", "virtual", "when", "while", "with" - ]; + let ocaml_keywords = ["and", "as", "assert", "asr", "begin", "class", "constraint", + "do", "done", "downto", "else", "end", "exception", "external", + "false", "for", "fun", "function", "functor", "if", "in", + "include", "inherit", "initializer", "land", "lazy", "let", + "lor", "lsl", "lsr", "lxor", "match", "method", "mod", "module", + "mutable", "new", "nonrec", "object", "of", "open", "or", + "private", "rec", "sig", "struct", "then", "to", "true", "try", + "type", 
"val", "virtual", "when", "while", "with" + ]; if (ocaml_keywords.includes(s)) return s + "'"; return s; @@ -93,7 +93,7 @@ let fieldNameOf = s => { let ensureUnique = (() => { let cache = {}; return (kind, v, disambiguer) => { - let key = JSON.stringify({kind, v}); + let key = JSON.stringify({ kind, v }); // TODO: enble check below, find a good solution // if(cache[key]) // throw `dup ${kind}, ${v}`; @@ -103,18 +103,18 @@ let ensureUnique = (() => { })(); const util = require('util'); -let log_full = o => console.error(util.inspect(o, {showHidden: false, depth: null, colors: true})); +let log_full = o => console.error(util.inspect(o, { showHidden: false, depth: null, colors: true })); let trace1 = (name, f) => (input) => { let output = f(input); - log_full({name, input, output}); + log_full({ name, input, output }); return output; }; let ocaml_of_type_expr = (o, path) => { if (!path) throw "Path missing!"; - let {kind, payload} = o; + let { kind, payload } = o; return (({ option: type => `(${ocaml_of_type_expr(type, [...path, 'option'])} option)`, unit: _ => `unit`, @@ -131,13 +131,13 @@ let ocaml_of_type_expr = (o, path) => { name: payload => typeNameOf(payload), })[kind] || (_ => { log_full(o); - throw "ocaml_of_type_expr: bad kind "+kind; + throw "ocaml_of_type_expr: bad kind " + kind; }))(payload); }; let mk_match = (scrut, arms, path) => { - if(!path){ + if (!path) { console.trace(); throw "Path missing!"; } @@ -154,13 +154,13 @@ let wrap_paren = s => `(${s})`; let ocaml_yojson_of_type_expr = (o, subject, path) => { if (!path) throw "Path missing!"; - let {kind, payload} = o; + let { kind, payload } = o; return `(${(({ option: type => `match ${subject} with | Option.Some x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'Some'])} | _ -> \`Null`, unit: _ => `\`Null`, tuple: types => `let (${types.map((t, i) => 'x' + i)}) = ${subject} in \`List [${types.map((t, i) => ocaml_yojson_of_type_expr(t, 'x' + i, [...path, 'tuple', i])).join(';')}]`, - array: type => 
+ array: type => `\`List (List.map (fun x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'array'])}) ${subject})`, boolean: _ => `\`Bool ${subject}`, string: _ => `\`String ${subject}`, @@ -170,10 +170,10 @@ let ocaml_yojson_of_type_expr = (o, subject, path) => { int: `\`Int ${subject}` })[o.repr], char: _ => `\`String (Base.Char.to_string ${subject})`, - name: payload => `to_json_${typeNameOf(payload)} ${subject}`, + name: payload => `yojson_of_${typeNameOf(payload)} ${subject}`, })[kind] || (_ => { log_full(o); - throw "ocaml_arms_of_type_expr: bad kind "+kind; + throw "ocaml_arms_of_type_expr: bad kind " + kind; }))(payload)})`; }; @@ -181,7 +181,7 @@ let ocaml_yojson_of_type_expr = (o, subject, path) => { let ocaml_arms_of_type_expr = (o, path) => { if (!path) throw "Path missing!"; - let {kind, payload} = o; + let { kind, payload } = o; return (({ option: type => [ [`\`Null`, `Option.None`], @@ -189,17 +189,17 @@ let ocaml_arms_of_type_expr = (o, path) => { ], unit: _ => [[`\`Null`, '()']], tuple: types => { - let sub_matches = types.map((type, i) => + let sub_matches = types.map((type, i) => mk_match(`v${i}`, ocaml_arms_of_type_expr(type, [...path, 'tuple', i]), [...path, 'tuple'])); return [ [`\`List [${types.map((_, i) => `v${i}`).join(';')}]`, - `(${sub_matches.join(',')})` + `(${sub_matches.join(',')})` ], ]; }, array: type => [ [`\`List l`, - `List.map (fun x -> ${mk_match('x', ocaml_arms_of_type_expr(type, [...path, 'array']), [...path, 'array'])}) l` + `List.map (fun x -> ${mk_match('x', ocaml_arms_of_type_expr(type, [...path, 'array']), [...path, 'array'])}) l` ] ], boolean: _ => [[`\`Bool b`, 'b']], @@ -219,10 +219,10 @@ let ocaml_arms_of_type_expr = (o, path) => { [`\`Intlit s`, 'Base.Int.of_string s'] ] })[o.repr], - name: payload => [['remains', `parse_${typeNameOf(payload)} remains`]], + name: payload => [['remains', `${typeNameOf(payload)}_of_yojson remains`]], })[kind] || (_ => { log_full(o); - throw "ocaml_arms_of_type_expr: bad kind 
"+kind; + throw "ocaml_arms_of_type_expr: bad kind " + kind; }))(payload); }; @@ -232,10 +232,10 @@ let parse_type_name = s => { return s.split('/').slice(-1)[0]; }; -let int_repr_of_format = format => - (format.endsWith('int128') || format == 'uint64' || format == 'uint' /*`uint`s are `usize`s actually, so that's safer to assume it's a uint64, see https://github.com/GREsau/schemars/blob/386e3d7f5ac601795fb4e247291bbef31512ded3/schemars/src/json_schema_impls/primitives.rs#L85C16-L85C21*/) - ? 'string' - : (format == 'int64' || format == 'uint32' ? 'int64' : 'int'); +let int_repr_of_format = format => + (format.endsWith('int128') || format == 'uint64' || format == 'uint' /*`uint`s are `usize`s actually, so that's safer to assume it's a uint64, see https://github.com/GREsau/schemars/blob/386e3d7f5ac601795fb4e247291bbef31512ded3/schemars/src/json_schema_impls/primitives.rs#L85C16-L85C21*/) + ? 'string' + : (format == 'int64' || format == 'uint32' ? 'int64' : 'int'); let is_type = { option: def => { @@ -244,14 +244,14 @@ let is_type = { && is_type.expr(def.anyOf[0]) && exact_keys(def.anyOf[1], 'type') && def.anyOf[1].type === 'null' - ) + ) return { kind: 'option', payload: is_type.expr(def.anyOf[0]) }; return false; }, - + unit: def => { if (exact_keys(def, 'type') && def.type === 'null') @@ -272,7 +272,7 @@ let is_type = { }; return false; }, - + array: def => { if (exact_keys(def, 'type', 'items') && def.type === 'array' @@ -283,37 +283,37 @@ let is_type = { }; return false; }, - - expr: def => - (exact_keys(def, '$ref') ? { - kind: 'name', payload: parse_type_name(def['$ref']) - } : false) + + expr: def => + (exact_keys(def, '$ref') ? { + kind: 'name', payload: parse_type_name(def['$ref']) + } : false) || is_type.option(def) || is_type.array(def) || is_type.unit(def) || is_type.tuple(def) || (def.type === 'integer' - ? {kind: 'integer', repr: int_repr_of_format(def.format)} + ? 
{ kind: 'integer', repr: int_repr_of_format(def.format) } : false) || (def.type === 'string' && def.maxLength === def.minLength && def.minLength === 1 - ? {kind: 'char'} + ? { kind: 'char' } : false) - || ( ( exact_keys(def, 'type') - && ['boolean', 'string'].includes(def.type) - ) ? {kind: def.type} : false - ) || false, + || ((exact_keys(def, 'type') + && ['boolean', 'string'].includes(def.type) + ) ? { kind: def.type } : false + ) || false, record: def => { if ((eq(keys(def), new Set(["type", "required", "properties"])) - || eq(keys(def), new Set(["type", "properties"])) - ) + || eq(keys(def), new Set(["type", "properties"])) + ) && def.type === "object" && (def.required || []).every(k => typeof k == 'string') && Object.values(def.properties).every(is_type.expr)) return Object.fromEntries(Object.entries(def.properties).map(([n, v]) => [n, is_type.expr(v)])); return false; }, - + variant: def => { let doc = def.description; if (exporters.enum.guard(def)) @@ -327,7 +327,7 @@ let is_type = { if (exact_keys(def, 'type', 'required', 'properties') && def.type === 'object' && Object.values(def.properties).length == 1 - ){ + ) { let [name, value] = Object.entries(def.properties)[0]; if (is_type.expr(value)) return [{ @@ -362,7 +362,7 @@ let is_type = { let export_record = (fields, path) => { let record_expression = fields.map(([field, type], i) => { - let p = [...path, 'field_'+field]; + let p = [...path, 'field_' + field]; let sub = mk_match('x', ocaml_arms_of_type_expr(type, p), p); let match = `match List.assoc_opt "${field}" l with Option.Some x -> begin ${sub} end | Option.None -> raise (MissingField {field = "${field}"; fields = l})`; return `${fieldNameOf(field)} = begin ${match} end`; @@ -376,62 +376,61 @@ let exporters = { oneOf: { guard: def => eq(keys(def), new Set(["oneOf"])) && def.oneOf.every(is_type.variant), - f: (name, {oneOf}) => { + f: (name, { oneOf }) => { let variants = oneOf.map(is_type.variant).flat(); - let type = variants.map(({kind, name: 
variant_name, payloadKind, payload, doc}) => { + let type = variants.map(({ kind, name: variant_name, payloadKind, payload, doc }) => { doc = mkdoc(doc); let variant = ensureUnique('variant', variantNameOf(variant_name)); return ({ record: () => { let fields = Object.entries(payload).map(([field, value]) => - fieldNameOf(field) + ' : ' + ocaml_of_type_expr(value, ['rec-variant:'+variant+':'+field])); + fieldNameOf(field) + ' : ' + ocaml_of_type_expr(value, ['rec-variant:' + variant + ':' + field])); return `${variant} of {${fields.join(';\n')}}${doc}`; }, - expr: () => `${variant} of (${ocaml_of_type_expr(payload, ['expr-variant:'+variant+':'+name])})${doc}`, + expr: () => `${variant} of (${ocaml_of_type_expr(payload, ['expr-variant:' + variant + ':' + name])})${doc}`, empty: () => `${variant}${doc}`, }[payloadKind] || (() => { throw "bad payloadKind: " + payloadKind; }))(); }).join('\n | '); - let parse_arms = variants.map(({kind, name: variant_name, payloadKind, payload}) => { + let parse_arms = variants.map(({ kind, name: variant_name, payloadKind, payload }) => { let variant = variantNameOf(variant_name); - let wrap = (arms, prefix='') => [ + let wrap = (arms, prefix = '') => [ [`\`Assoc ["${variant_name}", rec_value]`, - prefix + mk_match('rec_value', arms, ['rec-variant_'+variant+'_'+variant_name]) + prefix + mk_match('rec_value', arms, ['rec-variant_' + variant + '_' + variant_name]) ] ]; return ({ record: () => { - let [pat, expr] = export_record(Object.entries(payload), ['rec-variant_'+variant+'_'+variant_name]); - return wrap([[pat, variant+' '+expr]]); + let [pat, expr] = export_record(Object.entries(payload), ['rec-variant_' + variant + '_' + variant_name]); + return wrap([[pat, variant + ' ' + expr]]); }, - expr: () => wrap(ocaml_arms_of_type_expr(payload, ['expr-variant(PA):'+name+':'+variant+':'+variant_name]), variant + ' '), + expr: () => wrap(ocaml_arms_of_type_expr(payload, ['expr-variant(PA):' + name + ':' + variant + ':' + variant_name]), 
variant + ' '), empty: () => [[`\`String "${variant_name}"`, variant]], }[payloadKind] || (() => { throw "bad payloadKind: " + payloadKind; }))(); }).flat(); - let parse = mk_match('o', parse_arms, ['parse_'+name]); - let to_json = `match o with ${variants.map(({kind, name: variant_name, payloadKind, payload}) => { + let parse = mk_match('o', parse_arms, [name + '_of_yojson']); + let to_json = `match o with ${variants.map(({ kind, name: variant_name, payloadKind, payload }) => { let variant = variantNameOf(variant_name); let wrap = (x, e) => `${variant} ${x} -> \`Assoc ["${variant_name}", ${e}]`; return ({ record: () => { let fields = Object.entries(payload); return wrap( - `{${fields.map(([field, type], i) => `${fieldNameOf(field)}`).join('; ')}}`, - `\`Assoc [${ - fields.map(([field, type], i) => `("${field}", ${ocaml_yojson_of_type_expr(type, fieldNameOf(field), [name+':'+variant, 'variant', field])})`).join('; ') - }]` + `{${fields.map(([field, type], i) => `${fieldNameOf(field)}`).join('; ')}}`, + `\`Assoc [${fields.map(([field, type], i) => `("${field}", ${ocaml_yojson_of_type_expr(type, fieldNameOf(field), [name + ':' + variant, 'variant', field])})`).join('; ') + }]` ); }, - expr: () => wrap('x', ocaml_yojson_of_type_expr(payload, 'x', [name+':'+variant, 'payload'])), + expr: () => wrap('x', ocaml_yojson_of_type_expr(payload, 'x', [name + ':' + variant, 'payload'])), empty: () => `${variant} -> \`String "${variant_name}"`, }[payloadKind] || (() => { throw "bad payloadKind: " + payloadKind; }))(); }).join(' | ')}`; - return {type, parse, to_json}; + return { type, parse, to_json }; }, }, empty_struct: { @@ -447,23 +446,23 @@ let exporters = { // object is a *flat* record object: { guard: def => (eq(keys(def), new Set(["type", "required", "properties"])) - || eq(keys(def), new Set(["type", "properties"])) - ) + || eq(keys(def), new Set(["type", "properties"])) + ) && def.type === "object" && (def.required || []).every(k => typeof k == 'string') && 
Object.values(def.properties).every(is_type.expr), - f: (name, {required, properties}) => { + f: (name, { required, properties }) => { let fields = Object.entries(properties).map( ([name, prop]) => [name, is_type.expr(prop), prop.description] ); - let [pat, expr] = export_record(fields, ['struct_'+name]); - + let [pat, expr] = export_record(fields, ['struct_' + name]); + return { - type: `{ ${fields.map(([fname, type, doc]) => `${fieldNameOf(fname)} : ${ocaml_of_type_expr(type, ['struct_'+fname+'_'+name])}${mkdoc(doc)}`).join(';\n')} }`, - parse: mk_match('o', [[pat, expr]], ['struct_'+name]), + type: `{ ${fields.map(([fname, type, doc]) => `${fieldNameOf(fname)} : ${ocaml_of_type_expr(type, ['struct_' + fname + '_' + name])}${mkdoc(doc)}`).join(';\n')} }`, + parse: mk_match('o', [[pat, expr]], ['struct_' + name]), to_json: //`let {${fields.map(([fname, type, doc]) => fieldNameOf(fname)).join(';')}} = o in` - `\`Assoc [${fields.map(([fname, type, doc]) => `("${fname}", ${ocaml_yojson_of_type_expr(type, 'o.'+fieldNameOf(fname), ['todo'])})`).join('; ')}]` + `\`Assoc [${fields.map(([fname, type, doc]) => `("${fname}", ${ocaml_yojson_of_type_expr(type, 'o.' 
+ fieldNameOf(fname), ['todo'])})`).join('; ')}]` }; }, }, @@ -473,7 +472,7 @@ let exporters = { f: (name, o) => { assert(o.enum.every(x => typeof x == "string"), 'not every enum is a string'); - if(o.enum.length == 0) { + if (o.enum.length == 0) { return { type: '|', parse: 'failwith "cannot parse an empty type"', @@ -489,16 +488,16 @@ let exporters = { let parse_string = `match s with ` + variants.map( - ({Δ, variant}) => `"${Δ}" -> ${variant}` + ({ Δ, variant }) => `"${Δ}" -> ${variant}` ).join(' | ') + ` | s -> failwith ("unexpected variant [" ^ s ^ "] while parsing enum [${name}]")`; - + return { - type: variants.map(({variant}) => variant).join(' | '), + type: variants.map(({ variant }) => variant).join(' | '), parse: ` match o with | \`String s -> (${parse_string}) | _ -> failwith "expected a string while parsing a ${name}" `, - to_json: `match o with ${variants.map(({variant, variantOriginName}) => `${variant} -> \`String "${variantOriginName}"`).join(' | ')}`, + to_json: `match o with ${variants.map(({ variant, variantOriginName }) => `${variant} -> \`String "${variantOriginName}"`).join(' | ')}`, }; }, }, @@ -506,36 +505,39 @@ let exporters = { let export_definition = (name, def) => { let suitable_exporters = Object.entries(exporters).filter( - ([_, {guard}]) => guard(def) + ([_, { guard }]) => guard(def) ); - if (suitable_exporters.length != 1){ + if (suitable_exporters.length != 1) { console.error(`ERROR: each definition should have exactly one suited exporter, but type "${name}" has the following exporter(s): ${JSON.stringify(suitable_exporters.map(([n, _]) => n))}.`); console.error('name', name); log_full(def); console.error('xname', name); - + throw "kind error"; } - let [_, {f}] = suitable_exporters[0]; + let [_, { f }] = suitable_exporters[0]; name = ensureUnique('type', typeNameOf(name)); let r = f(name, def); - if(r === null) + if (r === null) return `(* type ${name} *)`; - let {type, parse, to_json} = r; - return {name, type, parse, to_json}; + 
let { type, parse, to_json } = r; + return { name, type, parse, to_json }; // return [{type, parse}] // return `type ${name} = ${type}\nlet parse_${name} (o: Yojson.Safe.t): ${name} = ${parse}\n`; }; -function run(str){ +function run(str) { let contents = JSON.parse(str); const definitions = clean(contents.definitions); let sig = ``; - + let impl = `include struct -[@@@warning "-A"]`; +[@@@warning "-A"] +`; + + impl += `let hax_version = {escape|${contents['$id'].replace(/\|escape\}/g, '|_escape}')}|escape}`; let items = Object.entries(definitions) .map(([name, def]) => ['Node_for_TyKind' == name ? 'node_for_ty_kind_generated' : name, def]) @@ -545,7 +547,7 @@ function run(str){ ).filter(x => x instanceof Object); let derive_items = ['show', 'eq']; - + impl += ` module ParseError = struct exception MissingField of { @@ -560,16 +562,17 @@ module ParseError = struct end open ParseError + `; let derive_clause = derive_items.length ? `[@@deriving ${derive_items.join(', ')}]` : ''; impl += ( 'type ' - + items.map(({name, type}) => - `${name} = ${type}\n` - ).join('\nand ') - + derive_clause + + items.map(({ name, type }) => + `${name} = ${type}\n` + ).join('\nand ') + + derive_clause ); impl += ` and node_for__ty_kind = node_for_ty_kind_generated @@ -578,8 +581,9 @@ and node_for__def_id_contents = node_for_def_id_contents_generated type map_types = ${"[`TyKind of ty_kind | `DefIdContents of def_id_contents]"} let cache_map: (int64, ${"[ `Value of map_types | `JSON of Yojson.Safe.t ]"}) Base.Hashtbl.t = Base.Hashtbl.create (module Base.Int64) -let parse_table_id_node (type t) (name: string) (encode: t -> map_types) (decode: map_types -> t option) (parse: Yojson.Safe.t -> t) (o: Yojson.Safe.t): (t * int64) = - let label = "parse_table_id_node:" ^ name ^ ": " in +module Exn = struct +let table_id_node_of_yojson (type t) (name: string) (encode: t -> map_types) (decode: map_types -> t option) (parse: Yojson.Safe.t -> t) (o: Yojson.Safe.t): (t * int64) = + let label = 
"table_id_node_of_yojson:" ^ name ^ ": " in match o with | \`Assoc alist -> begin let id = match List.assoc_opt "id" alist with @@ -606,45 +610,80 @@ let parse_table_id_node (type t) (name: string) (encode: t -> map_types) (decode `; impl += (''); - impl += ('let rec ' + items.map(({name, type, parse}) => - `parse_${name} (o: Yojson.Safe.t): ${name} = ${parse}` + impl += ('let rec ' + items.map(({ name, type, parse }) => + `${name}_of_yojson (o: Yojson.Safe.t): ${name} = ${parse}` ).join('\nand ')); impl += ` -and parse_node_for__ty_kind (o: Yojson.Safe.t): node_for__ty_kind = +and node_for__ty_kind_of_yojson (o: Yojson.Safe.t): node_for__ty_kind = let (value, id) = - parse_table_id_node "TyKind" + table_id_node_of_yojson "TyKind" (fun value -> \`TyKind value) (function | \`TyKind value -> Some value | _ -> None) - parse_ty_kind + ty_kind_of_yojson o in {value; id} -and parse_node_for__def_id_contents (o: Yojson.Safe.t): node_for__def_id_contents = +and node_for__def_id_contents_of_yojson (o: Yojson.Safe.t): node_for__def_id_contents = let (value, id) = - parse_table_id_node "DefIdContents" + table_id_node_of_yojson "DefIdContents" (fun value -> \`DefIdContents value) (function | \`DefIdContents value -> Some value | _ -> None) - parse_def_id_contents + def_id_contents_of_yojson o in {value; id} `; impl += (''); - impl += ('let rec ' + items.map(({name, type, parse, to_json}) => - `to_json_${name} (o: ${name}): Yojson.Safe.t = ${to_json}` + impl += ('let rec ' + items.map(({ name, type, parse, to_json }) => + `yojson_of_${name} (o: ${name}): Yojson.Safe.t = ${to_json}` ).join('\nand ')); impl += ` -and to_json_node_for__ty_kind {value; id} = to_json_node_for_ty_kind_generated {value; id} -and to_json_node_for__def_id_contents {value; id} = to_json_node_for_def_id_contents_generated {value; id} +and yojson_of_node_for__ty_kind {value; id} = yojson_of_node_for_ty_kind_generated {value; id} +and yojson_of_node_for__def_id_contents {value; id} = 
yojson_of_node_for_def_id_contents_generated {value; id} +end + +open struct + let catch_parsing_errors (type a b) (label: string) (f: a -> b) (x: a): (b, Base.Error.t) Base.Result.t = + try Base.Result.Ok (f x) with + | e -> Base.Result.Error (Base.Error.of_exn ~backtrace:\`Get e) + let unwrap = function + | Base.Result.Ok value -> value + | Base.Result.Error err -> + let err = + let path = Utils.tempfile_path ~suffix:".log" in + Core.Out_channel.write_all path + ~data:(Base.Error.to_string_hum err); + path + in + prerr_endline [%string {| +Error: could not serialize or deserialize a hax value. +This error arises from an incompatibility between hax components: hax-engine, cargo-hax and hax-lib. +Potential fixes: + - Make sure the version of \`hax-lib\` for the crate you are trying to extract matches the version of hax currently installed (%{hax_version}). + - Run \`cargo clean\` + - Reinstall hax + +The full stack trace was dumped to %{err}. +|}]; + exit 1 +end `; + impl += (items.map(({ name, type, parse, to_json }) => + ` +let safe_yojson_of_${name} = catch_parsing_errors "yojson_of_${name}" Exn.yojson_of_${name} +let safe_${name}_of_yojson = catch_parsing_errors "${name}_of_yojson" Exn.${name}_of_yojson +let yojson_of_${name} x = unwrap (safe_yojson_of_${name} x) +let ${name}_of_yojson x = unwrap (safe_${name}_of_yojson x)` + ).join('\n')); + return impl + ' \n end'; } -function parse_args(){ +function parse_args() { let [script_name, input_path, output_path, ...rest] = process.argv.slice(1); - if(!input_path || !output_path || rest.length) { + if (!input_path || !output_path || rest.length) { console.log(` Usage: node ${script_name} INPUT_PATH OUTPUT_PATH @@ -657,17 +696,17 @@ Usage: node ${script_name} INPUT_PATH OUTPUT_PATH async function read(stream) { const chunks = []; - for await (const chunk of stream) chunks.push(chunk); + for await (const chunk of stream) chunks.push(chunk); return Buffer.concat(chunks).toString('utf8'); } -async function main(){ 
+async function main() { const fs = require('fs'); - let {input_path, output_path} = parse_args(); + let { input_path, output_path } = parse_args(); let out = run(input_path == '-' - ? await read(process.stdin) - : fs.readFileSync(input_path, 'utf-8') - ); + ? await read(process.stdin) + : fs.readFileSync(input_path, 'utf-8') + ); output_path == '-' ? process.stdout.write(out) : fs.writeFileSync(output_path, out); diff --git a/hax-types/src/diagnostics/mod.rs b/hax-types/src/diagnostics/mod.rs index 91670f03e..73a7ecc0e 100644 --- a/hax-types/src/diagnostics/mod.rs +++ b/hax-types/src/diagnostics/mod.rs @@ -68,6 +68,8 @@ impl std::fmt::Display for Diagnostics { Kind::NonTrivialAndMutFnInput => write!(f, "The support in hax of function with one or more inputs of type `&mut _` is limited. Onlu trivial patterns are allowed there: `fn f(x: &mut (T, U)) ...` is allowed while `f((x, y): &mut (T, U))` is rejected."), + Kind::FStarParseError { fstar_snippet, details: _ } => write!(f, "The following code snippet could not be parsed as valid F*:\n```\n{fstar_snippet}\n```"), + _ => write!(f, "{:?}", self.kind), } } @@ -135,7 +137,13 @@ pub enum Kind { /// An hax attribute (from `hax-lib-macros`) was rejected AttributeRejected { reason: String, - }, + } = 12, + + /// A snippet of F* code could not be parsed + FStarParseError { + fstar_snippet: String, + details: String, + } = 13, } impl Kind { diff --git a/hax-types/src/driver_api.rs b/hax-types/src/driver_api.rs index 51eb86bbc..7f065e0a1 100644 --- a/hax-types/src/driver_api.rs +++ b/hax-types/src/driver_api.rs @@ -28,6 +28,7 @@ pub struct HaxMeta { )>, pub def_ids: Vec, pub comments: Vec<(hax_frontend_exporter::Span, String)>, + pub hax_version: String, } use hax_frontend_exporter::id_table; @@ -49,7 +50,15 @@ where pub fn read(reader: impl std::io::Read) -> (Self, id_table::Table) { let reader = zstd::stream::read::Decoder::new(reader).unwrap(); let reader = std::io::BufReader::new(reader); - 
id_table::WithTable::destruct(serde_brief::from_reader(reader).unwrap()) + let haxmeta = id_table::WithTable::>::destruct( + serde_brief::from_reader(reader).unwrap(), + ); + if haxmeta.0.hax_version != crate::HAX_VERSION { + let version = haxmeta.0.hax_version; + let expected = crate::HAX_VERSION; + panic!("An invariant was broken: `*.haxmeta` was produced by hax version `{version}` while the current version of hax is `{expected}`. Please report this to https://github.com/hacspec/hax/issues."); + }; + haxmeta } } diff --git a/hax-types/src/engine_api.rs b/hax-types/src/engine_api.rs index 74fb3a52c..d68f653f3 100644 --- a/hax-types/src/engine_api.rs +++ b/hax-types/src/engine_api.rs @@ -6,6 +6,7 @@ type ThirBody = hax_frontend_exporter::ThirBody; #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct EngineOptions { + pub hax_version: String, pub backend: BackendOptions<()>, pub input: Vec>, pub impl_infos: Vec<(